Dataset columns (name, type, observed length range):

nwo                stringlengths   5-86
sha                stringlengths   40-40
path               stringlengths   4-189
language           stringclasses   1 value
identifier         stringlengths   1-94
parameters         stringlengths   2-4.03k
argument_list      stringclasses   1 value
return_statement   stringlengths   0-11.5k
docstring          stringlengths   1-33.2k
docstring_summary  stringlengths   0-5.15k
docstring_tokens   sequence
function           stringlengths   34-151k
function_tokens    sequence
url                stringlengths   90-278
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/mindrecord/shardheader.py
python
ShardHeader.add_schema
(self, schema)
return schema_id
Add object of ShardSchema.
[ "Add", "object", "of", "ShardSchema", "." ]
def add_schema(self, schema):
    """
    Add object of ShardSchema.

    Args:
        schema (ShardSchema): Object of ShardSchema.

    Returns:
        int, schema id.

    Raises:
        MRMAddSchemaError: If failed to add schema.
    """
    schema_id = self._header.add_schema(schema)
    if schema_id == -1:
        logger.critical("Failed to add schema.")
        raise MRMAddSchemaError
    return schema_id
[ "def", "add_schema", "(", "self", ",", "schema", ")", ":", "schema_id", "=", "self", ".", "_header", ".", "add_schema", "(", "schema", ")", "if", "schema_id", "==", "-", "1", ":", "logger", ".", "critical", "(", "\"Failed to add schema.\"", ")", "raise", "MRMAddSchemaError", "return", "schema_id" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/mindrecord/shardheader.py#L37-L54
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/requests/cookies.py
python
RequestsCookieJar._find_no_duplicates
(self, name, domain=None, path=None)
Both ``__get_item__`` and ``get`` call this function: it's never used elsewhere in Requests.
[ "Both", "__get_item__", "and", "get", "call", "this", "function", ":", "it", "s", "never" ]
def _find_no_duplicates(self, name, domain=None, path=None):
    """Both ``__get_item__`` and ``get`` call this function: it's never
    used elsewhere in Requests.

    :param name: a string containing name of cookie
    :param domain: (optional) string containing domain of cookie
    :param path: (optional) string containing path of cookie
    :raises KeyError: if cookie is not found
    :raises CookieConflictError: if there are multiple cookies
        that match name and optionally domain and path
    :return: cookie.value
    """
    toReturn = None
    for cookie in iter(self):
        if cookie.name == name:
            if domain is None or cookie.domain == domain:
                if path is None or cookie.path == path:
                    if toReturn is not None:
                        # if there are multiple cookies that meet passed in criteria
                        raise CookieConflictError('There are multiple cookies with name, %r' % (name))
                    toReturn = cookie.value  # we will eventually return this as long as no cookie conflict

    if toReturn:
        return toReturn
    raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
[ "def", "_find_no_duplicates", "(", "self", ",", "name", ",", "domain", "=", "None", ",", "path", "=", "None", ")", ":", "toReturn", "=", "None", "for", "cookie", "in", "iter", "(", "self", ")", ":", "if", "cookie", ".", "name", "==", "name", ":", "if", "domain", "is", "None", "or", "cookie", ".", "domain", "==", "domain", ":", "if", "path", "is", "None", "or", "cookie", ".", "path", "==", "path", ":", "if", "toReturn", "is", "not", "None", ":", "# if there are multiple cookies that meet passed in criteria", "raise", "CookieConflictError", "(", "'There are multiple cookies with name, %r'", "%", "(", "name", ")", ")", "toReturn", "=", "cookie", ".", "value", "# we will eventually return this as long as no cookie conflict", "if", "toReturn", ":", "return", "toReturn", "raise", "KeyError", "(", "'name=%r, domain=%r, path=%r'", "%", "(", "name", ",", "domain", ",", "path", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/requests/cookies.py#L751-L797
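A minimal usage sketch of the conflict handling above (the cookie names and domains are hypothetical; this assumes a standalone requests install rather than pip's vendored copy):

import requests

jar = requests.cookies.RequestsCookieJar()
jar.set('token', 'abc', domain='a.example.com', path='/')
jar.set('token', 'xyz', domain='b.example.com', path='/')

jar._find_no_duplicates('token', domain='a.example.com')  # returns 'abc'
try:
    jar._find_no_duplicates('token')  # no domain filter: both cookies match
except requests.cookies.CookieConflictError:
    pass  # raised because two cookies share the name 'token'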
Ewenwan/MVision
97b394dfa48cb21c82cd003b1a952745e413a17f
CNN/SqueezeNet/SqueezeNet-SSD-OD/ssd_detect.py
python
main
(args)
main
[ "main" ]
def main(args):
    '''main '''
    # Create the detector object
    detection = CaffeDetection(args.gpu_id,
                               args.model_def, args.model_weights,
                               args.image_resize, args.labelmap_file)
    # Run detection and collect the results
    result = detection.detect(args.image_file)
    # Print the results
    print result

    # Visualize the results
    img = Image.open(args.image_file)  # open the image
    draw = ImageDraw.Draw(img)  # drawing context
    width, height = img.size  # original image size
    print width, height
    for item in result:
        # Convert the normalized coordinates to integer pixel values
        xmin = int(round(item[0] * width))
        ymin = int(round(item[1] * height))
        xmax = int(round(item[2] * width))
        ymax = int(round(item[3] * height))
        draw.rectangle([xmin, ymin, xmax, ymax], outline=(255, 0, 0))  # red box
        # item[-1] is the label name, item[-2] is the score
        draw.text([xmin, ymin], item[-1] + str(item[-2]), (0, 0, 255))  # draw the text label
        print item
        print [xmin, ymin, xmax, ymax]
        print [xmin, ymin], item[-1]
    img.save('detect_result.jpg')
[ "def", "main", "(", "args", ")", ":", "# 定义一个检测器类", "detection", "=", "CaffeDetection", "(", "args", ".", "gpu_id", ",", "args", ".", "model_def", ",", "args", ".", "model_weights", ",", "args", ".", "image_resize", ",", "args", ".", "labelmap_file", ")", "# 检测并获取结果", "result", "=", "detection", ".", "detect", "(", "args", ".", "image_file", ")", "# 打印结果", "print", "result", "#结果显示", "img", "=", "Image", ".", "open", "(", "args", ".", "image_file", ")", "#打开图像", "draw", "=", "ImageDraw", ".", "Draw", "(", "img", ")", "#显示", "width", ",", "height", "=", "img", ".", "size", "#原来图像大小", "print", "width", ",", "height", "for", "item", "in", "result", ":", "# 获取坐标实际整数值", "xmin", "=", "int", "(", "round", "(", "item", "[", "0", "]", "*", "width", ")", ")", "ymin", "=", "int", "(", "round", "(", "item", "[", "1", "]", "*", "height", ")", ")", "xmax", "=", "int", "(", "round", "(", "item", "[", "2", "]", "*", "width", ")", ")", "ymax", "=", "int", "(", "round", "(", "item", "[", "3", "]", "*", "height", ")", ")", "draw", ".", "rectangle", "(", "[", "xmin", ",", "ymin", ",", "xmax", ",", "ymax", "]", ",", "outline", "=", "(", "255", ",", "0", ",", "0", ")", ")", "#红色框", "# [6] label_name [5] score ", "draw", ".", "text", "(", "[", "xmin", ",", "ymin", "]", ",", "item", "[", "-", "1", "]", "+", "str", "(", "item", "[", "-", "2", "]", ")", ",", "(", "0", ",", "0", ",", "255", ")", ")", "#显示文本标签 绿色", "print", "item", "print", "[", "xmin", ",", "ymin", ",", "xmax", ",", "ymax", "]", "print", "[", "xmin", ",", "ymin", "]", ",", "item", "[", "-", "1", "]", "img", ".", "save", "(", "'detect_result.jpg'", ")" ]
https://github.com/Ewenwan/MVision/blob/97b394dfa48cb21c82cd003b1a952745e413a17f/CNN/SqueezeNet/SqueezeNet-SSD-OD/ssd_detect.py#L122-L149
cms-sw/cmssw
fd9de012d503d3405420bcbeec0ec879baa57cf2
Alignment/OfflineValidation/python/TkAlAllInOneTool/zMuMuValidation.py
python
ZMuMuValidation.appendToPlots
(self)
return replaceByMap(' filenames.push_back("root://eoscms//eos/cms/store/group/alca_trackeralign/AlignmentValidation/.oO[eosdir]Oo./BiasCheck.root"); titles.push_back(".oO[title]Oo."); colors.push_back(.oO[color]Oo.); linestyles.push_back(.oO[style]Oo.);\n', repMap)
if no argument or "" is passed a string with an instantiation is returned, else the validation is appended to the list
[ "if", "no", "argument", "or", "is", "passed", "a", "string", "with", "an", "instantiation", "is", "returned", "else", "the", "validation", "is", "appended", "to", "the", "list" ]
def appendToPlots(self):
    """
    if no argument or "" is passed a string with an instantiation is
    returned, else the validation is appended to the list
    """
    repMap = self.getRepMap()
    return replaceByMap('  filenames.push_back("root://eoscms//eos/cms/store/group/alca_trackeralign/AlignmentValidation/.oO[eosdir]Oo./BiasCheck.root");  titles.push_back(".oO[title]Oo.");  colors.push_back(.oO[color]Oo.);  linestyles.push_back(.oO[style]Oo.);\n', repMap)
[ "def", "appendToPlots", "(", "self", ")", ":", "repMap", "=", "self", ".", "getRepMap", "(", ")", "return", "replaceByMap", "(", "' filenames.push_back(\"root://eoscms//eos/cms/store/group/alca_trackeralign/AlignmentValidation/.oO[eosdir]Oo./BiasCheck.root\"); titles.push_back(\".oO[title]Oo.\"); colors.push_back(.oO[color]Oo.); linestyles.push_back(.oO[style]Oo.);\\n'", ",", "repMap", ")" ]
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Alignment/OfflineValidation/python/TkAlAllInOneTool/zMuMuValidation.py#L114-L120
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py
python
LuongAttention.__init__
(self, num_units, memory, memory_sequence_length=None, scale=False, probability_fn=None, score_mask_value=None, dtype=None, custom_key_value_fn=None, name="LuongAttention")
Construct the AttentionMechanism mechanism.
[ "Construct", "the", "AttentionMechanism", "mechanism", "." ]
def __init__(self,
             num_units,
             memory,
             memory_sequence_length=None,
             scale=False,
             probability_fn=None,
             score_mask_value=None,
             dtype=None,
             custom_key_value_fn=None,
             name="LuongAttention"):
    """Construct the AttentionMechanism mechanism.

    Args:
        num_units: The depth of the attention mechanism.
        memory: The memory to query; usually the output of an RNN encoder.
            This tensor should be shaped `[batch_size, max_time, ...]`.
        memory_sequence_length: (optional) Sequence lengths for the batch
            entries in memory. If provided, the memory tensor rows are
            masked with zeros for values past the respective sequence
            lengths.
        scale: Python boolean. Whether to scale the energy term.
        probability_fn: (optional) A `callable`. Converts the score to
            probabilities. The default is `tf.nn.softmax`. Other options
            include `tf.contrib.seq2seq.hardmax` and
            `tf.contrib.sparsemax.sparsemax`. Its signature should be:
            `probabilities = probability_fn(score)`.
        score_mask_value: (optional) The mask value for score before
            passing into `probability_fn`. The default is -inf. Only used
            if `memory_sequence_length` is not None.
        dtype: The data type for the memory layer of the attention
            mechanism.
        custom_key_value_fn: (optional) The custom function for computing
            keys and values.
        name: Name to use when creating ops.
    """
    # For LuongAttention, we only transform the memory layer; thus
    # num_units **must** match the expected query depth.
    if probability_fn is None:
        probability_fn = nn_ops.softmax
    if dtype is None:
        dtype = dtypes.float32
    wrapped_probability_fn = lambda score, _: probability_fn(score)
    super(LuongAttention, self).__init__(
        query_layer=None,
        memory_layer=layers_core.Dense(
            num_units, name="memory_layer", use_bias=False, dtype=dtype),
        memory=memory,
        probability_fn=wrapped_probability_fn,
        memory_sequence_length=memory_sequence_length,
        score_mask_value=score_mask_value,
        custom_key_value_fn=custom_key_value_fn,
        name=name)
    self._num_units = num_units
    self._scale = scale
    self._name = name
[ "def", "__init__", "(", "self", ",", "num_units", ",", "memory", ",", "memory_sequence_length", "=", "None", ",", "scale", "=", "False", ",", "probability_fn", "=", "None", ",", "score_mask_value", "=", "None", ",", "dtype", "=", "None", ",", "custom_key_value_fn", "=", "None", ",", "name", "=", "\"LuongAttention\"", ")", ":", "# For LuongAttention, we only transform the memory layer; thus", "# num_units **must** match expected the query depth.", "if", "probability_fn", "is", "None", ":", "probability_fn", "=", "nn_ops", ".", "softmax", "if", "dtype", "is", "None", ":", "dtype", "=", "dtypes", ".", "float32", "wrapped_probability_fn", "=", "lambda", "score", ",", "_", ":", "probability_fn", "(", "score", ")", "super", "(", "LuongAttention", ",", "self", ")", ".", "__init__", "(", "query_layer", "=", "None", ",", "memory_layer", "=", "layers_core", ".", "Dense", "(", "num_units", ",", "name", "=", "\"memory_layer\"", ",", "use_bias", "=", "False", ",", "dtype", "=", "dtype", ")", ",", "memory", "=", "memory", ",", "probability_fn", "=", "wrapped_probability_fn", ",", "memory_sequence_length", "=", "memory_sequence_length", ",", "score_mask_value", "=", "score_mask_value", ",", "custom_key_value_fn", "=", "custom_key_value_fn", ",", "name", "=", "name", ")", "self", ".", "_num_units", "=", "num_units", "self", ".", "_scale", "=", "scale", "self", ".", "_name", "=", "name" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py#L672-L723
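A minimal construction sketch for the mechanism above (assumes a TF 1.x environment where tf.contrib.seq2seq is available; the shapes are illustrative):

import tensorflow as tf

encoder_outputs = tf.placeholder(tf.float32, [None, None, 128])  # [batch, time, depth]
source_lengths = tf.placeholder(tf.int32, [None])

attention_mechanism = tf.contrib.seq2seq.LuongAttention(
    num_units=128,                          # must match the query depth
    memory=encoder_outputs,
    memory_sequence_length=source_lengths)  # masks scores past each length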
pytorch/pytorch
7176c92687d3cc847cc046bf002269c6949a21c2
caffe2/python/checkpoint.py
python
MultiNodeCheckpointManager.report_checkpoint_stats
(self, action_name)
Report the checkpoint stats for all the nodes; we need to aggregate all the nodes' stats together so that we know which node's checkpoint operation dominates.
[ "Report", "the", "checkpoint", "stats", "for", "all", "the", "nodes", "we", "need", "to", "aggregate", "all", "the", "node", "s", "stats", "together", "so", "that", "we", "know", "which", "node", "s", "checkpoint", "operation", "dominates", "." ]
def report_checkpoint_stats(self, action_name):
    """
    Report the checkpoint stats for all the nodes; we need to aggregate
    all the nodes' stats together so that we know which node's checkpoint
    operation dominates.

    Args:
        action_name: A string of the name of checkpoint operation.
    """
    all_stats = {}
    for _, manager in self._node_managers:
        manager.collect_checkpoint_stats(all_stats)
    logger.debug("checkpoint stats: {}".format(all_stats))
    if self._metadata_handler:
        self._metadata_handler.report(action_name, all_stats)
[ "def", "report_checkpoint_stats", "(", "self", ",", "action_name", ")", ":", "all_stats", "=", "{", "}", "for", "_", ",", "manager", "in", "self", ".", "_node_managers", ":", "manager", ".", "collect_checkpoint_stats", "(", "all_stats", ")", "logger", ".", "debug", "(", "\"checkpoint stats: {}\"", ".", "format", "(", "all_stats", ")", ")", "if", "self", ".", "_metadata_handler", ":", "self", ".", "_metadata_handler", ".", "report", "(", "action_name", ",", "all_stats", ")" ]
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/caffe2/python/checkpoint.py#L549-L563
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/rsa/rsa/_compat.py
python
byte
(num)
return pack("B", num)
Converts a number between 0 and 255 (both inclusive) to a base-256 (byte) representation.
[ "Converts", "a", "number", "between", "0", "and", "255", "(", "both", "inclusive", ")", "to", "a", "base", "-", "256", "(", "byte", ")", "representation", "." ]
def byte(num):
    """
    Converts a number between 0 and 255 (both inclusive) to a base-256
    (byte) representation.

    Use it as a replacement for ``chr`` where you are expecting a byte
    because this will work on all current versions of Python::

    :param num: An unsigned integer between 0 and 255 (both inclusive).
    :returns: A single byte.
    """
    return pack("B", num)
[ "def", "byte", "(", "num", ")", ":", "return", "pack", "(", "\"B\"", ",", "num", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/rsa/rsa/_compat.py#L108-L121
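The doctest the docstring alludes to after "Python::" is elided in this record and is left as-is above; as an illustrative sketch of the described behavior (assumes a version of the rsa package that still ships ``rsa._compat.byte``):

from rsa._compat import byte

byte(0)         # b'\x00'
byte(255)       # b'\xff'
byte(ord('A'))  # b'A' -- a bytes value where chr() would give a str on Python 3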
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/multiprocessing/connection.py
python
_ConnectionBase.closed
(self)
return self._handle is None
True if the connection is closed
[ "True", "if", "the", "connection", "is", "closed" ]
def closed(self):
    """True if the connection is closed"""
    return self._handle is None
[ "def", "closed", "(", "self", ")", ":", "return", "self", ".", "_handle", "is", "None" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/multiprocessing/connection.py#L154-L156
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/tty.py
python
setcbreak
(fd, when=TCSAFLUSH)
Put terminal into a cbreak mode.
[ "Put", "terminal", "into", "a", "cbreak", "mode", "." ]
def setcbreak(fd, when=TCSAFLUSH):
    """Put terminal into a cbreak mode."""
    mode = tcgetattr(fd)
    mode[LFLAG] = mode[LFLAG] & ~(ECHO | ICANON)
    mode[CC][VMIN] = 1
    mode[CC][VTIME] = 0
    tcsetattr(fd, when, mode)
[ "def", "setcbreak", "(", "fd", ",", "when", "=", "TCSAFLUSH", ")", ":", "mode", "=", "tcgetattr", "(", "fd", ")", "mode", "[", "LFLAG", "]", "=", "mode", "[", "LFLAG", "]", "&", "~", "(", "ECHO", "|", "ICANON", ")", "mode", "[", "CC", "]", "[", "VMIN", "]", "=", "1", "mode", "[", "CC", "]", "[", "VTIME", "]", "=", "0", "tcsetattr", "(", "fd", ",", "when", ",", "mode", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/tty.py#L30-L36
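A minimal usage sketch of setcbreak (a hypothetical single-keypress read; the save/restore around it is the usual pattern, not part of the function itself):

import sys, termios, tty

fd = sys.stdin.fileno()
saved = termios.tcgetattr(fd)   # remember the original terminal settings
try:
    tty.setcbreak(fd)           # disable echo and line buffering
    ch = sys.stdin.read(1)      # returns after a single keypress
finally:
    termios.tcsetattr(fd, termios.TCSADRAIN, saved)  # restore the terminal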
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqt/mantidqt/widgets/superplot/ads_observer.py
python
SuperplotAdsObserver.replacedHandle
(self, ws_name, ws)
Triggered when a workspace is replaced.
[ "Triggered", "when", "a", "workspace", "is", "replaces", "." ]
def replacedHandle(self, ws_name, ws):
    """
    Triggered when a workspace is replaced.

    Args:
        ws_name (str): name of the workspace
        ws (workspace): reference to the workspace
    """
    self.signals.sig_ws_replaced.emit(ws_name)
[ "def", "replacedHandle", "(", "self", ",", "ws_name", ",", "ws", ")", ":", "self", ".", "signals", ".", "sig_ws_replaced", ".", "emit", "(", "ws_name", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqt/mantidqt/widgets/superplot/ads_observer.py#L74-L82
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/PIL/Image.py
python
Image.seek
(self, frame)
Seeks to the given frame in this sequence file. If you seek beyond the end of the sequence, the method raises an **EOFError** exception. When a sequence file is opened, the library automatically seeks to frame 0.
[ "Seeks", "to", "the", "given", "frame", "in", "this", "sequence", "file", ".", "If", "you", "seek", "beyond", "the", "end", "of", "the", "sequence", "the", "method", "raises", "an", "**", "EOFError", "**", "exception", ".", "When", "a", "sequence", "file", "is", "opened", "the", "library", "automatically", "seeks", "to", "frame", "0", "." ]
def seek(self, frame):
    """
    Seeks to the given frame in this sequence file. If you seek
    beyond the end of the sequence, the method raises an
    **EOFError** exception. When a sequence file is opened, the
    library automatically seeks to frame 0.

    See :py:meth:`~PIL.Image.Image.tell`.

    :param frame: Frame number, starting at 0.
    :exception EOFError: If the call attempts to seek beyond the end of
        the sequence.
    """
    # overridden by file handlers
    if frame != 0:
        raise EOFError
[ "def", "seek", "(", "self", ",", "frame", ")", ":", "# overridden by file handlers", "if", "frame", "!=", "0", ":", "raise", "EOFError" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/PIL/Image.py#L2140-L2156
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_aarch64/python2.7/dist-packages/geographic_msgs/msg/_RoutePath.py
python
RoutePath.serialize_numpy
(self, buff, numpy)
serialize message with numpy array types into buffer :param buff: buffer, ``StringIO`` :param numpy: numpy python module
[ "serialize", "message", "with", "numpy", "array", "types", "into", "buffer", ":", "param", "buff", ":", "buffer", "StringIO", ":", "param", "numpy", ":", "numpy", "python", "module" ]
def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
        _x = self
        buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
        _x = self.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        if python3:
            buff.write(struct.pack('<I%sB' % length, length, *_x))
        else:
            buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = self.network.uuid
        # - if encoded as a list instead, serialize as bytes instead of string
        if type(_x) in [list, tuple]:
            buff.write(_struct_16B.pack(*_x))
        else:
            buff.write(_struct_16s.pack(_x))
        length = len(self.segments)
        buff.write(_struct_I.pack(length))
        for val1 in self.segments:
            _x = val1.uuid
            # - if encoded as a list instead, serialize as bytes instead of string
            if type(_x) in [list, tuple]:
                buff.write(_struct_16B.pack(*_x))
            else:
                buff.write(_struct_16s.pack(_x))
        length = len(self.props)
        buff.write(_struct_I.pack(length))
        for val1 in self.props:
            _x = val1.key
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            if python3:
                buff.write(struct.pack('<I%sB' % length, length, *_x))
            else:
                buff.write(struct.pack('<I%ss' % length, length, _x))
            _x = val1.value
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            if python3:
                buff.write(struct.pack('<I%sB' % length, length, *_x))
            else:
                buff.write(struct.pack('<I%ss' % length, length, _x))
    except struct.error as se:
        self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
    except TypeError as te:
        self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
[ "def", "serialize_numpy", "(", "self", ",", "buff", ",", "numpy", ")", ":", "try", ":", "_x", "=", "self", "buff", ".", "write", "(", "_struct_3I", ".", "pack", "(", "_x", ".", "header", ".", "seq", ",", "_x", ".", "header", ".", "stamp", ".", "secs", ",", "_x", ".", "header", ".", "stamp", ".", "nsecs", ")", ")", "_x", "=", "self", ".", "header", ".", "frame_id", "length", "=", "len", "(", "_x", ")", "if", "python3", "or", "type", "(", "_x", ")", "==", "unicode", ":", "_x", "=", "_x", ".", "encode", "(", "'utf-8'", ")", "length", "=", "len", "(", "_x", ")", "if", "python3", ":", "buff", ".", "write", "(", "struct", ".", "pack", "(", "'<I%sB'", "%", "length", ",", "length", ",", "*", "_x", ")", ")", "else", ":", "buff", ".", "write", "(", "struct", ".", "pack", "(", "'<I%ss'", "%", "length", ",", "length", ",", "_x", ")", ")", "_x", "=", "self", ".", "network", ".", "uuid", "# - if encoded as a list instead, serialize as bytes instead of string", "if", "type", "(", "_x", ")", "in", "[", "list", ",", "tuple", "]", ":", "buff", ".", "write", "(", "_struct_16B", ".", "pack", "(", "*", "_x", ")", ")", "else", ":", "buff", ".", "write", "(", "_struct_16s", ".", "pack", "(", "_x", ")", ")", "length", "=", "len", "(", "self", ".", "segments", ")", "buff", ".", "write", "(", "_struct_I", ".", "pack", "(", "length", ")", ")", "for", "val1", "in", "self", ".", "segments", ":", "_x", "=", "val1", ".", "uuid", "# - if encoded as a list instead, serialize as bytes instead of string", "if", "type", "(", "_x", ")", "in", "[", "list", ",", "tuple", "]", ":", "buff", ".", "write", "(", "_struct_16B", ".", "pack", "(", "*", "_x", ")", ")", "else", ":", "buff", ".", "write", "(", "_struct_16s", ".", "pack", "(", "_x", ")", ")", "length", "=", "len", "(", "self", ".", "props", ")", "buff", ".", "write", "(", "_struct_I", ".", "pack", "(", "length", ")", ")", "for", "val1", "in", "self", ".", "props", ":", "_x", "=", "val1", ".", "key", "length", "=", "len", "(", "_x", ")", "if", "python3", "or", "type", "(", "_x", ")", "==", "unicode", ":", "_x", "=", "_x", ".", "encode", "(", "'utf-8'", ")", "length", "=", "len", "(", "_x", ")", "if", "python3", ":", "buff", ".", "write", "(", "struct", ".", "pack", "(", "'<I%sB'", "%", "length", ",", "length", ",", "*", "_x", ")", ")", "else", ":", "buff", ".", "write", "(", "struct", ".", "pack", "(", "'<I%ss'", "%", "length", ",", "length", ",", "_x", ")", ")", "_x", "=", "val1", ".", "value", "length", "=", "len", "(", "_x", ")", "if", "python3", "or", "type", "(", "_x", ")", "==", "unicode", ":", "_x", "=", "_x", ".", "encode", "(", "'utf-8'", ")", "length", "=", "len", "(", "_x", ")", "if", "python3", ":", "buff", ".", "write", "(", "struct", ".", "pack", "(", "'<I%sB'", "%", "length", ",", "length", ",", "*", "_x", ")", ")", "else", ":", "buff", ".", "write", "(", "struct", ".", "pack", "(", "'<I%ss'", "%", "length", ",", "length", ",", "_x", ")", ")", "except", "struct", ".", "error", "as", "se", ":", "self", ".", "_check_types", "(", "struct", ".", "error", "(", "\"%s: '%s' when writing '%s'\"", "%", "(", "type", "(", "se", ")", ",", "str", "(", "se", ")", ",", "str", "(", "_x", ")", ")", ")", ")", "except", "TypeError", "as", "te", ":", "self", ".", "_check_types", "(", "ValueError", "(", "\"%s: '%s' when writing '%s'\"", "%", "(", "type", "(", "te", ")", ",", "str", "(", "te", ")", ",", "str", "(", "_x", ")", ")", ")", ")" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_aarch64/python2.7/dist-packages/geographic_msgs/msg/_RoutePath.py#L233-L288
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py3/pandas/core/arrays/categorical.py
python
Categorical.map
(self, mapper)
Map categories using input correspondence (dict, Series, or function).
[ "Map", "categories", "using", "input", "correspondence", "(", "dict", "Series", "or", "function", ")", "." ]
def map(self, mapper):
    """
    Map categories using input correspondence (dict, Series, or function).

    Maps the categories to new categories. If the mapping correspondence is
    one-to-one the result is a :class:`~pandas.Categorical` which has the
    same order property as the original, otherwise a :class:`~pandas.Index`
    is returned. NaN values are unaffected.

    If a `dict` or :class:`~pandas.Series` is used any unmapped category is
    mapped to `NaN`. Note that if this happens an :class:`~pandas.Index`
    will be returned.

    Parameters
    ----------
    mapper : function, dict, or Series
        Mapping correspondence.

    Returns
    -------
    pandas.Categorical or pandas.Index
        Mapped categorical.

    See Also
    --------
    CategoricalIndex.map : Apply a mapping correspondence on a
        :class:`~pandas.CategoricalIndex`.
    Index.map : Apply a mapping correspondence on an
        :class:`~pandas.Index`.
    Series.map : Apply a mapping correspondence on a
        :class:`~pandas.Series`.
    Series.apply : Apply more complex functions on a
        :class:`~pandas.Series`.

    Examples
    --------
    >>> cat = pd.Categorical(['a', 'b', 'c'])
    >>> cat
    ['a', 'b', 'c']
    Categories (3, object): ['a', 'b', 'c']
    >>> cat.map(lambda x: x.upper())
    ['A', 'B', 'C']
    Categories (3, object): ['A', 'B', 'C']
    >>> cat.map({'a': 'first', 'b': 'second', 'c': 'third'})
    ['first', 'second', 'third']
    Categories (3, object): ['first', 'second', 'third']

    If the mapping is one-to-one the ordering of the categories is
    preserved:

    >>> cat = pd.Categorical(['a', 'b', 'c'], ordered=True)
    >>> cat
    ['a', 'b', 'c']
    Categories (3, object): ['a' < 'b' < 'c']
    >>> cat.map({'a': 3, 'b': 2, 'c': 1})
    [3, 2, 1]
    Categories (3, int64): [3 < 2 < 1]

    If the mapping is not one-to-one an :class:`~pandas.Index` is returned:

    >>> cat.map({'a': 'first', 'b': 'second', 'c': 'first'})
    Index(['first', 'second', 'first'], dtype='object')

    If a `dict` is used, all unmapped categories are mapped to `NaN` and
    the result is an :class:`~pandas.Index`:

    >>> cat.map({'a': 'first', 'b': 'second'})
    Index(['first', 'second', nan], dtype='object')
    """
    new_categories = self.categories.map(mapper)
    try:
        return self.from_codes(
            self._codes.copy(), categories=new_categories, ordered=self.ordered
        )
    except ValueError:
        # NA values are represented in self._codes with -1
        # np.take causes NA values to take final element in new_categories
        if np.any(self._codes == -1):
            new_categories = new_categories.insert(len(new_categories), np.nan)
        return np.take(new_categories, self._codes)
[ "def", "map", "(", "self", ",", "mapper", ")", ":", "new_categories", "=", "self", ".", "categories", ".", "map", "(", "mapper", ")", "try", ":", "return", "self", ".", "from_codes", "(", "self", ".", "_codes", ".", "copy", "(", ")", ",", "categories", "=", "new_categories", ",", "ordered", "=", "self", ".", "ordered", ")", "except", "ValueError", ":", "# NA values are represented in self._codes with -1", "# np.take causes NA values to take final element in new_categories", "if", "np", ".", "any", "(", "self", ".", "_codes", "==", "-", "1", ")", ":", "new_categories", "=", "new_categories", ".", "insert", "(", "len", "(", "new_categories", ")", ",", "np", ".", "nan", ")", "return", "np", ".", "take", "(", "new_categories", ",", "self", ".", "_codes", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/core/arrays/categorical.py#L1306-L1385
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/python/debug/cli/curses_ui.py
python
CursesUI._screen_terminate
(self)
Terminate the curses screen.
[ "Terminate", "the", "curses", "screen", "." ]
def _screen_terminate(self):
    """Terminate the curses screen."""
    self._stdscr.keypad(0)
    curses.nocbreak()
    curses.echo()
    curses.endwin()

    try:
        # Remove SIGINT handler.
        signal.signal(signal.SIGINT, signal.SIG_DFL)
    except ValueError:
        # Can't catch signals unless you're the main thread.
        pass
[ "def", "_screen_terminate", "(", "self", ")", ":", "self", ".", "_stdscr", ".", "keypad", "(", "0", ")", "curses", ".", "nocbreak", "(", ")", "curses", ".", "echo", "(", ")", "curses", ".", "endwin", "(", ")", "try", ":", "# Remove SIGINT handler.", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "signal", ".", "SIG_DFL", ")", "except", "ValueError", ":", "# Can't catch signals unless you're the main thread.", "pass" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/debug/cli/curses_ui.py#L470-L483
hpi-xnor/BMXNet-v2
af2b1859eafc5c721b1397cef02f946aaf2ce20d
python/mxnet/kvstore.py
python
KVStore._send_command_to_servers
(self, head, body)
Sends a command to all server nodes.
[ "Sends", "a", "command", "to", "all", "server", "nodes", "." ]
def _send_command_to_servers(self, head, body):
    """Sends a command to all server nodes.

    Sending command to a server node will cause that server node to invoke
    ``KVStoreServer.controller`` to execute the command.

    This function returns after the command has been executed on all server
    nodes.

    Parameters
    ----------
    head : int
        the head of the command.
    body : str
        the body of the command.
    """
    check_call(_LIB.MXKVStoreSendCommmandToServers(
        self.handle, mx_uint(head), c_str(body)))
[ "def", "_send_command_to_servers", "(", "self", ",", "head", ",", "body", ")", ":", "check_call", "(", "_LIB", ".", "MXKVStoreSendCommmandToServers", "(", "self", ".", "handle", ",", "mx_uint", "(", "head", ")", ",", "c_str", "(", "body", ")", ")", ")" ]
https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/kvstore.py#L616-L633
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
src/python/turicreate/toolkits/style_transfer/_tf_model_architecture.py
python
define_resnet
(tf_input, tf_index, weights, prefix="transformer_")
return decode_3_relu_1
This function defines the resnet network using the tensorflow nn api.
[ "This", "function", "defines", "the", "resnet", "network", "using", "the", "tensorflow", "nn", "api", "." ]
def define_resnet(tf_input, tf_index, weights, prefix="transformer_"): """ This function defines the resnet network using the tensorflow nn api. Parameters ---------- tf_input: tensorflow.Tensor The input tensor to the network. The image is expected to be in RGB format. tf_index: tensorflow.Tensor The index tensor to the network. weights: dictionary The dictionary of weights to the network. The naming convention used is that from the CoreML export of the Style Transfer Network. prefix: string The prefix column is used to prefix the variables of the network for weight export. Returns ------- out: tensorflow.Tensor The sigmoid output tensor to the network. """ _tf = _lazy_import_tensorflow() # encoding 1 conv_1_paddings = _tf.constant([[0, 0], [4, 4], [4, 4], [0, 0]]) conv_1_pad = _tf.pad(tf_input, conv_1_paddings, "REFLECT") conv_1_filter = weights[prefix + "encode_1_conv_weight"] conv_1 = _tf.nn.conv2d( conv_1_pad, conv_1_filter, strides=[1, 1, 1, 1], padding="VALID" ) inst_1 = define_instance_norm(conv_1, tf_index, weights, prefix + "encode_1_inst_") relu_1 = _tf.nn.relu(inst_1) # encoding 2 conv_2_paddings = _tf.constant([[0, 0], [1, 1], [1, 1], [0, 0]]) conv_2_pad = _tf.pad(relu_1, conv_2_paddings, "REFLECT") conv_2_filter = weights[prefix + "encode_2_conv_weight"] conv_2 = _tf.nn.conv2d( conv_2_pad, conv_2_filter, strides=[1, 2, 2, 1], padding="VALID" ) inst_2 = define_instance_norm(conv_2, tf_index, weights, prefix + "encode_2_inst_") relu_2 = _tf.nn.relu(inst_2) # encoding 3 conv_3_paddings = _tf.constant([[0, 0], [1, 1], [1, 1], [0, 0]]) conv_3_pad = _tf.pad(relu_2, conv_3_paddings, "REFLECT") conv_3_filter = weights[prefix + "encode_3_conv_weight"] conv_3 = _tf.nn.conv2d( conv_3_pad, conv_3_filter, strides=[1, 2, 2, 1], padding="VALID" ) inst_3 = define_instance_norm(conv_3, tf_index, weights, prefix + "encode_3_inst_") relu_3 = _tf.nn.relu(inst_3) # Residual Blocks residual_1 = define_residual(relu_3, tf_index, weights, prefix + "residual_1_") residual_2 = define_residual(residual_1, tf_index, weights, prefix + "residual_2_") residual_3 = define_residual(residual_2, tf_index, weights, prefix + "residual_3_") residual_4 = define_residual(residual_3, tf_index, weights, prefix + "residual_4_") residual_5 = define_residual(residual_4, tf_index, weights, prefix + "residual_5_") # decode 1 decode_1_image_shape = _tf.shape(residual_5) decode_1_new_height = decode_1_image_shape[1] * 2 decode_1_new_width = decode_1_image_shape[2] * 2 decoding_1_upsample_1 = _tf.image.resize_images( residual_5, [decode_1_new_height, decode_1_new_width], method=_tf.image.ResizeMethod.NEAREST_NEIGHBOR, ) decode_1_conv_1_paddings = _tf.constant([[0, 0], [1, 1], [1, 1], [0, 0]]) decode_1_conv_1_pad = _tf.pad( decoding_1_upsample_1, decode_1_conv_1_paddings, "REFLECT" ) decode_1_conv_1_filter = weights[prefix + "decoding_1_conv_weight"] decode_1_conv_1 = _tf.nn.conv2d( decode_1_conv_1_pad, decode_1_conv_1_filter, strides=[1, 1, 1, 1], padding="VALID", ) decode_1_inst_1 = define_instance_norm( decode_1_conv_1, tf_index, weights, prefix + "decoding_1_inst_" ) decode_1_relu_1 = _tf.nn.relu(decode_1_inst_1) # decode 2 decode_2_image_shape = _tf.shape(decode_1_relu_1) decode_2_new_height = decode_2_image_shape[1] * 2 decode_2_new_width = decode_2_image_shape[2] * 2 decoding_2_upsample_1 = _tf.image.resize_images( decode_1_relu_1, [decode_2_new_height, decode_2_new_width], method=_tf.image.ResizeMethod.NEAREST_NEIGHBOR, ) decode_2_conv_1_paddings = _tf.constant([[0, 0], [1, 1], [1, 1], [0, 0]]) decode_2_conv_1_pad = 
_tf.pad( decoding_2_upsample_1, decode_2_conv_1_paddings, "REFLECT" ) decode_2_conv_1_filter = weights[prefix + "decoding_2_conv_weight"] decode_2_conv_1 = _tf.nn.conv2d( decode_2_conv_1_pad, decode_2_conv_1_filter, strides=[1, 1, 1, 1], padding="VALID", ) decode_2_inst_1 = define_instance_norm( decode_2_conv_1, tf_index, weights, prefix + "decoding_2_inst_" ) decode_2_relu_1 = _tf.nn.relu(decode_2_inst_1) # decode 3 decode_3_conv_1_paddings = _tf.constant([[0, 0], [4, 4], [4, 4], [0, 0]]) decode_3_conv_1_pad = _tf.pad(decode_2_relu_1, decode_3_conv_1_paddings, "REFLECT") decode_3_conv_1_filter = weights[prefix + "conv5_weight"] decode_3_conv_1 = _tf.nn.conv2d( decode_3_conv_1_pad, decode_3_conv_1_filter, strides=[1, 1, 1, 1], padding="VALID", ) decode_3_inst_1 = define_instance_norm( decode_3_conv_1, tf_index, weights, prefix + "instancenorm5_" ) decode_3_relu_1 = _tf.nn.sigmoid(decode_3_inst_1) return decode_3_relu_1
[ "def", "define_resnet", "(", "tf_input", ",", "tf_index", ",", "weights", ",", "prefix", "=", "\"transformer_\"", ")", ":", "_tf", "=", "_lazy_import_tensorflow", "(", ")", "# encoding 1", "conv_1_paddings", "=", "_tf", ".", "constant", "(", "[", "[", "0", ",", "0", "]", ",", "[", "4", ",", "4", "]", ",", "[", "4", ",", "4", "]", ",", "[", "0", ",", "0", "]", "]", ")", "conv_1_pad", "=", "_tf", ".", "pad", "(", "tf_input", ",", "conv_1_paddings", ",", "\"REFLECT\"", ")", "conv_1_filter", "=", "weights", "[", "prefix", "+", "\"encode_1_conv_weight\"", "]", "conv_1", "=", "_tf", ".", "nn", ".", "conv2d", "(", "conv_1_pad", ",", "conv_1_filter", ",", "strides", "=", "[", "1", ",", "1", ",", "1", ",", "1", "]", ",", "padding", "=", "\"VALID\"", ")", "inst_1", "=", "define_instance_norm", "(", "conv_1", ",", "tf_index", ",", "weights", ",", "prefix", "+", "\"encode_1_inst_\"", ")", "relu_1", "=", "_tf", ".", "nn", ".", "relu", "(", "inst_1", ")", "# encoding 2", "conv_2_paddings", "=", "_tf", ".", "constant", "(", "[", "[", "0", ",", "0", "]", ",", "[", "1", ",", "1", "]", ",", "[", "1", ",", "1", "]", ",", "[", "0", ",", "0", "]", "]", ")", "conv_2_pad", "=", "_tf", ".", "pad", "(", "relu_1", ",", "conv_2_paddings", ",", "\"REFLECT\"", ")", "conv_2_filter", "=", "weights", "[", "prefix", "+", "\"encode_2_conv_weight\"", "]", "conv_2", "=", "_tf", ".", "nn", ".", "conv2d", "(", "conv_2_pad", ",", "conv_2_filter", ",", "strides", "=", "[", "1", ",", "2", ",", "2", ",", "1", "]", ",", "padding", "=", "\"VALID\"", ")", "inst_2", "=", "define_instance_norm", "(", "conv_2", ",", "tf_index", ",", "weights", ",", "prefix", "+", "\"encode_2_inst_\"", ")", "relu_2", "=", "_tf", ".", "nn", ".", "relu", "(", "inst_2", ")", "# encoding 3", "conv_3_paddings", "=", "_tf", ".", "constant", "(", "[", "[", "0", ",", "0", "]", ",", "[", "1", ",", "1", "]", ",", "[", "1", ",", "1", "]", ",", "[", "0", ",", "0", "]", "]", ")", "conv_3_pad", "=", "_tf", ".", "pad", "(", "relu_2", ",", "conv_3_paddings", ",", "\"REFLECT\"", ")", "conv_3_filter", "=", "weights", "[", "prefix", "+", "\"encode_3_conv_weight\"", "]", "conv_3", "=", "_tf", ".", "nn", ".", "conv2d", "(", "conv_3_pad", ",", "conv_3_filter", ",", "strides", "=", "[", "1", ",", "2", ",", "2", ",", "1", "]", ",", "padding", "=", "\"VALID\"", ")", "inst_3", "=", "define_instance_norm", "(", "conv_3", ",", "tf_index", ",", "weights", ",", "prefix", "+", "\"encode_3_inst_\"", ")", "relu_3", "=", "_tf", ".", "nn", ".", "relu", "(", "inst_3", ")", "# Residual Blocks", "residual_1", "=", "define_residual", "(", "relu_3", ",", "tf_index", ",", "weights", ",", "prefix", "+", "\"residual_1_\"", ")", "residual_2", "=", "define_residual", "(", "residual_1", ",", "tf_index", ",", "weights", ",", "prefix", "+", "\"residual_2_\"", ")", "residual_3", "=", "define_residual", "(", "residual_2", ",", "tf_index", ",", "weights", ",", "prefix", "+", "\"residual_3_\"", ")", "residual_4", "=", "define_residual", "(", "residual_3", ",", "tf_index", ",", "weights", ",", "prefix", "+", "\"residual_4_\"", ")", "residual_5", "=", "define_residual", "(", "residual_4", ",", "tf_index", ",", "weights", ",", "prefix", "+", "\"residual_5_\"", ")", "# decode 1", "decode_1_image_shape", "=", "_tf", ".", "shape", "(", "residual_5", ")", "decode_1_new_height", "=", "decode_1_image_shape", "[", "1", "]", "*", "2", "decode_1_new_width", "=", "decode_1_image_shape", "[", "2", "]", "*", "2", "decoding_1_upsample_1", "=", "_tf", ".", "image", ".", "resize_images", "(", "residual_5", ",", "[", 
"decode_1_new_height", ",", "decode_1_new_width", "]", ",", "method", "=", "_tf", ".", "image", ".", "ResizeMethod", ".", "NEAREST_NEIGHBOR", ",", ")", "decode_1_conv_1_paddings", "=", "_tf", ".", "constant", "(", "[", "[", "0", ",", "0", "]", ",", "[", "1", ",", "1", "]", ",", "[", "1", ",", "1", "]", ",", "[", "0", ",", "0", "]", "]", ")", "decode_1_conv_1_pad", "=", "_tf", ".", "pad", "(", "decoding_1_upsample_1", ",", "decode_1_conv_1_paddings", ",", "\"REFLECT\"", ")", "decode_1_conv_1_filter", "=", "weights", "[", "prefix", "+", "\"decoding_1_conv_weight\"", "]", "decode_1_conv_1", "=", "_tf", ".", "nn", ".", "conv2d", "(", "decode_1_conv_1_pad", ",", "decode_1_conv_1_filter", ",", "strides", "=", "[", "1", ",", "1", ",", "1", ",", "1", "]", ",", "padding", "=", "\"VALID\"", ",", ")", "decode_1_inst_1", "=", "define_instance_norm", "(", "decode_1_conv_1", ",", "tf_index", ",", "weights", ",", "prefix", "+", "\"decoding_1_inst_\"", ")", "decode_1_relu_1", "=", "_tf", ".", "nn", ".", "relu", "(", "decode_1_inst_1", ")", "# decode 2", "decode_2_image_shape", "=", "_tf", ".", "shape", "(", "decode_1_relu_1", ")", "decode_2_new_height", "=", "decode_2_image_shape", "[", "1", "]", "*", "2", "decode_2_new_width", "=", "decode_2_image_shape", "[", "2", "]", "*", "2", "decoding_2_upsample_1", "=", "_tf", ".", "image", ".", "resize_images", "(", "decode_1_relu_1", ",", "[", "decode_2_new_height", ",", "decode_2_new_width", "]", ",", "method", "=", "_tf", ".", "image", ".", "ResizeMethod", ".", "NEAREST_NEIGHBOR", ",", ")", "decode_2_conv_1_paddings", "=", "_tf", ".", "constant", "(", "[", "[", "0", ",", "0", "]", ",", "[", "1", ",", "1", "]", ",", "[", "1", ",", "1", "]", ",", "[", "0", ",", "0", "]", "]", ")", "decode_2_conv_1_pad", "=", "_tf", ".", "pad", "(", "decoding_2_upsample_1", ",", "decode_2_conv_1_paddings", ",", "\"REFLECT\"", ")", "decode_2_conv_1_filter", "=", "weights", "[", "prefix", "+", "\"decoding_2_conv_weight\"", "]", "decode_2_conv_1", "=", "_tf", ".", "nn", ".", "conv2d", "(", "decode_2_conv_1_pad", ",", "decode_2_conv_1_filter", ",", "strides", "=", "[", "1", ",", "1", ",", "1", ",", "1", "]", ",", "padding", "=", "\"VALID\"", ",", ")", "decode_2_inst_1", "=", "define_instance_norm", "(", "decode_2_conv_1", ",", "tf_index", ",", "weights", ",", "prefix", "+", "\"decoding_2_inst_\"", ")", "decode_2_relu_1", "=", "_tf", ".", "nn", ".", "relu", "(", "decode_2_inst_1", ")", "# decode 3", "decode_3_conv_1_paddings", "=", "_tf", ".", "constant", "(", "[", "[", "0", ",", "0", "]", ",", "[", "4", ",", "4", "]", ",", "[", "4", ",", "4", "]", ",", "[", "0", ",", "0", "]", "]", ")", "decode_3_conv_1_pad", "=", "_tf", ".", "pad", "(", "decode_2_relu_1", ",", "decode_3_conv_1_paddings", ",", "\"REFLECT\"", ")", "decode_3_conv_1_filter", "=", "weights", "[", "prefix", "+", "\"conv5_weight\"", "]", "decode_3_conv_1", "=", "_tf", ".", "nn", ".", "conv2d", "(", "decode_3_conv_1_pad", ",", "decode_3_conv_1_filter", ",", "strides", "=", "[", "1", ",", "1", ",", "1", ",", "1", "]", ",", "padding", "=", "\"VALID\"", ",", ")", "decode_3_inst_1", "=", "define_instance_norm", "(", "decode_3_conv_1", ",", "tf_index", ",", "weights", ",", "prefix", "+", "\"instancenorm5_\"", ")", "decode_3_relu_1", "=", "_tf", ".", "nn", ".", "sigmoid", "(", "decode_3_inst_1", ")", "return", "decode_3_relu_1" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/python/turicreate/toolkits/style_transfer/_tf_model_architecture.py#L161-L307
google/sandboxed-api
7004d59150c9fbfaa3e5fd1872affffd1ab14fe8
oss-internship-2020/libuv/generator/wrapper_generator.py
python
get_signatures
(text: str)
return zip(method_types, names, arguments_lists)
Gets the signatures of all the methods in the header.
[ "Gets", "the", "signatures", "of", "all", "the", "methods", "in", "the", "header", "." ]
def get_signatures(text: str) -> zip:
    """Gets the signatures of all the methods in the header.

    Note: This method only works on a certain version of LibUV's header.

    Args:
        text: The contents of the header file

    Returns:
        The extracted method signatures.
    """
    signatures = [x.split(";")[0].strip() for x in text.split("UV_EXTERN")[1:]]
    method_types = [
        " ".join(s.split("(")[0].split(" ")[:-1]).strip() for s in signatures
    ]
    names = [s.split("(")[0].split(" ")[-1].strip() for s in signatures]
    arguments = [s.split("(")[1][:-1] for s in signatures]
    arguments_lists = [[x.strip() for x in a.split(",")] for a in arguments]
    # an iterator of (method_type, name, argument_list) triples
    return zip(method_types, names, arguments_lists)
[ "def", "get_signatures", "(", "text", ":", "str", ")", "->", "str", ":", "signatures", "=", "[", "x", ".", "split", "(", "\";\"", ")", "[", "0", "]", ".", "strip", "(", ")", "for", "x", "in", "text", ".", "split", "(", "\"UV_EXTERN\"", ")", "[", "1", ":", "]", "]", "method_types", "=", "[", "\" \"", ".", "join", "(", "s", ".", "split", "(", "\"(\"", ")", "[", "0", "]", ".", "split", "(", "\" \"", ")", "[", ":", "-", "1", "]", ")", ".", "strip", "(", ")", "for", "s", "in", "signatures", "]", "names", "=", "[", "s", ".", "split", "(", "\"(\"", ")", "[", "0", "]", ".", "split", "(", "\" \"", ")", "[", "-", "1", "]", ".", "strip", "(", ")", "for", "s", "in", "signatures", "]", "arguments", "=", "[", "s", ".", "split", "(", "\"(\"", ")", "[", "1", "]", "[", ":", "-", "1", "]", "for", "s", "in", "signatures", "]", "arguments_lists", "=", "[", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "a", ".", "split", "(", "\",\"", ")", "]", "for", "a", "in", "arguments", "]", "return", "zip", "(", "method_types", ",", "names", ",", "arguments_lists", ")" ]
https://github.com/google/sandboxed-api/blob/7004d59150c9fbfaa3e5fd1872affffd1ab14fe8/oss-internship-2020/libuv/generator/wrapper_generator.py#L184-L203
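A minimal usage sketch of get_signatures on a made-up header snippet (the two UV_EXTERN declarations are illustrative, not taken from any particular libuv version):

header = """
UV_EXTERN int uv_loop_init(uv_loop_t* loop);
UV_EXTERN void uv_stop(uv_loop_t* loop);
"""
for method_type, name, arguments in get_signatures(header):
    print(method_type, name, arguments)
# int uv_loop_init ['uv_loop_t* loop']
# void uv_stop ['uv_loop_t* loop']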
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/contrib/labeled_tensor/python/ops/ops.py
python
pad
(labeled_tensor, paddings, mode='CONSTANT', name=None)
Pads a tensor.
[ "Pads", "a", "tensor", "." ]
def pad(labeled_tensor, paddings, mode='CONSTANT', name=None):
    """Pads a tensor.

    See tf.pad.

    Args:
        labeled_tensor: The input tensor.
        paddings: A mapping where the keys are axis names and the values are
            tuples where the first element is the padding to insert at the
            beginning of the axis and the second is the padding to insert at
            the end of the axis.
        mode: One of "CONSTANT", "REFLECT", or "SYMMETRIC".
        name: Optional op name.

    Returns:
        A tensor with the indicated axes padded, optionally with those axes
        extended with the provided labels.

    Raises:
        ValueError: If the padded axes are not axes in the input tensor.
    """
    with ops.name_scope(name, 'lt_pad', [labeled_tensor]) as scope:
        labeled_tensor = core.convert_to_labeled_tensor(labeled_tensor)

        if not set(paddings.keys()) <= set(labeled_tensor.axes.keys()):
            raise ValueError('pad axes %r are not contained in the set of axis '
                             'names %r on the input labeled tensor' %
                             (paddings.keys(), labeled_tensor.axes))

        new_axes = []
        padding_pairs = []
        for name, axis in labeled_tensor.axes.items():
            if name in paddings:
                padding_before, padding_after = paddings[name]
                axis_before = core.Axis(name, padding_before)
                axis_after = core.Axis(name, padding_after)
                new_axes.append(core.concat_axes([axis_before, axis, axis_after]))
                padding_pairs.append((len(axis_before), len(axis_after)))
            else:
                new_axes.append(axis)
                padding_pairs.append((0, 0))

        pad_op = array_ops.pad(labeled_tensor.tensor, padding_pairs, mode, name=scope)

        return core.LabeledTensor(pad_op, new_axes)
[ "def", "pad", "(", "labeled_tensor", ",", "paddings", ",", "mode", "=", "'CONSTANT'", ",", "name", "=", "None", ")", ":", "with", "ops", ".", "name_scope", "(", "name", ",", "'lt_pad'", ",", "[", "labeled_tensor", "]", ")", "as", "scope", ":", "labeled_tensor", "=", "core", ".", "convert_to_labeled_tensor", "(", "labeled_tensor", ")", "if", "not", "set", "(", "paddings", ".", "keys", "(", ")", ")", "<=", "set", "(", "labeled_tensor", ".", "axes", ".", "keys", "(", ")", ")", ":", "raise", "ValueError", "(", "'pad axes %r are not contained in the set of axis '", "'names %r on the input labeled tensor'", "%", "(", "paddings", ".", "keys", "(", ")", ",", "labeled_tensor", ".", "axes", ")", ")", "new_axes", "=", "[", "]", "padding_pairs", "=", "[", "]", "for", "name", ",", "axis", "in", "labeled_tensor", ".", "axes", ".", "items", "(", ")", ":", "if", "name", "in", "paddings", ":", "padding_before", ",", "padding_after", "=", "paddings", "[", "name", "]", "axis_before", "=", "core", ".", "Axis", "(", "name", ",", "padding_before", ")", "axis_after", "=", "core", ".", "Axis", "(", "name", ",", "padding_after", ")", "new_axes", ".", "append", "(", "core", ".", "concat_axes", "(", "[", "axis_before", ",", "axis", ",", "axis_after", "]", ")", ")", "padding_pairs", ".", "append", "(", "(", "len", "(", "axis_before", ")", ",", "len", "(", "axis_after", ")", ")", ")", "else", ":", "new_axes", ".", "append", "(", "axis", ")", "padding_pairs", ".", "append", "(", "(", "0", ",", "0", ")", ")", "pad_op", "=", "array_ops", ".", "pad", "(", "labeled_tensor", ".", "tensor", ",", "padding_pairs", ",", "mode", ",", "name", "=", "scope", ")", "return", "core", ".", "LabeledTensor", "(", "pad_op", ",", "new_axes", ")" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/labeled_tensor/python/ops/ops.py#L1031-L1078
NREL/EnergyPlus
fadc5973b85c70e8cc923efb69c144e808a26078
scripts/dev/licensetext.py
python
pythonize
(text, line_limit=79, toolname='unspecified', message=report_error)
return '#' + '\n#'.join(lines) + '\n'
Convert the C++ comment text into Python comments
[ "Convert", "the", "C", "++", "comment", "text", "into", "Python", "comments" ]
def pythonize(text, line_limit=79, toolname='unspecified', message=report_error):
    '''Convert the C++ comment text into Python comments'''
    paragraphs = [el for el in merge_paragraphs(text).splitlines() if el != '']
    if len(paragraphs) != 8 or line_limit < 7:
        message({'tool': toolname,
                 'filename': 'unknown',
                 'file': 'unknown',
                 'line': 1,
                 'messagetype': 'error',
                 'message': 'License text cannot be processed'})
        return ''
    lines = []
    # Work the first three paragraphs
    limit = line_limit - 2
    for pg in paragraphs[0:3]:
        lines.extend([' ' + el for el in textwrap.wrap(pg, width=limit)])
        lines.append('')
    # Work the next four paragraphs
    limit = line_limit - 6
    for i, pg in enumerate(paragraphs[3:7]):
        sublines = textwrap.wrap(pg[4:], width=limit)
        lines.append((' (%d) ' % (i + 1)) + sublines[0])
        for el in sublines[1:]:
            lines.append('     ' + el)
        lines.append('')
    # Work the last paragraph
    limit = line_limit - 2
    lines.extend([' ' + el for el in textwrap.wrap(paragraphs[7], width=limit)])
    return '#' + '\n#'.join(lines) + '\n'
[ "def", "pythonize", "(", "text", ",", "line_limit", "=", "79", ",", "toolname", "=", "'unspecified'", ",", "message", "=", "report_error", ")", ":", "paragraphs", "=", "[", "el", "for", "el", "in", "merge_paragraphs", "(", "text", ")", ".", "splitlines", "(", ")", "if", "el", "!=", "''", "]", "if", "len", "(", "paragraphs", ")", "!=", "8", "or", "line_limit", "<", "7", ":", "message", "(", "{", "'tool'", ":", "toolname", ",", "'filename'", ":", "'unknown'", ",", "'file'", ":", "'unknown'", ",", "'line'", ":", "1", ",", "'messagetype'", ":", "'error'", ",", "'message'", ":", "'License text cannot processed'", "}", ")", "return", "''", "lines", "=", "[", "]", "# Work the first three paragraphs", "limit", "=", "line_limit", "-", "2", "for", "pg", "in", "paragraphs", "[", "0", ":", "3", "]", ":", "lines", ".", "extend", "(", "[", "' '", "+", "el", "for", "el", "in", "textwrap", ".", "wrap", "(", "pg", ",", "width", "=", "limit", ")", "]", ")", "lines", ".", "append", "(", "''", ")", "# Work the next four paragraphs", "limit", "=", "line_limit", "-", "6", "for", "i", ",", "pg", "in", "enumerate", "(", "paragraphs", "[", "3", ":", "7", "]", ")", ":", "sublines", "=", "textwrap", ".", "wrap", "(", "pg", "[", "4", ":", "]", ",", "width", "=", "limit", ")", "lines", ".", "append", "(", "(", "' (%d) '", "%", "(", "i", "+", "1", ")", ")", "+", "sublines", "[", "0", "]", ")", "for", "el", "in", "sublines", "[", "1", ":", "]", ":", "lines", ".", "append", "(", "' '", "+", "el", ")", "lines", ".", "append", "(", "''", ")", "# Work the last paragraph", "limit", "=", "line_limit", "-", "2", "lines", ".", "extend", "(", "[", "' '", "+", "el", "for", "el", "in", "textwrap", ".", "wrap", "(", "paragraphs", "[", "7", "]", ",", "width", "=", "limit", ")", "]", ")", "return", "'#'", "+", "'\\n#'", ".", "join", "(", "lines", ")", "+", "'\\n'" ]
https://github.com/NREL/EnergyPlus/blob/fadc5973b85c70e8cc923efb69c144e808a26078/scripts/dev/licensetext.py#L166-L196
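The wrapping scheme in pythonize is plain textwrap plus a '#' gutter; a minimal standalone sketch of the same idea, with a made-up paragraph rather than the real license text:

import textwrap

paragraph = "Redistribution and use in source and binary forms are permitted."
limit = 79 - 2  # keep room for the leading '#' and one space, as above
lines = [' ' + el for el in textwrap.wrap(paragraph, width=limit)]
print('#' + '\n#'.join(lines) + '\n')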
cms-sw/cmssw
fd9de012d503d3405420bcbeec0ec879baa57cf2
Alignment/MuonAlignment/python/svgfig.py
python
make_symbol
(id, shape="dot", **attr)
return output
Creates a new instance of an SVG symbol to avoid cross-linking objects. id required a new identifier (string/Unicode) shape default="dot" the shape name from _symbol_templates attribute=value list keyword list modify the SVG attributes of the new symbol
Creates a new instance of an SVG symbol to avoid cross-linking objects.
[ "Creates", "a", "new", "instance", "of", "an", "SVG", "symbol", "to", "avoid", "cross", "-", "linking", "objects", "." ]
def make_symbol(id, shape="dot", **attr): """Creates a new instance of an SVG symbol to avoid cross-linking objects. id required a new identifier (string/Unicode) shape default="dot" the shape name from _symbol_templates attribute=value list keyword list modify the SVG attributes of the new symbol """ output = copy.deepcopy(_symbol_templates[shape]) for i in output.sub: i.attr.update(attr_preprocess(attr)) output["id"] = id return output
[ "def", "make_symbol", "(", "id", ",", "shape", "=", "\"dot\"", ",", "*", "*", "attr", ")", ":", "output", "=", "copy", ".", "deepcopy", "(", "_symbol_templates", "[", "shape", "]", ")", "for", "i", "in", "output", ".", "sub", ":", "i", ".", "attr", ".", "update", "(", "attr_preprocess", "(", "attr", ")", ")", "output", "[", "\"id\"", "]", "=", "id", "return", "output" ]
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Alignment/MuonAlignment/python/svgfig.py#L1894-L1904
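The key move in make_symbol is deepcopy-then-retag, so instances never share mutable sub-objects with the template; a self-contained sketch of that pattern (the dict template here is illustrative, not the svgfig SVG class):

import copy

template = {'id': 'template', 'sub': [{'stroke': 'black'}]}
inst = copy.deepcopy(template)      # no shared 'sub' list with the template
inst['id'] = 'marker1'
inst['sub'][0]['stroke'] = 'red'    # template['sub'] is untouched
assert template['sub'][0]['stroke'] == 'black'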
adobe/chromium
cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7
base/android/jni_generator/jni_generator.py
python
JavaDataTypeToC
(java_type)
Returns a C datatype for the given java type.
Returns a C datatype for the given java type.
[ "Returns", "a", "C", "datatype", "for", "the", "given", "java", "type", "." ]
def JavaDataTypeToC(java_type): """Returns a C datatype for the given java type.""" java_pod_type_map = { 'int': 'jint', 'byte': 'jbyte', 'boolean': 'jboolean', 'long': 'jlong', 'double': 'jdouble', 'float': 'jfloat', } java_type_map = { 'void': 'void', 'String': 'jstring', } if java_type in java_pod_type_map: return java_pod_type_map[java_type] elif java_type in java_type_map: return java_type_map[java_type] elif java_type.endswith('[]'): if java_type[:-2] in java_pod_type_map: return java_pod_type_map[java_type[:-2]] + 'Array' return 'jobjectArray' else: return 'jobject'
[ "def", "JavaDataTypeToC", "(", "java_type", ")", ":", "java_pod_type_map", "=", "{", "'int'", ":", "'jint'", ",", "'byte'", ":", "'jbyte'", ",", "'boolean'", ":", "'jboolean'", ",", "'long'", ":", "'jlong'", ",", "'double'", ":", "'jdouble'", ",", "'float'", ":", "'jfloat'", ",", "}", "java_type_map", "=", "{", "'void'", ":", "'void'", ",", "'String'", ":", "'jstring'", ",", "}", "if", "java_type", "in", "java_pod_type_map", ":", "return", "java_pod_type_map", "[", "java_type", "]", "elif", "java_type", "in", "java_type_map", ":", "return", "java_type_map", "[", "java_type", "]", "elif", "java_type", ".", "endswith", "(", "'[]'", ")", ":", "if", "java_type", "[", ":", "-", "2", "]", "in", "java_pod_type_map", ":", "return", "java_pod_type_map", "[", "java_type", "[", ":", "-", "2", "]", "]", "+", "'Array'", "return", "'jobjectArray'", "else", ":", "return", "'jobject'" ]
https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/base/android/jni_generator/jni_generator.py#L91-L114
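Illustrative inputs and outputs for JavaDataTypeToC, read straight off the mapping tables above (function assumed importable from jni_generator):

assert JavaDataTypeToC('int') == 'jint'
assert JavaDataTypeToC('String') == 'jstring'
assert JavaDataTypeToC('byte[]') == 'jbyteArray'    # POD arrays get the typed *Array
assert JavaDataTypeToC('View[]') == 'jobjectArray'  # any other array type
assert JavaDataTypeToC('View') == 'jobject'         # fallback for object types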
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/ops/composite/multitype_ops/_compile_utils.py
python
_tensor_getitem
(self, index)
Handle tensor getitem
Handle tensor getitem
[ "Handle", "tensor", "getitem" ]
def _tensor_getitem(self, index): """Handle tensor getitem""" if isinstance(index, Tensor): return tensor_index_by_tensor(self, index) if isinstance(index, list): return tensor_index_by_list(self, index) if isinstance(index, tuple): return tensor_index_by_tuple(self, index) if isinstance(index, bool): return _tensor_index_by_bool(self, index) if isinstance(index, int): return _tensor_index_by_integer(self, index) if isinstance(index, slice): return tensor_index_by_slice(self, index) if index is None: return F.expand_dims(self, 0) if index is ...: return self raise IndexError(f"Only support integers, slices(`:`), ellipsis(`...`), None, bool, tensor with int, " f"list and tuple ,but got {index} with type {type(index)}.")
[ "def", "_tensor_getitem", "(", "self", ",", "index", ")", ":", "if", "isinstance", "(", "index", ",", "Tensor", ")", ":", "return", "tensor_index_by_tensor", "(", "self", ",", "index", ")", "if", "isinstance", "(", "index", ",", "list", ")", ":", "return", "tensor_index_by_list", "(", "self", ",", "index", ")", "if", "isinstance", "(", "index", ",", "tuple", ")", ":", "return", "tensor_index_by_tuple", "(", "self", ",", "index", ")", "if", "isinstance", "(", "index", ",", "bool", ")", ":", "return", "_tensor_index_by_bool", "(", "self", ",", "index", ")", "if", "isinstance", "(", "index", ",", "int", ")", ":", "return", "_tensor_index_by_integer", "(", "self", ",", "index", ")", "if", "isinstance", "(", "index", ",", "slice", ")", ":", "return", "tensor_index_by_slice", "(", "self", ",", "index", ")", "if", "index", "is", "None", ":", "return", "F", ".", "expand_dims", "(", "self", ",", "0", ")", "if", "index", "is", "...", ":", "return", "self", "raise", "IndexError", "(", "f\"Only support integers, slices(`:`), ellipsis(`...`), None, bool, tensor with int, \"", "f\"list and tuple ,but got {index} with type {type(index)}.\"", ")" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/composite/multitype_ops/_compile_utils.py#L32-L51
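Ordering matters in this dispatch chain: bool is tested before int because bool is a subclass of int in Python. A framework-free sketch of the same dispatch shape (handlers are placeholders, not MindSpore ops):

def getitem_dispatch(obj, index):
    if isinstance(index, bool):     # must precede the int check
        return ('by_bool', index)
    if isinstance(index, int):
        return ('by_int', index)
    if isinstance(index, slice):
        return ('by_slice', index)
    if index is None:               # None means "insert a new axis"
        return ('expand_dims', 0)
    raise IndexError(f"unsupported index type {type(index)}")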
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py3/pandas/core/arrays/datetimes.py
python
objects_to_datetime64ns
( data: np.ndarray, dayfirst, yearfirst, utc=False, errors="raise", require_iso8601: bool = False, allow_object: bool = False, allow_mixed: bool = False, )
Convert data to array of timestamps. Parameters ---------- data : np.ndarray[object] dayfirst : bool yearfirst : bool utc : bool, default False Whether to convert timezone-aware timestamps to UTC. errors : {'raise', 'ignore', 'coerce'} require_iso8601 : bool, default False allow_object : bool Whether to return an object-dtype ndarray instead of raising if the data contains more than one timezone. allow_mixed : bool, default False Interpret integers as timestamps when datetime objects are also present. Returns ------- result : ndarray np.int64 dtype if returned values represent UTC timestamps np.datetime64[ns] if returned values represent wall times object if mixed timezones inferred_tz : tzinfo or None Raises ------ ValueError : if data cannot be converted to datetimes
Convert data to array of timestamps.
[ "Convert", "data", "to", "array", "of", "timestamps", "." ]
def objects_to_datetime64ns( data: np.ndarray, dayfirst, yearfirst, utc=False, errors="raise", require_iso8601: bool = False, allow_object: bool = False, allow_mixed: bool = False, ): """ Convert data to array of timestamps. Parameters ---------- data : np.ndarray[object] dayfirst : bool yearfirst : bool utc : bool, default False Whether to convert timezone-aware timestamps to UTC. errors : {'raise', 'ignore', 'coerce'} require_iso8601 : bool, default False allow_object : bool Whether to return an object-dtype ndarray instead of raising if the data contains more than one timezone. allow_mixed : bool, default False Interpret integers as timestamps when datetime objects are also present. Returns ------- result : ndarray np.int64 dtype if returned values represent UTC timestamps np.datetime64[ns] if returned values represent wall times object if mixed timezones inferred_tz : tzinfo or None Raises ------ ValueError : if data cannot be converted to datetimes """ assert errors in ["raise", "ignore", "coerce"] # if str-dtype, convert data = np.array(data, copy=False, dtype=np.object_) flags = data.flags order: Literal["F", "C"] = "F" if flags.f_contiguous else "C" try: result, tz_parsed = tslib.array_to_datetime( data.ravel("K"), errors=errors, utc=utc, dayfirst=dayfirst, yearfirst=yearfirst, require_iso8601=require_iso8601, allow_mixed=allow_mixed, ) result = result.reshape(data.shape, order=order) except ValueError as err: try: values, tz_parsed = conversion.datetime_to_datetime64(data.ravel("K")) # If tzaware, these values represent unix timestamps, so we # return them as i8 to distinguish from wall times values = values.reshape(data.shape, order=order) return values.view("i8"), tz_parsed except (ValueError, TypeError): raise err if tz_parsed is not None: # We can take a shortcut since the datetime64 numpy array # is in UTC # Return i8 values to denote unix timestamps return result.view("i8"), tz_parsed elif is_datetime64_dtype(result): # returning M8[ns] denotes wall-times; since tz is None # the distinction is a thin one return result, tz_parsed elif is_object_dtype(result): # GH#23675 when called via `pd.to_datetime`, returning an object-dtype # array is allowed. When called via `pd.DatetimeIndex`, we can # only accept datetime64 dtype, so raise TypeError if object-dtype # is returned, as that indicates the values can be recognized as # datetimes but they have conflicting timezones/awareness if allow_object: return result, tz_parsed raise TypeError(result) else: # pragma: no cover # GH#23675 this TypeError should never be hit, whereas the TypeError # in the object-dtype branch above is reachable. raise TypeError(result)
[ "def", "objects_to_datetime64ns", "(", "data", ":", "np", ".", "ndarray", ",", "dayfirst", ",", "yearfirst", ",", "utc", "=", "False", ",", "errors", "=", "\"raise\"", ",", "require_iso8601", ":", "bool", "=", "False", ",", "allow_object", ":", "bool", "=", "False", ",", "allow_mixed", ":", "bool", "=", "False", ",", ")", ":", "assert", "errors", "in", "[", "\"raise\"", ",", "\"ignore\"", ",", "\"coerce\"", "]", "# if str-dtype, convert", "data", "=", "np", ".", "array", "(", "data", ",", "copy", "=", "False", ",", "dtype", "=", "np", ".", "object_", ")", "flags", "=", "data", ".", "flags", "order", ":", "Literal", "[", "\"F\"", ",", "\"C\"", "]", "=", "\"F\"", "if", "flags", ".", "f_contiguous", "else", "\"C\"", "try", ":", "result", ",", "tz_parsed", "=", "tslib", ".", "array_to_datetime", "(", "data", ".", "ravel", "(", "\"K\"", ")", ",", "errors", "=", "errors", ",", "utc", "=", "utc", ",", "dayfirst", "=", "dayfirst", ",", "yearfirst", "=", "yearfirst", ",", "require_iso8601", "=", "require_iso8601", ",", "allow_mixed", "=", "allow_mixed", ",", ")", "result", "=", "result", ".", "reshape", "(", "data", ".", "shape", ",", "order", "=", "order", ")", "except", "ValueError", "as", "err", ":", "try", ":", "values", ",", "tz_parsed", "=", "conversion", ".", "datetime_to_datetime64", "(", "data", ".", "ravel", "(", "\"K\"", ")", ")", "# If tzaware, these values represent unix timestamps, so we", "# return them as i8 to distinguish from wall times", "values", "=", "values", ".", "reshape", "(", "data", ".", "shape", ",", "order", "=", "order", ")", "return", "values", ".", "view", "(", "\"i8\"", ")", ",", "tz_parsed", "except", "(", "ValueError", ",", "TypeError", ")", ":", "raise", "err", "if", "tz_parsed", "is", "not", "None", ":", "# We can take a shortcut since the datetime64 numpy array", "# is in UTC", "# Return i8 values to denote unix timestamps", "return", "result", ".", "view", "(", "\"i8\"", ")", ",", "tz_parsed", "elif", "is_datetime64_dtype", "(", "result", ")", ":", "# returning M8[ns] denotes wall-times; since tz is None", "# the distinction is a thin one", "return", "result", ",", "tz_parsed", "elif", "is_object_dtype", "(", "result", ")", ":", "# GH#23675 when called via `pd.to_datetime`, returning an object-dtype", "# array is allowed. When called via `pd.DatetimeIndex`, we can", "# only accept datetime64 dtype, so raise TypeError if object-dtype", "# is returned, as that indicates the values can be recognized as", "# datetimes but they have conflicting timezones/awareness", "if", "allow_object", ":", "return", "result", ",", "tz_parsed", "raise", "TypeError", "(", "result", ")", "else", ":", "# pragma: no cover", "# GH#23675 this TypeError should never be hit, whereas the TypeError", "# in the object-dtype branch above is reachable.", "raise", "TypeError", "(", "result", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/core/arrays/datetimes.py#L2132-L2221
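This helper sits behind the public pd.to_datetime path; a hedged illustration of the mixed-offset case it has to disambiguate (pandas assumed installed; exact return types vary by entry point and version):

import numpy as np
import pandas as pd

data = np.array(["2021-01-01 00:00:00+00:00",
                 "2021-01-01 00:00:00+05:00"], dtype=object)
# With utc=True both values become tz-aware UTC timestamps; without it,
# mixed offsets surface as object dtype or an error, per the docstring.
print(pd.to_datetime(data, utc=True))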
tomahawk-player/tomahawk-resolvers
7f827bbe410ccfdb0446f7d6a91acc2199c9cc8d
archive/spotify/breakpad/third_party/protobuf/protobuf/python/google/protobuf/internal/decoder.py
python
_SkipFixed32
(buffer, pos, end)
return pos
Skip a fixed32 value. Returns the new position.
Skip a fixed32 value. Returns the new position.
[ "Skip", "a", "fixed32", "value", ".", "Returns", "the", "new", "position", "." ]
def _SkipFixed32(buffer, pos, end): """Skip a fixed32 value. Returns the new position.""" pos += 4 if pos > end: raise _DecodeError('Truncated message.') return pos
[ "def", "_SkipFixed32", "(", "buffer", ",", "pos", ",", "end", ")", ":", "pos", "+=", "4", "if", "pos", ">", "end", ":", "raise", "_DecodeError", "(", "'Truncated message.'", ")", "return", "pos" ]
https://github.com/tomahawk-player/tomahawk-resolvers/blob/7f827bbe410ccfdb0446f7d6a91acc2199c9cc8d/archive/spotify/breakpad/third_party/protobuf/protobuf/python/google/protobuf/internal/decoder.py#L668-L674
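Usage sketch (function assumed in scope): the skip helper only advances the cursor and bounds-checks; it never reads the bytes themselves.

buf = b'\x01\x02\x03\x04\x05'
pos = _SkipFixed32(buf, 0, len(buf))   # -> 4
# _SkipFixed32(buf, 2, len(buf)) would raise _DecodeError, since 2 + 4 > 5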
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/parso/py2/parso/utils.py
python
parse_version_string
(version=None)
return _parse_version(version)
Checks for a valid version number (e.g. `3.8` or `2.7.1` or `3`) and returns a corresponding version info that is always two characters long in decimal.
Checks for a valid version number (e.g. `3.8` or `2.7.1` or `3`) and returns a corresponding version info that is always two characters long in decimal.
[ "Checks", "for", "a", "valid", "version", "number", "(", "e", ".", "g", ".", "3", ".", "8", "or", "2", ".", "7", ".", "1", "or", "3", ")", "and", "returns", "a", "corresponding", "version", "info", "that", "is", "always", "two", "characters", "long", "in", "decimal", "." ]
def parse_version_string(version=None): """ Checks for a valid version number (e.g. `3.8` or `2.7.1` or `3`) and returns a corresponding version info that is always two characters long in decimal. """ if version is None: version = '%s.%s' % sys.version_info[:2] if not isinstance(version, (unicode, str)): raise TypeError('version must be a string like "3.8"') return _parse_version(version)
[ "def", "parse_version_string", "(", "version", "=", "None", ")", ":", "if", "version", "is", "None", ":", "version", "=", "'%s.%s'", "%", "sys", ".", "version_info", "[", ":", "2", "]", "if", "not", "isinstance", "(", "version", ",", "(", "unicode", ",", "str", ")", ")", ":", "raise", "TypeError", "(", "'version must be a string like \"3.8\"'", ")", "return", "_parse_version", "(", "version", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/parso/py2/parso/utils.py#L174-L185
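Typical calls through the public parso API (parso assumed installed; the result is a two-field version-info tuple):

from parso.utils import parse_version_string

print(parse_version_string('3.8'))   # PythonVersionInfo(major=3, minor=8)
print(parse_version_string())        # defaults to the running interpreter
# parse_version_string(3.8) raises TypeError: version must be a string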
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pkg_resources/__init__.py
python
yield_lines
(strs)
Yield non-empty/non-comment lines of a string or sequence
Yield non-empty/non-comment lines of a string or sequence
[ "Yield", "non", "-", "empty", "/", "non", "-", "comment", "lines", "of", "a", "string", "or", "sequence" ]
def yield_lines(strs): """Yield non-empty/non-comment lines of a string or sequence""" if isinstance(strs, six.string_types): for s in strs.splitlines(): s = s.strip() # skip blank lines/comments if s and not s.startswith('#'): yield s else: for ss in strs: for s in yield_lines(ss): yield s
[ "def", "yield_lines", "(", "strs", ")", ":", "if", "isinstance", "(", "strs", ",", "six", ".", "string_types", ")", ":", "for", "s", "in", "strs", ".", "splitlines", "(", ")", ":", "s", "=", "s", ".", "strip", "(", ")", "# skip blank lines/comments", "if", "s", "and", "not", "s", ".", "startswith", "(", "'#'", ")", ":", "yield", "s", "else", ":", "for", "ss", "in", "strs", ":", "for", "s", "in", "yield_lines", "(", "ss", ")", ":", "yield", "s" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pkg_resources/__init__.py#L2397-L2408
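Quick illustration of the filtering and the recursive sequence case (function assumed in scope):

text = "first\n  # a comment\n\nsecond"
print(list(yield_lines(text)))           # ['first', 'second']
print(list(yield_lines([text, 'x'])))    # ['first', 'second', 'x']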
tfwu/FaceDetection-ConvNet-3D
f9251c48eb40c5aec8fba7455115c355466555be
python/build/lib.linux-x86_64-2.7/mxnet/monitor.py
python
Monitor.toc
(self)
return res
End collecting for current batch and return results. Call after computation of current batch. Returns ------- res : list of
End collecting for current batch and return results. Call after computation of current batch.
[ "End", "collecting", "for", "current", "batch", "and", "return", "results", ".", "Call", "after", "computation", "of", "current", "batch", "." ]
def toc(self): """End collecting for current batch and return results. Call after computation of current batch. Returns ------- res : list of """ if not self.activated: return [] for exe in self.exes: for array in exe.arg_arrays: array.wait_to_read() for exe in self.exes: for name, array in zip(exe._symbol.list_arguments(), exe.arg_arrays): if self.re_prog.match(name): self.queue.append((self.step, name, self.stat_func(array))) self.activated = False res = [] if self.sort: self.queue.sort(key=lambda x: x[1]) for n, k, v_list in self.queue: if isinstance(v_list, NDArray): v_list = [v_list] assert isinstance(v_list, list) s = '' for v in v_list: assert isinstance(v, NDArray) if v.shape == (1,): s += str(v.asscalar()) + '\t' else: s += str(v.asnumpy()) + '\t' res.append((n, k, s)) self.queue = [] return res
[ "def", "toc", "(", "self", ")", ":", "if", "not", "self", ".", "activated", ":", "return", "[", "]", "for", "exe", "in", "self", ".", "exes", ":", "for", "array", "in", "exe", ".", "arg_arrays", ":", "array", ".", "wait_to_read", "(", ")", "for", "exe", "in", "self", ".", "exes", ":", "for", "name", ",", "array", "in", "zip", "(", "exe", ".", "_symbol", ".", "list_arguments", "(", ")", ",", "exe", ".", "arg_arrays", ")", ":", "if", "self", ".", "re_prog", ".", "match", "(", "name", ")", ":", "self", ".", "queue", ".", "append", "(", "(", "self", ".", "step", ",", "name", ",", "self", ".", "stat_func", "(", "array", ")", ")", ")", "self", ".", "activated", "=", "False", "res", "=", "[", "]", "if", "self", ".", "sort", ":", "self", ".", "queue", ".", "sort", "(", "key", "=", "lambda", "x", ":", "x", "[", "1", "]", ")", "for", "n", ",", "k", ",", "v_list", "in", "self", ".", "queue", ":", "if", "isinstance", "(", "v_list", ",", "NDArray", ")", ":", "v_list", "=", "[", "v_list", "]", "assert", "isinstance", "(", "v_list", ",", "list", ")", "s", "=", "''", "for", "v", "in", "v_list", ":", "assert", "isinstance", "(", "v", ",", "NDArray", ")", "if", "v", ".", "shape", "==", "(", "1", ",", ")", ":", "s", "+=", "str", "(", "v", ".", "asscalar", "(", ")", ")", "+", "'\\t'", "else", ":", "s", "+=", "str", "(", "v", ".", "asnumpy", "(", ")", ")", "+", "'\\t'", "res", ".", "append", "(", "(", "n", ",", "k", ",", "s", ")", ")", "self", ".", "queue", "=", "[", "]", "return", "res" ]
https://github.com/tfwu/FaceDetection-ConvNet-3D/blob/f9251c48eb40c5aec8fba7455115c355466555be/python/build/lib.linux-x86_64-2.7/mxnet/monitor.py#L77-L110
macchina-io/macchina.io
ef24ba0e18379c3dd48fb84e6dbf991101cb8db0
platform/JS/V8/tools/gyp/pylib/gyp/ninja_syntax.py
python
Writer._count_dollars_before_index
(self, s, i)
return dollar_count
Returns the number of '$' characters right in front of s[i].
Returns the number of '$' characters right in front of s[i].
[ "Returns", "the", "number", "of", "$", "characters", "right", "in", "front", "of", "s", "[", "i", "]", "." ]
def _count_dollars_before_index(self, s, i): """Returns the number of '$' characters right in front of s[i].""" dollar_count = 0 dollar_index = i - 1 while dollar_index > 0 and s[dollar_index] == '$': dollar_count += 1 dollar_index -= 1 return dollar_count
[ "def", "_count_dollars_before_index", "(", "self", ",", "s", ",", "i", ")", ":", "dollar_count", "=", "0", "dollar_index", "=", "i", "-", "1", "while", "dollar_index", ">", "0", "and", "s", "[", "dollar_index", "]", "==", "'$'", ":", "dollar_count", "+=", "1", "dollar_index", "-=", "1", "return", "dollar_count" ]
https://github.com/macchina-io/macchina.io/blob/ef24ba0e18379c3dd48fb84e6dbf991101cb8db0/platform/JS/V8/tools/gyp/pylib/gyp/ninja_syntax.py#L102-L109
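A standalone copy for illustration; note the loop condition stops at index 0, so a '$' at the very start of the string is never counted:

def count_dollars_before_index(s, i):
    dollar_count = 0
    dollar_index = i - 1
    while dollar_index > 0 and s[dollar_index] == '$':
        dollar_count += 1
        dollar_index -= 1
    return dollar_count

print(count_dollars_before_index('ab$$$c', 5))  # 3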
nnrg/opennero
43e12a1bcba6e228639db3886fec1dc47ddc24cb
mods/Roomba/RLAgent.py
python
TabularRLAgent.end
(self, time, reward)
return True
receive the reward for the last observation
receive the reward for the last observation
[ "receive", "the", "reward", "for", "the", "last", "observation" ]
def end(self, time, reward): """ receive the reward for the last observation """ # get the reward from the last action r = reward[0] o = self.previous_sensors a = self.previous_action # get the updated epsilon, in case the slider was changed by the user #self.epsilon = get_environment().epsilon # Update the Q value Q_old = self.predict(o, a) q = self.update(o, a, Q_old + self.alpha * (r - Q_old) ) return True
[ "def", "end", "(", "self", ",", "time", ",", "reward", ")", ":", "# get the reward from the last action", "r", "=", "reward", "[", "0", "]", "o", "=", "self", ".", "previous_sensors", "a", "=", "self", ".", "previous_action", "# get the updated epsilon, in case the slider was changed by the user", "#self.epsilon = get_environment().epsilon", "# Update the Q value", "Q_old", "=", "self", ".", "predict", "(", "o", ",", "a", ")", "q", "=", "self", ".", "update", "(", "o", ",", "a", ",", "Q_old", "+", "self", ".", "alpha", "*", "(", "r", "-", "Q_old", ")", ")", "return", "True" ]
https://github.com/nnrg/opennero/blob/43e12a1bcba6e228639db3886fec1dc47ddc24cb/mods/Roomba/RLAgent.py#L172-L187
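The terminal-step update above is tabular Q-learning with no successor term: Q(s,a) <- Q(s,a) + alpha * (r - Q(s,a)). A dictionary-backed sketch (names illustrative, not the OpenNERO API):

alpha = 0.1
Q = {('s0', 'a0'): 0.5}
s, a, r = 's0', 'a0', 1.0
Q[(s, a)] += alpha * (r - Q[(s, a)])   # 0.5 -> 0.55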
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/ipython/py2/IPython/utils/text.py
python
long_substr
(data)
return substr
Return the longest common substring in a list of strings. Credit: http://stackoverflow.com/questions/2892931/longest-common-substring-from-more-than-two-strings-python
Return the longest common substring in a list of strings. Credit: http://stackoverflow.com/questions/2892931/longest-common-substring-from-more-than-two-strings-python
[ "Return", "the", "longest", "common", "substring", "in", "a", "list", "of", "strings", ".", "Credit", ":", "http", ":", "//", "stackoverflow", ".", "com", "/", "questions", "/", "2892931", "/", "longest", "-", "common", "-", "substring", "-", "from", "-", "more", "-", "than", "-", "two", "-", "strings", "-", "python" ]
def long_substr(data): """Return the longest common substring in a list of strings. Credit: http://stackoverflow.com/questions/2892931/longest-common-substring-from-more-than-two-strings-python """ substr = '' if len(data) > 1 and len(data[0]) > 0: for i in range(len(data[0])): for j in range(len(data[0])-i+1): if j > len(substr) and all(data[0][i:i+j] in x for x in data): substr = data[0][i:i+j] elif len(data) == 1: substr = data[0] return substr
[ "def", "long_substr", "(", "data", ")", ":", "substr", "=", "''", "if", "len", "(", "data", ")", ">", "1", "and", "len", "(", "data", "[", "0", "]", ")", ">", "0", ":", "for", "i", "in", "range", "(", "len", "(", "data", "[", "0", "]", ")", ")", ":", "for", "j", "in", "range", "(", "len", "(", "data", "[", "0", "]", ")", "-", "i", "+", "1", ")", ":", "if", "j", ">", "len", "(", "substr", ")", "and", "all", "(", "data", "[", "0", "]", "[", "i", ":", "i", "+", "j", "]", "in", "x", "for", "x", "in", "data", ")", ":", "substr", "=", "data", "[", "0", "]", "[", "i", ":", "i", "+", "j", "]", "elif", "len", "(", "data", ")", "==", "1", ":", "substr", "=", "data", "[", "0", "]", "return", "substr" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py2/IPython/utils/text.py#L416-L429
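Example runs (function assumed in scope); the nested loops make this roughly cubic, which is fine for short string lists:

print(long_substr(['flight', 'lighter', 'slight']))  # 'light'
print(long_substr(['abc']))                          # 'abc'
print(long_substr([]))                               # ''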
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/cgi.py
python
test
(environ=os.environ)
Robust test CGI script, usable as main program. Write minimal HTTP headers and dump all information provided to the script in HTML form.
Robust test CGI script, usable as main program.
[ "Robust", "test", "CGI", "script", "usable", "as", "main", "program", "." ]
def test(environ=os.environ): """Robust test CGI script, usable as main program. Write minimal HTTP headers and dump all information provided to the script in HTML form. """ print "Content-type: text/html" print sys.stderr = sys.stdout try: form = FieldStorage() # Replace with other classes to test those print_directory() print_arguments() print_form(form) print_environ(environ) print_environ_usage() def f(): exec "testing print_exception() -- <I>italics?</I>" def g(f=f): f() print "<H3>What follows is a test, not an actual exception:</H3>" g() except: print_exception() print "<H1>Second try with a small maxlen...</H1>" global maxlen maxlen = 50 try: form = FieldStorage() # Replace with other classes to test those print_directory() print_arguments() print_form(form) print_environ(environ) except: print_exception()
[ "def", "test", "(", "environ", "=", "os", ".", "environ", ")", ":", "print", "\"Content-type: text/html\"", "print", "sys", ".", "stderr", "=", "sys", ".", "stdout", "try", ":", "form", "=", "FieldStorage", "(", ")", "# Replace with other classes to test those", "print_directory", "(", ")", "print_arguments", "(", ")", "print_form", "(", "form", ")", "print_environ", "(", "environ", ")", "print_environ_usage", "(", ")", "def", "f", "(", ")", ":", "exec", "\"testing print_exception() -- <I>italics?</I>\"", "def", "g", "(", "f", "=", "f", ")", ":", "f", "(", ")", "print", "\"<H3>What follows is a test, not an actual exception:</H3>\"", "g", "(", ")", "except", ":", "print_exception", "(", ")", "print", "\"<H1>Second try with a small maxlen...</H1>\"", "global", "maxlen", "maxlen", "=", "50", "try", ":", "form", "=", "FieldStorage", "(", ")", "# Replace with other classes to test those", "print_directory", "(", ")", "print_arguments", "(", ")", "print_form", "(", "form", ")", "print_environ", "(", "environ", ")", "except", ":", "print_exception", "(", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/cgi.py#L882-L919
vslavik/poedit
f7a9daa0a10037e090aa0a86f5ce0f24ececdf6a
deps/boost/tools/build/src/build/virtual_target.py
python
VirtualTarget.actualize_location
(self, target)
Sets up variables on 'target' which specify its location.
Sets up variables on 'target' which specify its location.
[ "Sets", "up", "variables", "on", "target", "which", "specify", "its", "location", "." ]
def actualize_location (self, target): """ Sets up variables on 'target' which specify its location. """ raise BaseException ("method should be defined in derived classes")
[ "def", "actualize_location", "(", "self", ",", "target", ")", ":", "raise", "BaseException", "(", "\"method should be defined in derived classes\"", ")" ]
https://github.com/vslavik/poedit/blob/f7a9daa0a10037e090aa0a86f5ce0f24ececdf6a/deps/boost/tools/build/src/build/virtual_target.py#L359-L362
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/telemetry/telemetry/core/cros_interface.py
python
CrOSInterface._GetSessionManagerPid
(self, procs)
return None
Returns the pid of the session_manager process, given the list of processes.
Returns the pid of the session_manager process, given the list of processes.
[ "Returns", "the", "pid", "of", "the", "session_manager", "process", "given", "the", "list", "of", "processes", "." ]
def _GetSessionManagerPid(self, procs): """Returns the pid of the session_manager process, given the list of processes.""" for pid, process, _, _ in procs: argv = process.split() if argv and os.path.basename(argv[0]) == 'session_manager': return pid return None
[ "def", "_GetSessionManagerPid", "(", "self", ",", "procs", ")", ":", "for", "pid", ",", "process", ",", "_", ",", "_", "in", "procs", ":", "argv", "=", "process", ".", "split", "(", ")", "if", "argv", "and", "os", ".", "path", ".", "basename", "(", "argv", "[", "0", "]", ")", "==", "'session_manager'", ":", "return", "pid", "return", "None" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/telemetry/telemetry/core/cros_interface.py#L357-L364
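Shape of the expected procs rows, (pid, command line, _, _), with made-up values:

procs = [
    (1, '/sbin/init splash', None, None),
    (1234, '/sbin/session_manager --login-manager', None, None),
]
# _GetSessionManagerPid(procs) -> 1234  (basename of argv[0] matches)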
krishauser/Klampt
972cc83ea5befac3f653c1ba20f80155768ad519
Python/python2_version/klampt/src/motionplanning.py
python
PlannerInterface.addMilestone
(self, milestone)
return _motionplanning.PlannerInterface_addMilestone(self, milestone)
addMilestone(PlannerInterface self, PyObject * milestone) -> int
addMilestone(PlannerInterface self, PyObject * milestone) -> int
[ "addMilestone", "(", "PlannerInterface", "self", "PyObject", "*", "milestone", ")", "-", ">", "int" ]
def addMilestone(self, milestone): """ addMilestone(PlannerInterface self, PyObject * milestone) -> int """ return _motionplanning.PlannerInterface_addMilestone(self, milestone)
[ "def", "addMilestone", "(", "self", ",", "milestone", ")", ":", "return", "_motionplanning", ".", "PlannerInterface_addMilestone", "(", "self", ",", "milestone", ")" ]
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/src/motionplanning.py#L854-L861
PaddlePaddle/PaddleOCR
b756bf5f8c90142e0d89d3db0163965c686b6ffe
ppocr/utils/e2e_utils/extract_textpoint_slow.py
python
get_keep_pos_idxs
(labels, remove_blank=None)
return keep_char_idx_list, keep_pos_idx_list
Remove duplicate and get pos idxs of keep items. The value of remove_blank should be [None, 95].
Remove duplicate and get pos idxs of keep items. The value of remove_blank should be [None, 95].
[ "Remove", "duplicate", "and", "get", "pos", "idxs", "of", "keep", "items", ".", "The", "value", "of", "remove_blank", "should", "be", "[", "None", "95", "]", "." ]
def get_keep_pos_idxs(labels, remove_blank=None): """ Remove duplicate and get pos idxs of keep items. The value of remove_blank should be [None, 95]. """ duplicate_len_list = [] keep_pos_idx_list = [] keep_char_idx_list = [] for k, v_ in groupby(labels): current_len = len(list(v_)) if k != remove_blank: current_idx = int(sum(duplicate_len_list) + current_len // 2) keep_pos_idx_list.append(current_idx) keep_char_idx_list.append(k) duplicate_len_list.append(current_len) return keep_char_idx_list, keep_pos_idx_list
[ "def", "get_keep_pos_idxs", "(", "labels", ",", "remove_blank", "=", "None", ")", ":", "duplicate_len_list", "=", "[", "]", "keep_pos_idx_list", "=", "[", "]", "keep_char_idx_list", "=", "[", "]", "for", "k", ",", "v_", "in", "groupby", "(", "labels", ")", ":", "current_len", "=", "len", "(", "list", "(", "v_", ")", ")", "if", "k", "!=", "remove_blank", ":", "current_idx", "=", "int", "(", "sum", "(", "duplicate_len_list", ")", "+", "current_len", "//", "2", ")", "keep_pos_idx_list", ".", "append", "(", "current_idx", ")", "keep_char_idx_list", ".", "append", "(", "k", ")", "duplicate_len_list", ".", "append", "(", "current_len", ")", "return", "keep_char_idx_list", ",", "keep_pos_idx_list" ]
https://github.com/PaddlePaddle/PaddleOCR/blob/b756bf5f8c90142e0d89d3db0163965c686b6ffe/ppocr/utils/e2e_utils/extract_textpoint_slow.py#L107-L122
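Example of the CTC-style collapse, using 95 as the blank label (function assumed in scope):

labels = [3, 3, 95, 95, 7, 7, 7, 95, 3]
chars, positions = get_keep_pos_idxs(labels, remove_blank=95)
print(chars)      # [3, 7, 3]
print(positions)  # [1, 5, 8] -- the midpoint index of each kept run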
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
build/android/emma_coverage_stats.py
python
_EmmaCoverageStats._GetSourceFileToEmmaFileDict
(self, files)
return source_to_emma
Gets a dict used to correlate Java source files with EMMA HTML files. This method gathers the information needed to correlate EMMA HTML files with Java source files. EMMA XML and plain text reports do not provide line by line coverage data, so HTML reports must be used instead. Unfortunately, the HTML files that are created are given garbage names (i.e 1.html) so we need to manually correlate EMMA HTML files with the original Java source files. Args: files: A list of file names for which coverage information is desired. Returns: A dict mapping Java source file paths to EMMA HTML file paths.
Gets a dict used to correlate Java source files with EMMA HTML files.
[ "Gets", "a", "dict", "used", "to", "correlate", "Java", "source", "files", "with", "EMMA", "HTML", "files", "." ]
def _GetSourceFileToEmmaFileDict(self, files): """Gets a dict used to correlate Java source files with EMMA HTML files. This method gathers the information needed to correlate EMMA HTML files with Java source files. EMMA XML and plain text reports do not provide line by line coverage data, so HTML reports must be used instead. Unfortunately, the HTML files that are created are given garbage names (i.e 1.html) so we need to manually correlate EMMA HTML files with the original Java source files. Args: files: A list of file names for which coverage information is desired. Returns: A dict mapping Java source file paths to EMMA HTML file paths. """ # Maps Java source file paths to package names. # Example: /usr/code/file.java -> org.chromium.file.java. source_to_package = {} for file_path in files: package = self.GetPackageNameFromFile(file_path) if package: source_to_package[file_path] = package else: logging.warning("Skipping %s because it doesn\'t have a package " "statement.", file_path) # Maps package names to EMMA report HTML files. # Example: org.chromium.file.java -> out/coverage/1a.html. package_to_emma = self._emma_parser.GetPackageNameToEmmaFileDict() # Finally, we have a dict mapping Java file paths to EMMA report files. # Example: /usr/code/file.java -> out/coverage/1a.html. source_to_emma = {source: package_to_emma[package] for source, package in source_to_package.iteritems() if package in package_to_emma} return source_to_emma
[ "def", "_GetSourceFileToEmmaFileDict", "(", "self", ",", "files", ")", ":", "# Maps Java source file paths to package names.", "# Example: /usr/code/file.java -> org.chromium.file.java.", "source_to_package", "=", "{", "}", "for", "file_path", "in", "files", ":", "package", "=", "self", ".", "GetPackageNameFromFile", "(", "file_path", ")", "if", "package", ":", "source_to_package", "[", "file_path", "]", "=", "package", "else", ":", "logging", ".", "warning", "(", "\"Skipping %s because it doesn\\'t have a package \"", "\"statement.\"", ",", "file_path", ")", "# Maps package names to EMMA report HTML files.", "# Example: org.chromium.file.java -> out/coverage/1a.html.", "package_to_emma", "=", "self", ".", "_emma_parser", ".", "GetPackageNameToEmmaFileDict", "(", ")", "# Finally, we have a dict mapping Java file paths to EMMA report files.", "# Example: /usr/code/file.java -> out/coverage/1a.html.", "source_to_emma", "=", "{", "source", ":", "package_to_emma", "[", "package", "]", "for", "source", ",", "package", "in", "source_to_package", ".", "iteritems", "(", ")", "if", "package", "in", "package_to_emma", "}", "return", "source_to_emma" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/build/android/emma_coverage_stats.py#L352-L387
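The heart of the correlation is a two-hop dict join through package names; a minimal standalone sketch with made-up paths:

source_to_package = {'/src/Foo.java': 'org.chromium.foo'}
package_to_emma = {'org.chromium.foo': 'out/coverage/1a.html'}
source_to_emma = {src: package_to_emma[pkg]
                  for src, pkg in source_to_package.items()
                  if pkg in package_to_emma}
# {'/src/Foo.java': 'out/coverage/1a.html'}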
snap-stanford/snap-python
d53c51b0a26aa7e3e7400b014cdf728948fde80a
setup/snap.py
python
TSIn.PeekCh
(self)
return _snap.TSIn_PeekCh(self)
PeekCh(TSIn self) -> char Parameters: self: TSIn *
PeekCh(TSIn self) -> char
[ "PeekCh", "(", "TSIn", "self", ")", "-", ">", "char" ]
def PeekCh(self): """ PeekCh(TSIn self) -> char Parameters: self: TSIn * """ return _snap.TSIn_PeekCh(self)
[ "def", "PeekCh", "(", "self", ")", ":", "return", "_snap", ".", "TSIn_PeekCh", "(", "self", ")" ]
https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L1818-L1826
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/agw/ultimatelistctrl.py
python
UltimateListCtrl.HasHeader
(self)
return self._mainWin.HasHeader()
Returns ``True`` if :class:`UltimateListCtrl` has a header window.
Returns ``True`` if :class:`UltimateListCtrl` has a header window.
[ "Returns", "True", "if", ":", "class", ":", "UltimateListCtrl", "has", "a", "header", "window", "." ]
def HasHeader(self): """ Returns ``True`` if :class:`UltimateListCtrl` has a header window. """ return self._mainWin.HasHeader()
[ "def", "HasHeader", "(", "self", ")", ":", "return", "self", ".", "_mainWin", ".", "HasHeader", "(", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/ultimatelistctrl.py#L11028-L11031
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/math_ops.py
python
scalar_mul
(scalar, x, name=None)
Multiplies a scalar times a `Tensor` or `IndexedSlices` object. Intended for use in gradient code which might deal with `IndexedSlices` objects, which are easy to multiply by a scalar but more expensive to multiply with arbitrary tensors. Args: scalar: A 0-D scalar `Tensor`. Must have known shape. x: A `Tensor` or `IndexedSlices` to be scaled. name: A name for the operation (optional). Returns: `scalar * x` of the same type (`Tensor` or `IndexedSlices`) as `x`. Raises: ValueError: if scalar is not a 0-D `scalar`.
Multiplies a scalar times a `Tensor` or `IndexedSlices` object.
[ "Multiplies", "a", "scalar", "times", "a", "Tensor", "or", "IndexedSlices", "object", "." ]
def scalar_mul(scalar, x, name=None): """Multiplies a scalar times a `Tensor` or `IndexedSlices` object. Intended for use in gradient code which might deal with `IndexedSlices` objects, which are easy to multiply by a scalar but more expensive to multiply with arbitrary tensors. Args: scalar: A 0-D scalar `Tensor`. Must have known shape. x: A `Tensor` or `IndexedSlices` to be scaled. name: A name for the operation (optional). Returns: `scalar * x` of the same type (`Tensor` or `IndexedSlices`) as `x`. Raises: ValueError: if scalar is not a 0-D `scalar`. """ scalar = ops.convert_to_tensor( scalar, dtype=x.dtype.base_dtype, name="scalar") shape = scalar.get_shape() if shape.ndims == 0: if isinstance(x, ops.IndexedSlices): return ops.IndexedSlices( gen_math_ops.mul(scalar, x.values, name), x.indices, x.dense_shape) else: return gen_math_ops.mul(scalar, x, name) else: raise ValueError("Only scalar multiply works, got shape %s" % shape)
[ "def", "scalar_mul", "(", "scalar", ",", "x", ",", "name", "=", "None", ")", ":", "scalar", "=", "ops", ".", "convert_to_tensor", "(", "scalar", ",", "dtype", "=", "x", ".", "dtype", ".", "base_dtype", ",", "name", "=", "\"scalar\"", ")", "shape", "=", "scalar", ".", "get_shape", "(", ")", "if", "shape", ".", "ndims", "==", "0", ":", "if", "isinstance", "(", "x", ",", "ops", ".", "IndexedSlices", ")", ":", "return", "ops", ".", "IndexedSlices", "(", "gen_math_ops", ".", "mul", "(", "scalar", ",", "x", ".", "values", ",", "name", ")", ",", "x", ".", "indices", ",", "x", ".", "dense_shape", ")", "else", ":", "return", "gen_math_ops", ".", "mul", "(", "scalar", ",", "x", ",", "name", ")", "else", ":", "raise", "ValueError", "(", "\"Only scalar multiply works, got shape %s\"", "%", "shape", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/math_ops.py#L396-L424
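Public usage goes through tf.scalar_mul; the point of the helper is the cheap IndexedSlices branch, which scales only the materialized values (TF 1.x assumed):

import tensorflow as tf

x = tf.constant([1.0, 2.0, 3.0])
y = tf.scalar_mul(2.0, x)   # elementwise 2 * x; the scalar must be 0-D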
9miao/CrossApp
1f5375e061bf69841eb19728598f5ae3f508d620
tools/bindings-generator/backup/clang-llvm-3.3-pybinding/cindex.py
python
Type.get_array_size
(self)
return conf.lib.clang_getArraySize(self)
Retrieve the size of the constant array.
Retrieve the size of the constant array.
[ "Retrieve", "the", "size", "of", "the", "constant", "array", "." ]
def get_array_size(self): """ Retrieve the size of the constant array. """ return conf.lib.clang_getArraySize(self)
[ "def", "get_array_size", "(", "self", ")", ":", "return", "conf", ".", "lib", ".", "clang_getArraySize", "(", "self", ")" ]
https://github.com/9miao/CrossApp/blob/1f5375e061bf69841eb19728598f5ae3f508d620/tools/bindings-generator/backup/clang-llvm-3.3-pybinding/cindex.py#L1622-L1626
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/lite/python/convert.py
python
convert_graphdef_with_arrays
(input_data, input_arrays_with_shape, output_arrays, control_output_arrays, **kwargs)
return data
Convert a frozen GraphDef that can't be loaded in TF. Conversion can be customized by providing arguments that are forwarded to `build_model_flags` and `build_conversion_flags` (see documentation). Args: input_data: Input data (i.e. often `sess.graph_def`), input_arrays_with_shape: Tuple of strings representing input tensor names and list of integers representing input shapes (e.g., [("foo" : [1, 16, 16, 3])]). Use only when graph cannot be loaded into TensorFlow and when `input_tensors` is None. output_arrays: List of output tensors to freeze graph with. Use only when graph cannot be loaded into TensorFlow and when `output_tensors` is None. control_output_arrays: Control output node names. This is used when converting a Graph with no output tensors. For example, if the graph's last operation is a Print op, just specify that op's name in this field. This can be used together with the `output_arrays` parameter. **kwargs: See `build_model_flags` and `build_conversion_flags`. Returns: The converted data. For example if TFLite was the destination, then this will be a tflite flatbuffer in a bytes array. Raises: Defined in `build_conversion_flags`.
Convert a frozen GraphDef that can't be loaded in TF.
[ "Convert", "a", "frozen", "GraphDef", "that", "can", "t", "be", "loaded", "in", "TF", "." ]
def convert_graphdef_with_arrays(input_data, input_arrays_with_shape, output_arrays, control_output_arrays, **kwargs): """"Convert a frozen GraphDef that can't be loaded in TF. Conversion can be customized by providing arguments that are forwarded to `build_model_flags` and `build_conversion_flags` (see documentation). Args: input_data: Input data (i.e. often `sess.graph_def`), input_arrays_with_shape: Tuple of strings representing input tensor names and list of integers representing input shapes (e.g., [("foo" : [1, 16, 16, 3])]). Use only when graph cannot be loaded into TensorFlow and when `input_tensors` is None. output_arrays: List of output tensors to freeze graph with. Use only when graph cannot be loaded into TensorFlow and when `output_tensors` is None. control_output_arrays: Control output node names. This is used when converting a Graph with no output tensors. For example, if the graph's last operation is a Print op, just specify that op's name in this field. This can be used together with the `output_arrays` parameter. **kwargs: See `build_model_flags` and `build_conversion_flags`. Returns: The converted data. For example if TFLite was the destination, then this will be a tflite flatbuffer in a bytes array. Raises: Defined in `build_conversion_flags`. """ model_flags = build_model_flags(**kwargs) conversion_flags = build_conversion_flags(**kwargs) enable_mlir_converter = kwargs.get("enable_mlir_converter", True) quantized_input_stats = kwargs.get("quantized_input_stats", None) for idx, (name, shape) in enumerate(input_arrays_with_shape): input_array = model_flags.input_arrays.add() if _is_quantized_input_stats_required(conversion_flags): if quantized_input_stats: input_array.mean_value, input_array.std_value = ( quantized_input_stats[idx]) else: raise ValueError( "The `quantized_input_stats` flag must be defined when either " "`inference_type` flag or `inference_input_type` flag is set to " "tf.int8 or tf.uint8.") input_array.name = name input_array.shape.dims.extend(list(map(int, shape))) if output_arrays: for name in output_arrays: model_flags.output_arrays.append(name) if control_output_arrays: for name in control_output_arrays: model_flags.control_output_arrays.append(name) data = convert( model_flags.SerializeToString(), conversion_flags.SerializeToString(), input_data.SerializeToString(), debug_info_str=None, enable_mlir_converter=enable_mlir_converter) return data
[ "def", "convert_graphdef_with_arrays", "(", "input_data", ",", "input_arrays_with_shape", ",", "output_arrays", ",", "control_output_arrays", ",", "*", "*", "kwargs", ")", ":", "model_flags", "=", "build_model_flags", "(", "*", "*", "kwargs", ")", "conversion_flags", "=", "build_conversion_flags", "(", "*", "*", "kwargs", ")", "enable_mlir_converter", "=", "kwargs", ".", "get", "(", "\"enable_mlir_converter\"", ",", "True", ")", "quantized_input_stats", "=", "kwargs", ".", "get", "(", "\"quantized_input_stats\"", ",", "None", ")", "for", "idx", ",", "(", "name", ",", "shape", ")", "in", "enumerate", "(", "input_arrays_with_shape", ")", ":", "input_array", "=", "model_flags", ".", "input_arrays", ".", "add", "(", ")", "if", "_is_quantized_input_stats_required", "(", "conversion_flags", ")", ":", "if", "quantized_input_stats", ":", "input_array", ".", "mean_value", ",", "input_array", ".", "std_value", "=", "(", "quantized_input_stats", "[", "idx", "]", ")", "else", ":", "raise", "ValueError", "(", "\"The `quantized_input_stats` flag must be defined when either \"", "\"`inference_type` flag or `inference_input_type` flag is set to \"", "\"tf.int8 or tf.uint8.\"", ")", "input_array", ".", "name", "=", "name", "input_array", ".", "shape", ".", "dims", ".", "extend", "(", "list", "(", "map", "(", "int", ",", "shape", ")", ")", ")", "if", "output_arrays", ":", "for", "name", "in", "output_arrays", ":", "model_flags", ".", "output_arrays", ".", "append", "(", "name", ")", "if", "control_output_arrays", ":", "for", "name", "in", "control_output_arrays", ":", "model_flags", ".", "control_output_arrays", ".", "append", "(", "name", ")", "data", "=", "convert", "(", "model_flags", ".", "SerializeToString", "(", ")", ",", "conversion_flags", ".", "SerializeToString", "(", ")", ",", "input_data", ".", "SerializeToString", "(", ")", ",", "debug_info_str", "=", "None", ",", "enable_mlir_converter", "=", "enable_mlir_converter", ")", "return", "data" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/lite/python/convert.py#L635-L697
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_internal/index/package_finder.py
python
CandidateEvaluator.__init__
( self, project_name, # type: str supported_tags, # type: List[Tag] specifier, # type: specifiers.BaseSpecifier prefer_binary=False, # type: bool allow_all_prereleases=False, # type: bool hashes=None, # type: Optional[Hashes] )
:param supported_tags: The PEP 425 tags supported by the target Python in order of preference (most preferred first).
:param supported_tags: The PEP 425 tags supported by the target Python in order of preference (most preferred first).
[ ":", "param", "supported_tags", ":", "The", "PEP", "425", "tags", "supported", "by", "the", "target", "Python", "in", "order", "of", "preference", "(", "most", "preferred", "first", ")", "." ]
def __init__( self, project_name, # type: str supported_tags, # type: List[Tag] specifier, # type: specifiers.BaseSpecifier prefer_binary=False, # type: bool allow_all_prereleases=False, # type: bool hashes=None, # type: Optional[Hashes] ): # type: (...) -> None """ :param supported_tags: The PEP 425 tags supported by the target Python in order of preference (most preferred first). """ self._allow_all_prereleases = allow_all_prereleases self._hashes = hashes self._prefer_binary = prefer_binary self._project_name = project_name self._specifier = specifier self._supported_tags = supported_tags
[ "def", "__init__", "(", "self", ",", "project_name", ",", "# type: str", "supported_tags", ",", "# type: List[Tag]", "specifier", ",", "# type: specifiers.BaseSpecifier", "prefer_binary", "=", "False", ",", "# type: bool", "allow_all_prereleases", "=", "False", ",", "# type: bool", "hashes", "=", "None", ",", "# type: Optional[Hashes]", ")", ":", "# type: (...) -> None", "self", ".", "_allow_all_prereleases", "=", "allow_all_prereleases", "self", ".", "_hashes", "=", "hashes", "self", ".", "_prefer_binary", "=", "prefer_binary", "self", ".", "_project_name", "=", "project_name", "self", ".", "_specifier", "=", "specifier", "self", ".", "_supported_tags", "=", "supported_tags" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_internal/index/package_finder.py#L423-L442
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/propgrid.py
python
PropertyGridPage.GetStatePtr
(*args)
return _propgrid.PropertyGridPage_GetStatePtr(*args)
GetStatePtr(self) GetStatePtr(self)
GetStatePtr(self) GetStatePtr(self)
[ "GetStatePtr", "(", "self", ")", "GetStatePtr", "(", "self", ")" ]
def GetStatePtr(*args): """ GetStatePtr(self) GetStatePtr(self) """ return _propgrid.PropertyGridPage_GetStatePtr(*args)
[ "def", "GetStatePtr", "(", "*", "args", ")", ":", "return", "_propgrid", ".", "PropertyGridPage_GetStatePtr", "(", "*", "args", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/propgrid.py#L3372-L3377
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/telemetry/telemetry/internal/platform/android_device.py
python
FindAllAvailableDevices
(options)
return devices
Returns a list of available devices.
Returns a list of available devices.
[ "Returns", "a", "list", "of", "available", "devices", "." ]
def FindAllAvailableDevices(options): """Returns a list of available devices. """ # Disable Android device discovery when remote testing a CrOS device if options.cros_remote: return [] android_platform_options = options.remote_platform_options devices = [] try: if CanDiscoverDevices(): blacklist = None if android_platform_options.android_blacklist_file: blacklist = device_blacklist.Blacklist( android_platform_options.android_blacklist_file) devices = AndroidDevice.GetAllConnectedDevices(blacklist) finally: if not devices and _HasValidAdb(): try: adb_wrapper.AdbWrapper.KillServer() except device_errors.NoAdbError as e: logging.warning( 'adb reported as present, but NoAdbError thrown: %s', str(e)) return devices
[ "def", "FindAllAvailableDevices", "(", "options", ")", ":", "# Disable Android device discovery when remote testing a CrOS device", "if", "options", ".", "cros_remote", ":", "return", "[", "]", "android_platform_options", "=", "options", ".", "remote_platform_options", "devices", "=", "[", "]", "try", ":", "if", "CanDiscoverDevices", "(", ")", ":", "blacklist", "=", "None", "if", "android_platform_options", ".", "android_blacklist_file", ":", "blacklist", "=", "device_blacklist", ".", "Blacklist", "(", "android_platform_options", ".", "android_blacklist_file", ")", "devices", "=", "AndroidDevice", ".", "GetAllConnectedDevices", "(", "blacklist", ")", "finally", ":", "if", "not", "devices", "and", "_HasValidAdb", "(", ")", ":", "try", ":", "adb_wrapper", ".", "AdbWrapper", ".", "KillServer", "(", ")", "except", "device_errors", ".", "NoAdbError", "as", "e", ":", "logging", ".", "warning", "(", "'adb reported as present, but NoAdbError thrown: %s'", ",", "str", "(", "e", ")", ")", "return", "devices" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/telemetry/telemetry/internal/platform/android_device.py#L164-L188
baidu/AnyQ
d94d450d2aaa5f7ed73424b10aa4539835b97527
tools/simnet/train/paddle/layers/paddle_layers.py
python
ReduceMeanLayer.__init__
(self)
initialize
initialize
[ "initialize" ]
def __init__(self): """ initialize """ pass
[ "def", "__init__", "(", "self", ")", ":", "pass" ]
https://github.com/baidu/AnyQ/blob/d94d450d2aaa5f7ed73424b10aa4539835b97527/tools/simnet/train/paddle/layers/paddle_layers.py#L216-L220
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py2/sklearn/decomposition/incremental_pca.py
python
IncrementalPCA.fit
(self, X, y=None)
return self
Fit the model with X, using minibatches of size batch_size. Parameters ---------- X: array-like, shape (n_samples, n_features) Training data, where n_samples is the number of samples and n_features is the number of features. y: Passthrough for ``Pipeline`` compatibility. Returns ------- self: object Returns the instance itself.
Fit the model with X, using minibatches of size batch_size.
[ "Fit", "the", "model", "with", "X", "using", "minibatches", "of", "size", "batch_size", "." ]
def fit(self, X, y=None): """Fit the model with X, using minibatches of size batch_size. Parameters ---------- X: array-like, shape (n_samples, n_features) Training data, where n_samples is the number of samples and n_features is the number of features. y: Passthrough for ``Pipeline`` compatibility. Returns ------- self: object Returns the instance itself. """ self.components_ = None self.n_samples_seen_ = 0 self.mean_ = .0 self.var_ = .0 self.singular_values_ = None self.explained_variance_ = None self.explained_variance_ratio_ = None self.noise_variance_ = None X = check_array(X, copy=self.copy, dtype=[np.float64, np.float32]) n_samples, n_features = X.shape if self.batch_size is None: self.batch_size_ = 5 * n_features else: self.batch_size_ = self.batch_size for batch in gen_batches(n_samples, self.batch_size_): self.partial_fit(X[batch], check_input=False) return self
[ "def", "fit", "(", "self", ",", "X", ",", "y", "=", "None", ")", ":", "self", ".", "components_", "=", "None", "self", ".", "n_samples_seen_", "=", "0", "self", ".", "mean_", "=", ".0", "self", ".", "var_", "=", ".0", "self", ".", "singular_values_", "=", "None", "self", ".", "explained_variance_", "=", "None", "self", ".", "explained_variance_ratio_", "=", "None", "self", ".", "noise_variance_", "=", "None", "X", "=", "check_array", "(", "X", ",", "copy", "=", "self", ".", "copy", ",", "dtype", "=", "[", "np", ".", "float64", ",", "np", ".", "float32", "]", ")", "n_samples", ",", "n_features", "=", "X", ".", "shape", "if", "self", ".", "batch_size", "is", "None", ":", "self", ".", "batch_size_", "=", "5", "*", "n_features", "else", ":", "self", ".", "batch_size_", "=", "self", ".", "batch_size", "for", "batch", "in", "gen_batches", "(", "n_samples", ",", "self", ".", "batch_size_", ")", ":", "self", ".", "partial_fit", "(", "X", "[", "batch", "]", ",", "check_input", "=", "False", ")", "return", "self" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/decomposition/incremental_pca.py#L146-L182
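Typical out-of-core usage through the public scikit-learn API; fit() is just the partial_fit loop shown above, driven by gen_batches:

import numpy as np
from sklearn.decomposition import IncrementalPCA

X = np.random.RandomState(0).rand(100, 8)
ipca = IncrementalPCA(n_components=2, batch_size=20)
ipca.fit(X)                             # five partial_fit calls internally
print(ipca.explained_variance_ratio_)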
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py2/scipy/io/matlab/mio.py
python
savemat
(file_name, mdict, appendmat=True, format='5', long_field_names=False, do_compression=False, oned_as='row')
Save a dictionary of names and arrays into a MATLAB-style .mat file. This saves the array objects in the given dictionary to a MATLAB- style .mat file. Parameters ---------- file_name : str or file-like object Name of the .mat file (.mat extension not needed if ``appendmat == True``). Can also pass open file_like object. mdict : dict Dictionary from which to save matfile variables. appendmat : bool, optional True (the default) to append the .mat extension to the end of the given filename, if not already present. format : {'5', '4'}, string, optional '5' (the default) for MATLAB 5 and up (to 7.2), '4' for MATLAB 4 .mat files. long_field_names : bool, optional False (the default) - maximum field name length in a structure is 31 characters which is the documented maximum length. True - maximum field name length in a structure is 63 characters which works for MATLAB 7.6+. do_compression : bool, optional Whether or not to compress matrices on write. Default is False. oned_as : {'row', 'column'}, optional If 'column', write 1-D numpy arrays as column vectors. If 'row', write 1-D numpy arrays as row vectors. See also -------- mio4.MatFile4Writer mio5.MatFile5Writer
Save a dictionary of names and arrays into a MATLAB-style .mat file.
[ "Save", "a", "dictionary", "of", "names", "and", "arrays", "into", "a", "MATLAB", "-", "style", ".", "mat", "file", "." ]
def savemat(file_name, mdict, appendmat=True, format='5', long_field_names=False, do_compression=False, oned_as='row'): """ Save a dictionary of names and arrays into a MATLAB-style .mat file. This saves the array objects in the given dictionary to a MATLAB- style .mat file. Parameters ---------- file_name : str or file-like object Name of the .mat file (.mat extension not needed if ``appendmat == True``). Can also pass open file_like object. mdict : dict Dictionary from which to save matfile variables. appendmat : bool, optional True (the default) to append the .mat extension to the end of the given filename, if not already present. format : {'5', '4'}, string, optional '5' (the default) for MATLAB 5 and up (to 7.2), '4' for MATLAB 4 .mat files. long_field_names : bool, optional False (the default) - maximum field name length in a structure is 31 characters which is the documented maximum length. True - maximum field name length in a structure is 63 characters which works for MATLAB 7.6+. do_compression : bool, optional Whether or not to compress matrices on write. Default is False. oned_as : {'row', 'column'}, optional If 'column', write 1-D numpy arrays as column vectors. If 'row', write 1-D numpy arrays as row vectors. See also -------- mio4.MatFile4Writer mio5.MatFile5Writer """ file_opened = False if hasattr(file_name, 'write'): # File-like object already; use as-is file_stream = file_name else: if isinstance(file_name, string_types): if appendmat and not file_name.endswith('.mat'): file_name = file_name + ".mat" file_stream = open(file_name, 'wb') file_opened = True if format == '4': if long_field_names: raise ValueError("Long field names are not available for version 4 files") MW = MatFile4Writer(file_stream, oned_as) elif format == '5': MW = MatFile5Writer(file_stream, do_compression=do_compression, unicode_strings=True, long_field_names=long_field_names, oned_as=oned_as) else: raise ValueError("Format should be '4' or '5'") MW.put_variables(mdict) if file_opened: file_stream.close()
[ "def", "savemat", "(", "file_name", ",", "mdict", ",", "appendmat", "=", "True", ",", "format", "=", "'5'", ",", "long_field_names", "=", "False", ",", "do_compression", "=", "False", ",", "oned_as", "=", "'row'", ")", ":", "file_opened", "=", "False", "if", "hasattr", "(", "file_name", ",", "'write'", ")", ":", "# File-like object already; use as-is", "file_stream", "=", "file_name", "else", ":", "if", "isinstance", "(", "file_name", ",", "string_types", ")", ":", "if", "appendmat", "and", "not", "file_name", ".", "endswith", "(", "'.mat'", ")", ":", "file_name", "=", "file_name", "+", "\".mat\"", "file_stream", "=", "open", "(", "file_name", ",", "'wb'", ")", "file_opened", "=", "True", "if", "format", "==", "'4'", ":", "if", "long_field_names", ":", "raise", "ValueError", "(", "\"Long field names are not available for version 4 files\"", ")", "MW", "=", "MatFile4Writer", "(", "file_stream", ",", "oned_as", ")", "elif", "format", "==", "'5'", ":", "MW", "=", "MatFile5Writer", "(", "file_stream", ",", "do_compression", "=", "do_compression", ",", "unicode_strings", "=", "True", ",", "long_field_names", "=", "long_field_names", ",", "oned_as", "=", "oned_as", ")", "else", ":", "raise", "ValueError", "(", "\"Format should be '4' or '5'\"", ")", "MW", ".", "put_variables", "(", "mdict", ")", "if", "file_opened", ":", "file_stream", ".", "close", "(", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/io/matlab/mio.py#L219-L287
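Editor's note: a minimal usage sketch for savemat (the file name, keys, and values are illustrative; numpy and scipy are assumed to be installed):

import numpy as np
from scipy.io import savemat

# 1-D arrays are written as row vectors under the default oned_as='row'.
savemat('demo.mat', {'x': np.arange(5), 'label': 'example'}, do_compression=True)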
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/idlelib/configdialog.py
python
ConfigDialog.create_action_buttons
(self)
return outer
Return frame of action buttons for dialog. Methods: ok apply cancel help Widget Structure: outer: Frame buttons: Frame (no assignment): Button (ok) (no assignment): Button (apply) (no assignment): Button (cancel) (no assignment): Button (help) (no assignment): Frame
Return frame of action buttons for dialog.
[ "Return", "frame", "of", "action", "buttons", "for", "dialog", "." ]
def create_action_buttons(self): """Return frame of action buttons for dialog. Methods: ok apply cancel help Widget Structure: outer: Frame buttons: Frame (no assignment): Button (ok) (no assignment): Button (apply) (no assignment): Button (cancel) (no assignment): Button (help) (no assignment): Frame """ if macosx.isAquaTk(): # Changing the default padding on OSX results in unreadable # text in the buttons. padding_args = {} else: padding_args = {'padding': (6, 3)} outer = Frame(self, padding=2) buttons_frame = Frame(outer, padding=2) self.buttons = {} for txt, cmd in ( ('Ok', self.ok), ('Apply', self.apply), ('Cancel', self.cancel), ('Help', self.help)): self.buttons[txt] = Button(buttons_frame, text=txt, command=cmd, takefocus=FALSE, **padding_args) self.buttons[txt].pack(side=LEFT, padx=5) # Add space above buttons. Frame(outer, height=2, borderwidth=0).pack(side=TOP) buttons_frame.pack(side=BOTTOM) return outer
[ "def", "create_action_buttons", "(", "self", ")", ":", "if", "macosx", ".", "isAquaTk", "(", ")", ":", "# Changing the default padding on OSX results in unreadable", "# text in the buttons.", "padding_args", "=", "{", "}", "else", ":", "padding_args", "=", "{", "'padding'", ":", "(", "6", ",", "3", ")", "}", "outer", "=", "Frame", "(", "self", ",", "padding", "=", "2", ")", "buttons_frame", "=", "Frame", "(", "outer", ",", "padding", "=", "2", ")", "self", ".", "buttons", "=", "{", "}", "for", "txt", ",", "cmd", "in", "(", "(", "'Ok'", ",", "self", ".", "ok", ")", ",", "(", "'Apply'", ",", "self", ".", "apply", ")", ",", "(", "'Cancel'", ",", "self", ".", "cancel", ")", ",", "(", "'Help'", ",", "self", ".", "help", ")", ")", ":", "self", ".", "buttons", "[", "txt", "]", "=", "Button", "(", "buttons_frame", ",", "text", "=", "txt", ",", "command", "=", "cmd", ",", "takefocus", "=", "FALSE", ",", "*", "*", "padding_args", ")", "self", ".", "buttons", "[", "txt", "]", ".", "pack", "(", "side", "=", "LEFT", ",", "padx", "=", "5", ")", "# Add space above buttons.", "Frame", "(", "outer", ",", "height", "=", "2", ",", "borderwidth", "=", "0", ")", ".", "pack", "(", "side", "=", "TOP", ")", "buttons_frame", ".", "pack", "(", "side", "=", "BOTTOM", ")", "return", "outer" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/idlelib/configdialog.py#L127-L165
google/earthenterprise
0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9
earth_enterprise/src/fusion/portableglobe/servers/search_services/ge_base_search.py
python
GEBaseSearch.JsonStart
(self, handler, cb, datastoreName, search_term)
Writes initial header for json results.
Writes initial header for json results.
[ "Writes", "initial", "header", "for", "json", "results", "." ]
def JsonStart(self, handler, cb, datastoreName, search_term): """Writes initial header for json results.""" handler.write(self.json_start_template_ % (cb, datastoreName, search_term))
[ "def", "JsonStart", "(", "self", ",", "handler", ",", "cb", ",", "datastoreName", ",", "search_term", ")", ":", "handler", ".", "write", "(", "self", ".", "json_start_template_", "%", "(", "cb", ",", "datastoreName", ",", "search_term", ")", ")" ]
https://github.com/google/earthenterprise/blob/0fe84e29be470cd857e3a0e52e5d0afd5bb8cee9/earth_enterprise/src/fusion/portableglobe/servers/search_services/ge_base_search.py#L131-L133
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/turtle.py
python
RawTurtle._undo
(self, action, data)
Does the main part of the work for undo()
Does the main part of the work for undo()
[ "Does", "the", "main", "part", "of", "the", "work", "for", "undo", "()" ]
def _undo(self, action, data): """Does the main part of the work for undo() """ if self.undobuffer is None: return if action == "rot": angle, degPAU = data self._rotate(-angle*degPAU/self._degreesPerAU) dummy = self.undobuffer.pop() elif action == "stamp": stitem = data[0] self.clearstamp(stitem) elif action == "go": self._undogoto(data) elif action in ["wri", "dot"]: item = data[0] self.screen._delete(item) self.items.remove(item) elif action == "dofill": item = data[0] self.screen._drawpoly(item, ((0, 0),(0, 0),(0, 0)), fill="", outline="") elif action == "beginfill": item = data[0] self._fillitem = self._fillpath = None if item in self.items: self.screen._delete(item) self.items.remove(item) elif action == "pen": TPen.pen(self, data[0]) self.undobuffer.pop()
[ "def", "_undo", "(", "self", ",", "action", ",", "data", ")", ":", "if", "self", ".", "undobuffer", "is", "None", ":", "return", "if", "action", "==", "\"rot\"", ":", "angle", ",", "degPAU", "=", "data", "self", ".", "_rotate", "(", "-", "angle", "*", "degPAU", "/", "self", ".", "_degreesPerAU", ")", "dummy", "=", "self", ".", "undobuffer", ".", "pop", "(", ")", "elif", "action", "==", "\"stamp\"", ":", "stitem", "=", "data", "[", "0", "]", "self", ".", "clearstamp", "(", "stitem", ")", "elif", "action", "==", "\"go\"", ":", "self", ".", "_undogoto", "(", "data", ")", "elif", "action", "in", "[", "\"wri\"", ",", "\"dot\"", "]", ":", "item", "=", "data", "[", "0", "]", "self", ".", "screen", ".", "_delete", "(", "item", ")", "self", ".", "items", ".", "remove", "(", "item", ")", "elif", "action", "==", "\"dofill\"", ":", "item", "=", "data", "[", "0", "]", "self", ".", "screen", ".", "_drawpoly", "(", "item", ",", "(", "(", "0", ",", "0", ")", ",", "(", "0", ",", "0", ")", ",", "(", "0", ",", "0", ")", ")", ",", "fill", "=", "\"\"", ",", "outline", "=", "\"\"", ")", "elif", "action", "==", "\"beginfill\"", ":", "item", "=", "data", "[", "0", "]", "self", ".", "_fillitem", "=", "self", ".", "_fillpath", "=", "None", "if", "item", "in", "self", ".", "items", ":", "self", ".", "screen", ".", "_delete", "(", "item", ")", "self", ".", "items", ".", "remove", "(", "item", ")", "elif", "action", "==", "\"pen\"", ":", "TPen", ".", "pen", "(", "self", ",", "data", "[", "0", "]", ")", "self", ".", "undobuffer", ".", "pop", "(", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/turtle.py#L3592-L3622
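Editor's note: _undo is not called directly; undo() pops recorded entries from the undobuffer and dispatches them here. A small sketch (requires a Tk display; the move is illustrative):

import turtle

t = turtle.Turtle()
t.forward(100)   # pushes a ("go", ...) entry onto the undobuffer
t.undo()         # pops it and dispatches to RawTurtle._undo, which calls _undogoto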
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/general_fitting/general_fitting_view.py
python
GeneralFittingView.switch_to_simultaneous
(self)
Switches the view to simultaneous fit mode.
Switches the view to simultaneous fit mode.
[ "Switches", "the", "view", "to", "simultaneous", "fit", "mode", "." ]
def switch_to_simultaneous(self) -> None: """Switches the view to simultaneous fit mode.""" super().switch_to_simultaneous() self.set_workspace_combo_box_label(SIMULTANEOUS_FIT_LABEL) self.general_fitting_options.enable_simultaneous_fit_options()
[ "def", "switch_to_simultaneous", "(", "self", ")", "->", "None", ":", "super", "(", ")", ".", "switch_to_simultaneous", "(", ")", "self", ".", "set_workspace_combo_box_label", "(", "SIMULTANEOUS_FIT_LABEL", ")", "self", ".", "general_fitting_options", ".", "enable_simultaneous_fit_options", "(", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/general_fitting/general_fitting_view.py#L70-L74
openvinotoolkit/openvino
dedcbeafa8b84cccdc55ca64b8da516682b381c7
tools/mo/openvino/tools/mo/ops/loop.py
python
Loop.updated_loop_output_ports_shape_and_value
(loop_node: Node)
Update shape and values for Loop output ports. If the number of iterations is dynamic then the corresponding dimensions for the scan outputs (having an "axis" attribute) are set to 1 because MO cannot generate IR with undefined dimensions. :param loop_node: The Loop node :return: None
Update shape and values for Loop output ports. If the number of iterations is dynamic then the corresponding dimensions for the scan outputs (having an "axis" attribute) are set to 1 because MO cannot generate IR with undefined dimensions.
[ "Update", "shape", "and", "values", "for", "Loop", "output", "ports", ".", "If", "the", "number", "of", "iterations", "is", "dynamic", "then", "the", "corresponding", "dimension", "for", "the", "scan", "outputs", "(", "having", "axis", "attribute", ")", "are", "set", "to", "1", "because", "MO", "cannot", "generate", "IR", "with", "undefined", "dimensions", "." ]
def updated_loop_output_ports_shape_and_value(loop_node: Node): """ Update shape and values for Loop output ports. If the number of iterations is dynamic then the corresponding dimensions for the scan outputs (having an "axis" attribute) are set to 1 because MO cannot generate IR with undefined dimensions. :param loop_node: The Loop node :return: None """ loop_name = loop_node.soft_get('name', loop_node.id) for record in loop_node.output_port_map: body_node = Loop.get_body_node_by_internal_id(loop_node, record['internal_layer_id']) assert body_node is not None assert body_node.soft_get('type') == 'Result' loop_port_idx = record['external_port_id'] if loop_port_idx != -1: # the id = -1 for execution condition output which is not connected anywhere output_value = body_node.in_port(0).data.get_value() output_shape = body_node.in_port(0).data.get_shape().copy() concat_axis = record['axis'] if concat_axis is not None: assert output_shape[concat_axis] == 1, 'Dimension for concatenation is not equal to 1 for scan ' \ 'output for Loop node "{}" for loop output port "{}"'.\ format(loop_name, loop_port_idx) output_shape[concat_axis] = Loop.iterations_count(loop_node) # MO does not support evaluation of Loop scan outputs with const values if concat_axis is None and output_value is not None: loop_node.out_port(loop_port_idx).data.set_value(output_value) else: loop_node.out_port(loop_port_idx).data.set_shape(output_shape)
[ "def", "updated_loop_output_ports_shape_and_value", "(", "loop_node", ":", "Node", ")", ":", "loop_name", "=", "loop_node", ".", "soft_get", "(", "'name'", ",", "loop_node", ".", "id", ")", "for", "record", "in", "loop_node", ".", "output_port_map", ":", "body_node", "=", "Loop", ".", "get_body_node_by_internal_id", "(", "loop_node", ",", "record", "[", "'internal_layer_id'", "]", ")", "assert", "body_node", "is", "not", "None", "assert", "body_node", ".", "soft_get", "(", "'type'", ")", "==", "'Result'", "loop_port_idx", "=", "record", "[", "'external_port_id'", "]", "if", "loop_port_idx", "!=", "-", "1", ":", "# the id = -1 for execution condition output which is not connected anywhere", "output_value", "=", "body_node", ".", "in_port", "(", "0", ")", ".", "data", ".", "get_value", "(", ")", "output_shape", "=", "body_node", ".", "in_port", "(", "0", ")", ".", "data", ".", "get_shape", "(", ")", ".", "copy", "(", ")", "concat_axis", "=", "record", "[", "'axis'", "]", "if", "concat_axis", "is", "not", "None", ":", "assert", "output_shape", "[", "concat_axis", "]", "==", "1", ",", "'Dimension for concatenation is not equal to 1 for scan '", "'output for Loop node \"{}\" for loop output port \"{}\"'", ".", "format", "(", "loop_name", ",", "loop_port_idx", ")", "output_shape", "[", "concat_axis", "]", "=", "Loop", ".", "iterations_count", "(", "loop_node", ")", "# MO does not support evaluation of Loop scan outputs with const values", "if", "concat_axis", "is", "None", "and", "output_value", "is", "not", "None", ":", "loop_node", ".", "out_port", "(", "loop_port_idx", ")", ".", "data", ".", "set_value", "(", "output_value", ")", "else", ":", "loop_node", ".", "out_port", "(", "loop_port_idx", ")", ".", "data", ".", "set_shape", "(", "output_shape", ")" ]
https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/tools/mo/openvino/tools/mo/ops/loop.py#L114-L143
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
Framework/PythonInterface/plugins/algorithms/LRPeakSelection.py
python
PeakFinderDerivation.initArrays
(self)
Initialize internal data members
Initialize internal data members
[ "Initialize", "internal", "data", "members" ]
def initArrays(self): """ Initialize internal data members """ self.xdata_firstderi = [] self.ydata_firstderi = [] self.peak = [-1, -1] self.low_res = [-1, -1] self.five_highest_ydata = [] self.five_highest_xdata = [] self.sum_five_highest_ydata = -1 self.peak_pixel = -1 self.deri_min = 1 self.deri_max = -1 self.deri_min_pixel_value = -1 self.deri_max_pixel_value = -1 self.mean_counts_firstderi = -1 self.std_deviation_counts_firstderi = -1 self.peak_max_final_value = -1 self.peak_min_final_value = -1
[ "def", "initArrays", "(", "self", ")", ":", "self", ".", "xdata_firstderi", "=", "[", "]", "self", ".", "ydata_firstderi", "=", "[", "]", "self", ".", "peak", "=", "[", "-", "1", ",", "-", "1", "]", "self", ".", "low_res", "=", "[", "-", "1", ",", "-", "1", "]", "self", ".", "five_highest_ydata", "=", "[", "]", "self", ".", "five_highest_xdata", "=", "[", "]", "self", ".", "sum_five_highest_ydata", "=", "-", "1", "self", ".", "peak_pixel", "=", "-", "1", "self", ".", "deri_min", "=", "1", "self", ".", "deri_max", "=", "-", "1", "self", ".", "deri_min_pixel_value", "=", "-", "1", "self", ".", "deri_max_pixel_value", "=", "-", "1", "self", ".", "mean_counts_firstderi", "=", "-", "1", "self", ".", "std_deviation_counts_firstderi", "=", "-", "1", "self", ".", "peak_max_final_value", "=", "-", "1", "self", ".", "peak_min_final_value", "=", "-", "1" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/LRPeakSelection.py#L57-L76
forkineye/ESPixelStick
22926f1c0d1131f1369fc7cad405689a095ae3cb
dist/bin/pyserial/serial/serialutil.py
python
SerialBase.write_timeout
(self, timeout)
Change timeout setting.
Change timeout setting.
[ "Change", "timeout", "setting", "." ]
def write_timeout(self, timeout): """Change timeout setting.""" if timeout is not None: if timeout < 0: raise ValueError("Not a valid timeout: {!r}".format(timeout)) try: timeout + 1 # test if it's a number, will throw a TypeError if not... except TypeError: raise ValueError("Not a valid timeout: {!r}".format(timeout)) self._write_timeout = timeout if self.is_open: self._reconfigure_port()
[ "def", "write_timeout", "(", "self", ",", "timeout", ")", ":", "if", "timeout", "is", "not", "None", ":", "if", "timeout", "<", "0", ":", "raise", "ValueError", "(", "\"Not a valid timeout: {!r}\"", ".", "format", "(", "timeout", ")", ")", "try", ":", "timeout", "+", "1", "# test if it's a number, will throw a TypeError if not...", "except", "TypeError", ":", "raise", "ValueError", "(", "\"Not a valid timeout: {!r}\"", ".", "format", "(", "timeout", ")", ")", "self", ".", "_write_timeout", "=", "timeout", "if", "self", ".", "is_open", ":", "self", ".", "_reconfigure_port", "(", ")" ]
https://github.com/forkineye/ESPixelStick/blob/22926f1c0d1131f1369fc7cad405689a095ae3cb/dist/bin/pyserial/serial/serialutil.py#L376-L388
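Editor's note: a short sketch of the setter's validation, assuming pyserial is installed (no port is opened, so _reconfigure_port() is never reached):

import serial

ser = serial.Serial()        # port stays closed
ser.write_timeout = 1.5      # any non-negative number is accepted
ser.write_timeout = None     # None selects blocking writes
try:
    ser.write_timeout = -1   # negative values are rejected
except ValueError as exc:
    print(exc)               # Not a valid timeout: -1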
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py2/scipy/special/basic.py
python
yvp
(v, z, n=1)
Compute nth derivative of Bessel function Yv(z) with respect to `z`. Parameters ---------- v : float Order of Bessel function z : complex Argument at which to evaluate the derivative n : int, default 1 Order of derivative Notes ----- The derivative is computed using the relation DLMF 10.6.7 [2]_. References ---------- .. [1] Zhang, Shanjie and Jin, Jianming. "Computation of Special Functions", John Wiley and Sons, 1996, chapter 5. https://people.sc.fsu.edu/~jburkardt/f_src/special_functions/special_functions.html .. [2] NIST Digital Library of Mathematical Functions. https://dlmf.nist.gov/10.6.E7
Compute nth derivative of Bessel function Yv(z) with respect to `z`.
[ "Compute", "nth", "derivative", "of", "Bessel", "function", "Yv", "(", "z", ")", "with", "respect", "to", "z", "." ]
def yvp(v, z, n=1): """Compute nth derivative of Bessel function Yv(z) with respect to `z`. Parameters ---------- v : float Order of Bessel function z : complex Argument at which to evaluate the derivative n : int, default 1 Order of derivative Notes ----- The derivative is computed using the relation DLMF 10.6.7 [2]_. References ---------- .. [1] Zhang, Shanjie and Jin, Jianming. "Computation of Special Functions", John Wiley and Sons, 1996, chapter 5. https://people.sc.fsu.edu/~jburkardt/f_src/special_functions/special_functions.html .. [2] NIST Digital Library of Mathematical Functions. https://dlmf.nist.gov/10.6.E7 """ n = _nonneg_int_or_fail(n, 'n') if n == 0: return yv(v, z) else: return _bessel_diff_formula(v, z, n, yv, -1)
[ "def", "yvp", "(", "v", ",", "z", ",", "n", "=", "1", ")", ":", "n", "=", "_nonneg_int_or_fail", "(", "n", ",", "'n'", ")", "if", "n", "==", "0", ":", "return", "yv", "(", "v", ",", "z", ")", "else", ":", "return", "_bessel_diff_formula", "(", "v", ",", "z", ",", "n", ",", "yv", ",", "-", "1", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/special/basic.py#L468-L497
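Editor's note: a quick numerical check of yvp against a central difference (the order and argument are arbitrary):

import numpy as np
from scipy.special import yv, yvp

v, z, h = 1.5, 2.0, 1e-6
fd = (yv(v, z + h) - yv(v, z - h)) / (2 * h)   # central difference of Yv
print(np.isclose(yvp(v, z, n=1), fd))          # True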
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py3/scipy/stats/stats.py
python
_square_of_sums
(a, axis=0)
Sum elements of the input array, and return the square(s) of that sum. Parameters ---------- a : array_like Input array. axis : int or None, optional Axis along which to calculate. Default is 0. If None, compute over the whole array `a`. Returns ------- square_of_sums : float or ndarray The square of the sum over `axis`. See also -------- _sum_of_squares : The sum of squares (the opposite of `square_of_sums`).
Sum elements of the input array, and return the square(s) of that sum.
[ "Sum", "elements", "of", "the", "input", "array", "and", "return", "the", "square", "(", "s", ")", "of", "that", "sum", "." ]
def _square_of_sums(a, axis=0): """ Sum elements of the input array, and return the square(s) of that sum. Parameters ---------- a : array_like Input array. axis : int or None, optional Axis along which to calculate. Default is 0. If None, compute over the whole array `a`. Returns ------- square_of_sums : float or ndarray The square of the sum over `axis`. See also -------- _sum_of_squares : The sum of squares (the opposite of `square_of_sums`). """ a, axis = _chk_asarray(a, axis) s = np.sum(a, axis) if not np.isscalar(s): return s.astype(float) * s else: return float(s) * s
[ "def", "_square_of_sums", "(", "a", ",", "axis", "=", "0", ")", ":", "a", ",", "axis", "=", "_chk_asarray", "(", "a", ",", "axis", ")", "s", "=", "np", ".", "sum", "(", "a", ",", "axis", ")", "if", "not", "np", ".", "isscalar", "(", "s", ")", ":", "return", "s", ".", "astype", "(", "float", ")", "*", "s", "else", ":", "return", "float", "(", "s", ")", "*", "s" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/stats/stats.py#L5873-L5899
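Editor's note: _square_of_sums is a private helper, but the computation is easy to reproduce directly; a sketch of the axis=0 case:

import numpy as np

a = np.array([[1, 2], [3, 4]])
s = np.sum(a, axis=0)         # column sums: [4, 6]
print(s.astype(float) * s)    # [16. 36.], the square of each column sum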
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/pkg_resources/_vendor/pyparsing.py
python
ParserElement.__mul__
(self,other)
return ret
Implementation of * operator, allows use of C{expr * 3} in place of C{expr + expr + expr}. Expressions may also be multiplied by a 2-integer tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples may also include C{None} as in: - C{expr*(n,None)} or C{expr*(n,)} is equivalent to C{expr*n + L{ZeroOrMore}(expr)} (read as "at least n instances of C{expr}") - C{expr*(None,n)} is equivalent to C{expr*(0,n)} (read as "0 to n instances of C{expr}") - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} Note that C{expr*(None,n)} does not raise an exception if more than n exprs exist in the input stream; that is, C{expr*(None,n)} does not enforce a maximum number of expr occurrences. If this behavior is desired, then write C{expr*(None,n) + ~expr}
Implementation of * operator, allows use of C{expr * 3} in place of C{expr + expr + expr}. Expressions may also be multiplied by a 2-integer tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples may also include C{None} as in: - C{expr*(n,None)} or C{expr*(n,)} is equivalent to C{expr*n + L{ZeroOrMore}(expr)} (read as "at least n instances of C{expr}") - C{expr*(None,n)} is equivalent to C{expr*(0,n)} (read as "0 to n instances of C{expr}") - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)}
[ "Implementation", "of", "*", "operator", "allows", "use", "of", "C", "{", "expr", "*", "3", "}", "in", "place", "of", "C", "{", "expr", "+", "expr", "+", "expr", "}", ".", "Expressions", "may", "also", "me", "multiplied", "by", "a", "2", "-", "integer", "tuple", "similar", "to", "C", "{{", "min", "max", "}}", "multipliers", "in", "regular", "expressions", ".", "Tuples", "may", "also", "include", "C", "{", "None", "}", "as", "in", ":", "-", "C", "{", "expr", "*", "(", "n", "None", ")", "}", "or", "C", "{", "expr", "*", "(", "n", ")", "}", "is", "equivalent", "to", "C", "{", "expr", "*", "n", "+", "L", "{", "ZeroOrMore", "}", "(", "expr", ")", "}", "(", "read", "as", "at", "least", "n", "instances", "of", "C", "{", "expr", "}", ")", "-", "C", "{", "expr", "*", "(", "None", "n", ")", "}", "is", "equivalent", "to", "C", "{", "expr", "*", "(", "0", "n", ")", "}", "(", "read", "as", "0", "to", "n", "instances", "of", "C", "{", "expr", "}", ")", "-", "C", "{", "expr", "*", "(", "None", "None", ")", "}", "is", "equivalent", "to", "C", "{", "L", "{", "ZeroOrMore", "}", "(", "expr", ")", "}", "-", "C", "{", "expr", "*", "(", "1", "None", ")", "}", "is", "equivalent", "to", "C", "{", "L", "{", "OneOrMore", "}", "(", "expr", ")", "}" ]
def __mul__(self,other): """ Implementation of * operator, allows use of C{expr * 3} in place of C{expr + expr + expr}. Expressions may also be multiplied by a 2-integer tuple, similar to C{{min,max}} multipliers in regular expressions. Tuples may also include C{None} as in: - C{expr*(n,None)} or C{expr*(n,)} is equivalent to C{expr*n + L{ZeroOrMore}(expr)} (read as "at least n instances of C{expr}") - C{expr*(None,n)} is equivalent to C{expr*(0,n)} (read as "0 to n instances of C{expr}") - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} Note that C{expr*(None,n)} does not raise an exception if more than n exprs exist in the input stream; that is, C{expr*(None,n)} does not enforce a maximum number of expr occurrences. If this behavior is desired, then write C{expr*(None,n) + ~expr} """ if isinstance(other,int): minElements, optElements = other,0 elif isinstance(other,tuple): other = (other + (None, None))[:2] if other[0] is None: other = (0, other[1]) if isinstance(other[0],int) and other[1] is None: if other[0] == 0: return ZeroOrMore(self) if other[0] == 1: return OneOrMore(self) else: return self*other[0] + ZeroOrMore(self) elif isinstance(other[0],int) and isinstance(other[1],int): minElements, optElements = other optElements -= minElements else: raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) else: raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) if minElements < 0: raise ValueError("cannot multiply ParserElement by negative value") if optElements < 0: raise ValueError("second tuple value must be greater or equal to first tuple value") if minElements == optElements == 0: raise ValueError("cannot multiply ParserElement by 0 or (0,0)") if (optElements): def makeOptionalList(n): if n>1: return Optional(self + makeOptionalList(n-1)) else: return Optional(self) if minElements: if minElements == 1: ret = self + makeOptionalList(optElements) else: ret = And([self]*minElements) + makeOptionalList(optElements) else: ret = makeOptionalList(optElements) else: if minElements == 1: ret = self else: ret = And([self]*minElements) return ret
[ "def", "__mul__", "(", "self", ",", "other", ")", ":", "if", "isinstance", "(", "other", ",", "int", ")", ":", "minElements", ",", "optElements", "=", "other", ",", "0", "elif", "isinstance", "(", "other", ",", "tuple", ")", ":", "other", "=", "(", "other", "+", "(", "None", ",", "None", ")", ")", "[", ":", "2", "]", "if", "other", "[", "0", "]", "is", "None", ":", "other", "=", "(", "0", ",", "other", "[", "1", "]", ")", "if", "isinstance", "(", "other", "[", "0", "]", ",", "int", ")", "and", "other", "[", "1", "]", "is", "None", ":", "if", "other", "[", "0", "]", "==", "0", ":", "return", "ZeroOrMore", "(", "self", ")", "if", "other", "[", "0", "]", "==", "1", ":", "return", "OneOrMore", "(", "self", ")", "else", ":", "return", "self", "*", "other", "[", "0", "]", "+", "ZeroOrMore", "(", "self", ")", "elif", "isinstance", "(", "other", "[", "0", "]", ",", "int", ")", "and", "isinstance", "(", "other", "[", "1", "]", ",", "int", ")", ":", "minElements", ",", "optElements", "=", "other", "optElements", "-=", "minElements", "else", ":", "raise", "TypeError", "(", "\"cannot multiply 'ParserElement' and ('%s','%s') objects\"", ",", "type", "(", "other", "[", "0", "]", ")", ",", "type", "(", "other", "[", "1", "]", ")", ")", "else", ":", "raise", "TypeError", "(", "\"cannot multiply 'ParserElement' and '%s' objects\"", ",", "type", "(", "other", ")", ")", "if", "minElements", "<", "0", ":", "raise", "ValueError", "(", "\"cannot multiply ParserElement by negative value\"", ")", "if", "optElements", "<", "0", ":", "raise", "ValueError", "(", "\"second tuple value must be greater or equal to first tuple value\"", ")", "if", "minElements", "==", "optElements", "==", "0", ":", "raise", "ValueError", "(", "\"cannot multiply ParserElement by 0 or (0,0)\"", ")", "if", "(", "optElements", ")", ":", "def", "makeOptionalList", "(", "n", ")", ":", "if", "n", ">", "1", ":", "return", "Optional", "(", "self", "+", "makeOptionalList", "(", "n", "-", "1", ")", ")", "else", ":", "return", "Optional", "(", "self", ")", "if", "minElements", ":", "if", "minElements", "==", "1", ":", "ret", "=", "self", "+", "makeOptionalList", "(", "optElements", ")", "else", ":", "ret", "=", "And", "(", "[", "self", "]", "*", "minElements", ")", "+", "makeOptionalList", "(", "optElements", ")", "else", ":", "ret", "=", "makeOptionalList", "(", "optElements", ")", "else", ":", "if", "minElements", "==", "1", ":", "ret", "=", "self", "else", ":", "ret", "=", "And", "(", "[", "self", "]", "*", "minElements", ")", "return", "ret" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/pkg_resources/_vendor/pyparsing.py#L1836-L1902
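Editor's note: a usage sketch of the multiplication forms described in the docstring (assuming pyparsing is installed):

from pyparsing import Word, nums

integer = Word(nums)
exactly_three = integer * 3                    # expr + expr + expr
print(exactly_three.parseString("1 22 333"))   # ['1', '22', '333']

two_or_more = integer * (2, None)              # at least two instances
up_to_four = integer * (None, 4)               # zero to four instances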
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/indexes/range.py
python
RangeIndex._extended_gcd
(self, a, b)
return old_r, old_s, old_t
Extended Euclidean algorithms to solve Bezout's identity: a*x + b*y = gcd(x, y) Finds one particular solution for x, y: s, t Returns: gcd, s, t
Extended Euclidean algorithms to solve Bezout's identity: a*x + b*y = gcd(x, y) Finds one particular solution for x, y: s, t Returns: gcd, s, t
[ "Extended", "Euclidean", "algorithms", "to", "solve", "Bezout", "s", "identity", ":", "a", "*", "x", "+", "b", "*", "y", "=", "gcd", "(", "x", "y", ")", "Finds", "one", "particular", "solution", "for", "x", "y", ":", "s", "t", "Returns", ":", "gcd", "s", "t" ]
def _extended_gcd(self, a, b): """ Extended Euclidean algorithms to solve Bezout's identity: a*x + b*y = gcd(x, y) Finds one particular solution for x, y: s, t Returns: gcd, s, t """ s, old_s = 0, 1 t, old_t = 1, 0 r, old_r = b, a while r: quotient = old_r // r old_r, r = r, old_r - quotient * r old_s, s = s, old_s - quotient * s old_t, t = t, old_t - quotient * t return old_r, old_s, old_t
[ "def", "_extended_gcd", "(", "self", ",", "a", ",", "b", ")", ":", "s", ",", "old_s", "=", "0", ",", "1", "t", ",", "old_t", "=", "1", ",", "0", "r", ",", "old_r", "=", "b", ",", "a", "while", "r", ":", "quotient", "=", "old_r", "//", "r", "old_r", ",", "r", "=", "r", ",", "old_r", "-", "quotient", "*", "r", "old_s", ",", "s", "=", "s", ",", "old_s", "-", "quotient", "*", "s", "old_t", ",", "t", "=", "t", ",", "old_t", "-", "quotient", "*", "t", "return", "old_r", ",", "old_s", ",", "old_t" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/indexes/range.py#L532-L547
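Editor's note: since _extended_gcd is private to RangeIndex, here is the same iteration as a standalone sketch with a check of Bezout's identity:

def extended_gcd(a, b):
    s, old_s = 0, 1
    t, old_t = 1, 0
    r, old_r = b, a
    while r:
        quotient = old_r // r
        old_r, r = r, old_r - quotient * r
        old_s, s = s, old_s - quotient * s
        old_t, t = t, old_t - quotient * t
    return old_r, old_s, old_t

g, s, t = extended_gcd(240, 46)
print(g, s, t)                  # 2 -9 47
assert 240 * s + 46 * t == g    # Bezout's identity: a*s + b*t == gcd(a, b)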
klzgrad/naiveproxy
ed2c513637c77b18721fe428d7ed395b4d284c83
src/tools/grit/grit/util.py
python
EncodeCdata
(cdata)
Returns the provided cdata in either escaped format or <![CDATA[xxx]]> format, depending on which is more appropriate for easy editing. The data is escaped for inclusion in an XML element's body. Args: cdata: 'If x < y and y < z then x < z' Return: '<![CDATA[If x < y and y < z then x < z]]>'
Returns the provided cdata in either escaped format or <![CDATA[xxx]]> format, depending on which is more appropriate for easy editing. The data is escaped for inclusion in an XML element's body.
[ "Returns", "the", "provided", "cdata", "in", "either", "escaped", "format", "or", "<!", "[", "CDATA", "[", "xxx", "]]", ">", "format", "depending", "on", "which", "is", "more", "appropriate", "for", "easy", "editing", ".", "The", "data", "is", "escaped", "for", "inclusion", "in", "an", "XML", "element", "s", "body", "." ]
def EncodeCdata(cdata): '''Returns the provided cdata in either escaped format or <![CDATA[xxx]]> format, depending on which is more appropriate for easy editing. The data is escaped for inclusion in an XML element's body. Args: cdata: 'If x < y and y < z then x < z' Return: '<![CDATA[If x < y and y < z then x < z]]>' ''' if cdata.count('<') > 1 or cdata.count('>') > 1 and cdata.count(']]>') == 0: return '<![CDATA[%s]]>' % cdata else: return saxutils.escape(cdata)
[ "def", "EncodeCdata", "(", "cdata", ")", ":", "if", "cdata", ".", "count", "(", "'<'", ")", ">", "1", "or", "cdata", ".", "count", "(", "'>'", ")", ">", "1", "and", "cdata", ".", "count", "(", "']]>'", ")", "==", "0", ":", "return", "'<![CDATA[%s]]>'", "%", "cdata", "else", ":", "return", "saxutils", ".", "escape", "(", "cdata", ")" ]
https://github.com/klzgrad/naiveproxy/blob/ed2c513637c77b18721fe428d7ed395b4d284c83/src/tools/grit/grit/util.py#L290-L304
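Editor's note: in the test above, `and` binds tighter than `or`, so the condition reads count('<') > 1 or (count('>') > 1 and count(']]>') == 0). A behavior sketch using the docstring's own example:

from xml.sax import saxutils

cdata = 'If x < y and y < z then x < z'
print(cdata.count('<'))           # 3, so the CDATA branch is taken:
print('<![CDATA[%s]]>' % cdata)   # <![CDATA[If x < y and y < z then x < z]]>
print(saxutils.escape('x < y'))   # a single '<' falls through to escaping: x &lt; y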
kamyu104/LeetCode-Solutions
77605708a927ea3b85aee5a479db733938c7c211
Python/maximum-product-of-the-length-of-two-palindromic-subsequences.py
python
Solution.maxProduct
(self, s)
return result
:type s: str :rtype: int
:type s: str :rtype: int
[ ":", "type", "s", ":", "str", ":", "rtype", ":", "int" ]
def maxProduct(self, s): """ :type s: str :rtype: int """ def palindromic_subsequence_length(s, mask): result = 0 left, right = 0, len(s)-1 left_bit, right_bit = 1<<left, 1<<right while left <= right: if mask&left_bit == 0: left, left_bit = left+1, left_bit<<1 elif mask&right_bit == 0: right, right_bit = right-1, right_bit>>1 elif s[left] == s[right]: result += 1 if left == right else 2 left, left_bit = left+1, left_bit<<1 right, right_bit = right-1, right_bit>>1 else: return 0 return result dp = [palindromic_subsequence_length(s, mask) for mask in xrange(1<<len(s))] result = 0 for mask in xrange(len(dp)): if dp[mask]*(len(s)-dp[mask]) <= result: # optimize continue # submask enumeration: # => sum(nCr(n, k) * 2^k for k in xrange(n+1)) = (1 + 2)^n = 3^n # => Time: O(3^n), see https://cp-algorithms.com/algebra/all-submasks.html submask = inverse_mask = (len(dp)-1)^mask while submask: result = max(result, dp[mask]*dp[submask]) submask = (submask-1)&inverse_mask return result
[ "def", "maxProduct", "(", "self", ",", "s", ")", ":", "def", "palindromic_subsequence_length", "(", "s", ",", "mask", ")", ":", "result", "=", "0", "left", ",", "right", "=", "0", ",", "len", "(", "s", ")", "-", "1", "left_bit", ",", "right_bit", "=", "1", "<<", "left", ",", "1", "<<", "right", "while", "left", "<=", "right", ":", "if", "mask", "&", "left_bit", "==", "0", ":", "left", ",", "left_bit", "=", "left", "+", "1", ",", "left_bit", "<<", "1", "elif", "mask", "&", "right_bit", "==", "0", ":", "right", ",", "right_bit", "=", "right", "-", "1", ",", "right_bit", ">>", "1", "elif", "s", "[", "left", "]", "==", "s", "[", "right", "]", ":", "result", "+=", "1", "if", "left", "==", "right", "else", "2", "left", ",", "left_bit", "=", "left", "+", "1", ",", "left_bit", "<<", "1", "right", ",", "right_bit", "=", "right", "-", "1", ",", "right_bit", ">>", "1", "else", ":", "return", "0", "return", "result", "dp", "=", "[", "palindromic_subsequence_length", "(", "s", ",", "mask", ")", "for", "mask", "in", "xrange", "(", "1", "<<", "len", "(", "s", ")", ")", "]", "result", "=", "0", "for", "mask", "in", "xrange", "(", "len", "(", "dp", ")", ")", ":", "if", "dp", "[", "mask", "]", "*", "(", "len", "(", "s", ")", "-", "dp", "[", "mask", "]", ")", "<=", "result", ":", "# optimize", "continue", "# submask enumeration:", "# => sum(nCr(n, k) * 2^k for k in xrange(n+1)) = (1 + 2)^n = 3^n", "# => Time: O(3^n), see https://cp-algorithms.com/algebra/all-submasks.html", "submask", "=", "inverse_mask", "=", "(", "len", "(", "dp", ")", "-", "1", ")", "^", "mask", "while", "submask", ":", "result", "=", "max", "(", "result", ",", "dp", "[", "mask", "]", "*", "dp", "[", "submask", "]", ")", "submask", "=", "(", "submask", "-", "1", ")", "&", "inverse_mask", "return", "result" ]
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/maximum-product-of-the-length-of-two-palindromic-subsequences.py#L5-L39
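Editor's note: the submask-enumeration trick from the comment is worth seeing in isolation; a small sketch over a 4-bit universe (the mask value is arbitrary):

U = (1 << 4) - 1                      # universe 0b1111
mask = 0b0101
submask = inverse_mask = U ^ mask     # complement of mask: 0b1010
while submask:
    print(bin(submask))               # 0b1010, 0b1000, 0b10
    submask = (submask - 1) & inverse_mask
# Each nonempty submask of the complement is visited exactly once, which is
# what gives the 3^n bound cited in the comment.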
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
tools/unused-symbols-report.py
python
Parse
(input, skip_paths=None, only_paths=None)
Parse the --print-gc-sections build output. Args: input: iterable over the lines of the build output Yields: (target name, path to .o file, demangled symbol)
Parse the --print-gc-sections build output.
[ "Parse", "the", "--", "print", "-", "gc", "-", "sections", "build", "output", "." ]
def Parse(input, skip_paths=None, only_paths=None): """Parse the --print-gc-sections build output. Args: input: iterable over the lines of the build output Yields: (target name, path to .o file, demangled symbol) """ symbol_re = re.compile(r"'\.text\.(\S+)' in file '(\S+)'$") path_re = re.compile(r"^out/[^/]+/[^/]+/([^/]+)/(.*)$") for line in input: match = symbol_re.search(line) if not match: continue symbol, path = match.groups() symbol = Unyuck(Demangle(symbol)) path = os.path.normpath(path) if skip_paths and skip_paths in path: continue if only_paths and only_paths not in path: continue match = path_re.match(path) if not match: print >>sys.stderr, "Skipping weird path", path continue target, path = match.groups() yield target, path, symbol
[ "def", "Parse", "(", "input", ",", "skip_paths", "=", "None", ",", "only_paths", "=", "None", ")", ":", "symbol_re", "=", "re", ".", "compile", "(", "r\"'\\.text\\.(\\S+)' in file '(\\S+)'$\"", ")", "path_re", "=", "re", ".", "compile", "(", "r\"^out/[^/]+/[^/]+/([^/]+)/(.*)$\"", ")", "for", "line", "in", "input", ":", "match", "=", "symbol_re", ".", "search", "(", "line", ")", "if", "not", "match", ":", "continue", "symbol", ",", "path", "=", "match", ".", "groups", "(", ")", "symbol", "=", "Unyuck", "(", "Demangle", "(", "symbol", ")", ")", "path", "=", "os", ".", "path", ".", "normpath", "(", "path", ")", "if", "skip_paths", "and", "skip_paths", "in", "path", ":", "continue", "if", "only_paths", "and", "only_paths", "not", "in", "path", ":", "continue", "match", "=", "path_re", ".", "match", "(", "path", ")", "if", "not", "match", ":", "print", ">>", "sys", ".", "stderr", ",", "\"Skipping weird path\"", ",", "path", "continue", "target", ",", "path", "=", "match", ".", "groups", "(", ")", "yield", "target", ",", "path", ",", "symbol" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/unused-symbols-report.py#L51-L78
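Editor's note: a sketch of the first regex against a linker line (the ld output shown is illustrative; Demangle and Unyuck are module helpers not reproduced here):

import re

symbol_re = re.compile(r"'\.text\.(\S+)' in file '(\S+)'$")
line = ("/usr/bin/ld: removing unused section '.text._Z3foov'"
        " in file 'out/Release/obj.target/base/foo.o'")
print(symbol_re.search(line).groups())
# ('_Z3foov', 'out/Release/obj.target/base/foo.o')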
alibaba/weex_js_engine
2bdf4b6f020c1fc99c63f649718f6faf7e27fdde
jni/v8core/v8/build/gyp/pylib/gyp/input.py
python
ValidateTargetType
(target, target_dict)
Ensures the 'type' field on the target is one of the known types. Arguments: target: string, name of target. target_dict: dict, target spec. Raises an exception on error.
Ensures the 'type' field on the target is one of the known types.
[ "Ensures", "the", "type", "field", "on", "the", "target", "is", "one", "of", "the", "known", "types", "." ]
def ValidateTargetType(target, target_dict): """Ensures the 'type' field on the target is one of the known types. Arguments: target: string, name of target. target_dict: dict, target spec. Raises an exception on error. """ VALID_TARGET_TYPES = ('executable', 'loadable_module', 'static_library', 'shared_library', 'none') target_type = target_dict.get('type', None) if target_type not in VALID_TARGET_TYPES: raise GypError("Target %s has an invalid target type '%s'. " "Must be one of %s." % (target, target_type, '/'.join(VALID_TARGET_TYPES)))
[ "def", "ValidateTargetType", "(", "target", ",", "target_dict", ")", ":", "VALID_TARGET_TYPES", "=", "(", "'executable'", ",", "'loadable_module'", ",", "'static_library'", ",", "'shared_library'", ",", "'none'", ")", "target_type", "=", "target_dict", ".", "get", "(", "'type'", ",", "None", ")", "if", "target_type", "not", "in", "VALID_TARGET_TYPES", ":", "raise", "GypError", "(", "\"Target %s has an invalid target type '%s'. \"", "\"Must be one of %s.\"", "%", "(", "target", ",", "target_type", ",", "'/'", ".", "join", "(", "VALID_TARGET_TYPES", ")", ")", ")" ]
https://github.com/alibaba/weex_js_engine/blob/2bdf4b6f020c1fc99c63f649718f6faf7e27fdde/jni/v8core/v8/build/gyp/pylib/gyp/input.py#L2127-L2143
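Editor's note: a usage sketch, assuming gyp is importable (the target name and dict are illustrative):

from gyp.common import GypError
from gyp.input import ValidateTargetType

ValidateTargetType('base', {'type': 'static_library'})   # passes silently
try:
    ValidateTargetType('base', {'type': 'plugin'})
except GypError as exc:
    print(exc)   # ... invalid target type 'plugin' ...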
arx/ArxLibertatis
0313c51625f3f55016cdad43d2c7f7296d27949c
scripts/cpplint.py
python
RemoveMultiLineCommentsFromRange
(lines, begin, end)
Clears a range of lines for multi-line comments.
Clears a range of lines for multi-line comments.
[ "Clears", "a", "range", "of", "lines", "for", "multi", "-", "line", "comments", "." ]
def RemoveMultiLineCommentsFromRange(lines, begin, end): """Clears a range of lines for multi-line comments.""" # Having // dummy comments makes the lines non-empty, so we will not get # unnecessary blank line warnings later in the code. for i in range(begin, end): lines[i] = re.search('^\t*', lines[i]).group(0) + '// dummy'
[ "def", "RemoveMultiLineCommentsFromRange", "(", "lines", ",", "begin", ",", "end", ")", ":", "# Having // dummy comments makes the lines non-empty, so we will not get", "# unnecessary blank line warnings later in the code.", "for", "i", "in", "range", "(", "begin", ",", "end", ")", ":", "lines", "[", "i", "]", "=", "re", ".", "search", "(", "'^\\t*'", ",", "lines", "[", "i", "]", ")", ".", "group", "(", "0", ")", "+", "'// dummy'" ]
https://github.com/arx/ArxLibertatis/blob/0313c51625f3f55016cdad43d2c7f7296d27949c/scripts/cpplint.py#L925-L930
nileshkulkarni/csm
0e6e0e7d4f725fd36f2414c0be4b9d83197aa1fc
csm/utils/transformations.py
python
angle_between_vectors
(v0, v1, directed=True, axis=0)
return numpy.arccos(dot if directed else numpy.fabs(dot))
Return angle between vectors. If directed is False, the input vectors are interpreted as undirected axes, i.e. the maximum angle is pi/2. >>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3]) >>> numpy.allclose(a, math.pi) True >>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3], directed=False) >>> numpy.allclose(a, 0) True >>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]] >>> v1 = [[3], [0], [0]] >>> a = angle_between_vectors(v0, v1) >>> numpy.allclose(a, [0, 1.5708, 1.5708, 0.95532]) True >>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]] >>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]] >>> a = angle_between_vectors(v0, v1, axis=1) >>> numpy.allclose(a, [1.5708, 1.5708, 1.5708, 0.95532]) True
Return angle between vectors.
[ "Return", "angle", "between", "vectors", "." ]
def angle_between_vectors(v0, v1, directed=True, axis=0): """Return angle between vectors. If directed is False, the input vectors are interpreted as undirected axes, i.e. the maximum angle is pi/2. >>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3]) >>> numpy.allclose(a, math.pi) True >>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3], directed=False) >>> numpy.allclose(a, 0) True >>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]] >>> v1 = [[3], [0], [0]] >>> a = angle_between_vectors(v0, v1) >>> numpy.allclose(a, [0, 1.5708, 1.5708, 0.95532]) True >>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]] >>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]] >>> a = angle_between_vectors(v0, v1, axis=1) >>> numpy.allclose(a, [1.5708, 1.5708, 1.5708, 0.95532]) True """ v0 = numpy.array(v0, dtype=numpy.float64, copy=False) v1 = numpy.array(v1, dtype=numpy.float64, copy=False) dot = numpy.sum(v0 * v1, axis=axis) dot /= vector_norm(v0, axis=axis) * vector_norm(v1, axis=axis) return numpy.arccos(dot if directed else numpy.fabs(dot))
[ "def", "angle_between_vectors", "(", "v0", ",", "v1", ",", "directed", "=", "True", ",", "axis", "=", "0", ")", ":", "v0", "=", "numpy", ".", "array", "(", "v0", ",", "dtype", "=", "numpy", ".", "float64", ",", "copy", "=", "False", ")", "v1", "=", "numpy", ".", "array", "(", "v1", ",", "dtype", "=", "numpy", ".", "float64", ",", "copy", "=", "False", ")", "dot", "=", "numpy", ".", "sum", "(", "v0", "*", "v1", ",", "axis", "=", "axis", ")", "dot", "/=", "vector_norm", "(", "v0", ",", "axis", "=", "axis", ")", "*", "vector_norm", "(", "v1", ",", "axis", "=", "axis", ")", "return", "numpy", ".", "arccos", "(", "dot", "if", "directed", "else", "numpy", ".", "fabs", "(", "dot", ")", ")" ]
https://github.com/nileshkulkarni/csm/blob/0e6e0e7d4f725fd36f2414c0be4b9d83197aa1fc/csm/utils/transformations.py#L1807-L1835
weolar/miniblink49
1c4678db0594a4abde23d3ebbcc7cd13c3170777
third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/_stream_hixie75.py
python
StreamHixie75.send_message
(self, message, end=True, binary=False)
Send message. Args: message: unicode string to send. binary: not used in hixie75. Raises: BadOperationException: when called on a server-terminated connection.
Send message.
[ "Send", "message", "." ]
def send_message(self, message, end=True, binary=False): """Send message. Args: message: unicode string to send. binary: not used in hixie75. Raises: BadOperationException: when called on a server-terminated connection. """ if not end: raise BadOperationException( 'StreamHixie75 doesn\'t support send_message with end=False') if binary: raise BadOperationException( 'StreamHixie75 doesn\'t support send_message with binary=True') if self._request.server_terminated: raise BadOperationException( 'Requested send_message after sending out a closing handshake') self._write(''.join(['\x00', message.encode('utf-8'), '\xff']))
[ "def", "send_message", "(", "self", ",", "message", ",", "end", "=", "True", ",", "binary", "=", "False", ")", ":", "if", "not", "end", ":", "raise", "BadOperationException", "(", "'StreamHixie75 doesn\\'t support send_message with end=False'", ")", "if", "binary", ":", "raise", "BadOperationException", "(", "'StreamHixie75 doesn\\'t support send_message with binary=True'", ")", "if", "self", ".", "_request", ".", "server_terminated", ":", "raise", "BadOperationException", "(", "'Requested send_message after sending out a closing handshake'", ")", "self", ".", "_write", "(", "''", ".", "join", "(", "[", "'\\x00'", ",", "message", ".", "encode", "(", "'utf-8'", ")", ",", "'\\xff'", "]", ")", ")" ]
https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/_stream_hixie75.py#L73-L97
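Editor's note: the framing itself is just a 0x00 prefix and 0xFF suffix around UTF-8 text; a Python 2 sketch matching the module's byte-string style:

# Python 2: str is a byte string, so ''.join accepts the encoded pieces.
message = u'hello'
frame = ''.join(['\x00', message.encode('utf-8'), '\xff'])
print(repr(frame))   # '\x00hello\xff'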
google/shaka-packager
e1b0c7c45431327fd3ce193514a5407d07b39b22
packager/third_party/protobuf/python/google/protobuf/internal/python_message.py
python
_VerifyExtensionHandle
(message, extension_handle)
Verify that the given extension handle is valid.
Verify that the given extension handle is valid.
[ "Verify", "that", "the", "given", "extension", "handle", "is", "valid", "." ]
def _VerifyExtensionHandle(message, extension_handle): """Verify that the given extension handle is valid.""" if not isinstance(extension_handle, _FieldDescriptor): raise KeyError('HasExtension() expects an extension handle, got: %s' % extension_handle) if not extension_handle.is_extension: raise KeyError('"%s" is not an extension.' % extension_handle.full_name) if not extension_handle.containing_type: raise KeyError('"%s" is missing a containing_type.' % extension_handle.full_name) if extension_handle.containing_type is not message.DESCRIPTOR: raise KeyError('Extension "%s" extends message type "%s", but this ' 'message is of type "%s".' % (extension_handle.full_name, extension_handle.containing_type.full_name, message.DESCRIPTOR.full_name))
[ "def", "_VerifyExtensionHandle", "(", "message", ",", "extension_handle", ")", ":", "if", "not", "isinstance", "(", "extension_handle", ",", "_FieldDescriptor", ")", ":", "raise", "KeyError", "(", "'HasExtension() expects an extension handle, got: %s'", "%", "extension_handle", ")", "if", "not", "extension_handle", ".", "is_extension", ":", "raise", "KeyError", "(", "'\"%s\" is not an extension.'", "%", "extension_handle", ".", "full_name", ")", "if", "not", "extension_handle", ".", "containing_type", ":", "raise", "KeyError", "(", "'\"%s\" is missing a containing_type.'", "%", "extension_handle", ".", "full_name", ")", "if", "extension_handle", ".", "containing_type", "is", "not", "message", ".", "DESCRIPTOR", ":", "raise", "KeyError", "(", "'Extension \"%s\" extends message type \"%s\", but this '", "'message is of type \"%s\".'", "%", "(", "extension_handle", ".", "full_name", ",", "extension_handle", ".", "containing_type", ".", "full_name", ",", "message", ".", "DESCRIPTOR", ".", "full_name", ")", ")" ]
https://github.com/google/shaka-packager/blob/e1b0c7c45431327fd3ce193514a5407d07b39b22/packager/third_party/protobuf/python/google/protobuf/internal/python_message.py#L213-L232
cms-sw/cmssw
fd9de012d503d3405420bcbeec0ec879baa57cf2
Configuration/DataProcessing/python/Utils.py
python
addMonitoring
(process)
return process
_addMonitoring_ Add the monitoring services to the process provided in order to write out performance summaries to the framework job report
_addMonitoring_ Add the monitoring services to the process provided in order to write out performance summaries to the framework job report
[ "_addMonitoring_", "Add", "the", "monitoring", "services", "to", "the", "process", "provided", "in", "order", "to", "write", "out", "performance", "summaries", "to", "the", "framework", "job", "report" ]
def addMonitoring(process): """ _addMonitoring_ Add the monitoring services to the process provided in order to write out performance summaries to the framework job report """ import FWCore.ParameterSet.Config as cms process.SimpleMemoryCheck = cms.Service("SimpleMemoryCheck", jobReportOutputOnly = cms.untracked.bool(True) ) process.Timing = cms.Service("Timing", summaryOnly = cms.untracked.bool(True) ) return process
[ "def", "addMonitoring", "(", "process", ")", ":", "import", "FWCore", ".", "ParameterSet", ".", "Config", "as", "cms", "process", ".", "SimpleMemoryCheck", "=", "cms", ".", "Service", "(", "\"SimpleMemoryCheck\"", ",", "jobReportOutputOnly", "=", "cms", ".", "untracked", ".", "bool", "(", "True", ")", ")", "process", ".", "Timing", "=", "cms", ".", "Service", "(", "\"Timing\"", ",", "summaryOnly", "=", "cms", ".", "untracked", ".", "bool", "(", "True", ")", ")", "return", "process" ]
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Configuration/DataProcessing/python/Utils.py#L38-L54
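Editor's note: a usage sketch (meaningful only inside a CMSSW environment where FWCore is available):

import FWCore.ParameterSet.Config as cms
from Configuration.DataProcessing.Utils import addMonitoring

process = cms.Process("DEMO")
process = addMonitoring(process)
# process now carries SimpleMemoryCheck and Timing, both restricted to
# writing summaries into the framework job report.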
InsightSoftwareConsortium/ITK
87acfce9a93d928311c38bc371b666b515b9f19d
Modules/ThirdParty/pygccxml/src/pygccxml/declarations/type_traits.py
python
is_void_pointer
(type_)
return is_same(type_, cpptypes.pointer_t(cpptypes.void_t()))
returns True, if type represents `void*`, False otherwise
returns True, if type represents `void*`, False otherwise
[ "returns", "True", "if", "type", "represents", "void", "*", "False", "otherwise" ]
def is_void_pointer(type_): """returns True, if type represents `void*`, False otherwise""" return is_same(type_, cpptypes.pointer_t(cpptypes.void_t()))
[ "def", "is_void_pointer", "(", "type_", ")", ":", "return", "is_same", "(", "type_", ",", "cpptypes", ".", "pointer_t", "(", "cpptypes", ".", "void_t", "(", ")", ")", ")" ]
https://github.com/InsightSoftwareConsortium/ITK/blob/87acfce9a93d928311c38bc371b666b515b9f19d/Modules/ThirdParty/pygccxml/src/pygccxml/declarations/type_traits.py#L198-L200
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/ops/linalg/linear_operator_circulant.py
python
_BaseLinearOperatorCirculant.block_shape_tensor
(self)
return self._block_shape_tensor()
Shape of the block dimensions of `self.spectrum`.
Shape of the block dimensions of `self.spectrum`.
[ "Shape", "of", "the", "block", "dimensions", "of", "self", ".", "spectrum", "." ]
def block_shape_tensor(self): """Shape of the block dimensions of `self.spectrum`.""" # If spectrum.shape = [s0, s1, s2], and block_depth = 2, # block_shape = [s1, s2] return self._block_shape_tensor()
[ "def", "block_shape_tensor", "(", "self", ")", ":", "# If spectrum.shape = [s0, s1, s2], and block_depth = 2,", "# block_shape = [s1, s2]", "return", "self", ".", "_block_shape_tensor", "(", ")" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/linalg/linear_operator_circulant.py#L177-L181
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/distribute/one_device_strategy.py
python
OneDeviceExtended._in_multi_worker_mode
(self)
return False
Whether this strategy indicates working in multi-worker settings.
Whether this strategy indicates working in multi-worker settings.
[ "Whether", "this", "strategy", "indicates", "working", "in", "multi", "-", "worker", "settings", "." ]
def _in_multi_worker_mode(self): """Whether this strategy indicates working in multi-worker settings.""" return False
[ "def", "_in_multi_worker_mode", "(", "self", ")", ":", "return", "False" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/distribute/one_device_strategy.py#L428-L430
thalium/icebox
99d147d5b9269222225443ce171b4fd46d8985d4
third_party/virtualbox/src/VBox/GuestHost/OpenGL/glapi_parser/apiutil.py
python
CanCompile
(funcName)
Return 1 if the function can be compiled into display lists, else 0.
Return 1 if the function can be compiled into display lists, else 0.
[ "Return", "1", "if", "the", "function", "can", "be", "compiled", "into", "display", "lists", "else", "0", "." ]
def CanCompile(funcName): """Return 1 if the function can be compiled into display lists, else 0.""" props = Properties(funcName) if ("nolist" in props or "get" in props or "setclient" in props): return 0 else: return 1
[ "def", "CanCompile", "(", "funcName", ")", ":", "props", "=", "Properties", "(", "funcName", ")", "if", "(", "\"nolist\"", "in", "props", "or", "\"get\"", "in", "props", "or", "\"setclient\"", "in", "props", ")", ":", "return", "0", "else", ":", "return", "1" ]
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/VBox/GuestHost/OpenGL/glapi_parser/apiutil.py#L399-L407
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/tracing/tracing/metrics/discover.py
python
DiscoverMetrics
(modules_to_load)
Returns a list of registered metrics. Args: modules_to_load: a list of modules (string) to be loaded before discovering the registered metrics.
Returns a list of registered metrics.
[ "Returns", "a", "list", "of", "registered", "metrics", "." ]
def DiscoverMetrics(modules_to_load): """ Returns a list of registered metrics. Args: modules_to_load: a list of modules (string) to be loaded before discovering the registered metrics. """ assert isinstance(modules_to_load, list) project = tracing_project.TracingProject() all_source_paths = list(project.source_paths) res = vinn.RunFile( _DISCOVER_CMD_LINE, source_paths=all_source_paths, js_args=modules_to_load) if res.returncode != 0: raise RuntimeError('Error running metrics_discover_cmdline: ' + res.stdout) else: return [str(m) for m in json.loads(res.stdout)]
[ "def", "DiscoverMetrics", "(", "modules_to_load", ")", ":", "assert", "isinstance", "(", "modules_to_load", ",", "list", ")", "project", "=", "tracing_project", ".", "TracingProject", "(", ")", "all_source_paths", "=", "list", "(", "project", ".", "source_paths", ")", "res", "=", "vinn", ".", "RunFile", "(", "_DISCOVER_CMD_LINE", ",", "source_paths", "=", "all_source_paths", ",", "js_args", "=", "modules_to_load", ")", "if", "res", ".", "returncode", "!=", "0", ":", "raise", "RuntimeError", "(", "'Error running metrics_discover_cmdline: '", "+", "res", ".", "stdout", ")", "else", ":", "return", "[", "str", "(", "m", ")", "for", "m", "in", "json", ".", "loads", "(", "res", ".", "stdout", ")", "]" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/tracing/tracing/metrics/discover.py#L16-L34
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/urllib3/contrib/_securetransport/low_level.py
python
_cert_array_from_pem
(pem_bundle)
return cert_array
Given a bundle of certs in PEM format, turns them into a CFArray of certs that can be used to validate a cert chain.
Given a bundle of certs in PEM format, turns them into a CFArray of certs that can be used to validate a cert chain.
[ "Given", "a", "bundle", "of", "certs", "in", "PEM", "format", "turns", "them", "into", "a", "CFArray", "of", "certs", "that", "can", "be", "used", "to", "validate", "a", "cert", "chain", "." ]
def _cert_array_from_pem(pem_bundle): """ Given a bundle of certs in PEM format, turns them into a CFArray of certs that can be used to validate a cert chain. """ der_certs = [ base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle) ] if not der_certs: raise ssl.SSLError("No root certificates specified") cert_array = CoreFoundation.CFArrayCreateMutable( CoreFoundation.kCFAllocatorDefault, 0, ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks) ) if not cert_array: raise ssl.SSLError("Unable to allocate memory!") try: for der_bytes in der_certs: certdata = _cf_data_from_bytes(der_bytes) if not certdata: raise ssl.SSLError("Unable to allocate memory!") cert = Security.SecCertificateCreateWithData( CoreFoundation.kCFAllocatorDefault, certdata ) CoreFoundation.CFRelease(certdata) if not cert: raise ssl.SSLError("Unable to build cert object!") CoreFoundation.CFArrayAppendValue(cert_array, cert) CoreFoundation.CFRelease(cert) except Exception: # We need to free the array before the exception bubbles further. # We only want to do that if an error occurs: otherwise, the caller # should free. CoreFoundation.CFRelease(cert_array) raise return cert_array
[ "def", "_cert_array_from_pem", "(", "pem_bundle", ")", ":", "der_certs", "=", "[", "base64", ".", "b64decode", "(", "match", ".", "group", "(", "1", ")", ")", "for", "match", "in", "_PEM_CERTS_RE", ".", "finditer", "(", "pem_bundle", ")", "]", "if", "not", "der_certs", ":", "raise", "ssl", ".", "SSLError", "(", "\"No root certificates specified\"", ")", "cert_array", "=", "CoreFoundation", ".", "CFArrayCreateMutable", "(", "CoreFoundation", ".", "kCFAllocatorDefault", ",", "0", ",", "ctypes", ".", "byref", "(", "CoreFoundation", ".", "kCFTypeArrayCallBacks", ")", ")", "if", "not", "cert_array", ":", "raise", "ssl", ".", "SSLError", "(", "\"Unable to allocate memory!\"", ")", "try", ":", "for", "der_bytes", "in", "der_certs", ":", "certdata", "=", "_cf_data_from_bytes", "(", "der_bytes", ")", "if", "not", "certdata", ":", "raise", "ssl", ".", "SSLError", "(", "\"Unable to allocate memory!\"", ")", "cert", "=", "Security", ".", "SecCertificateCreateWithData", "(", "CoreFoundation", ".", "kCFAllocatorDefault", ",", "certdata", ")", "CoreFoundation", ".", "CFRelease", "(", "certdata", ")", "if", "not", "cert", ":", "raise", "ssl", ".", "SSLError", "(", "\"Unable to build cert object!\"", ")", "CoreFoundation", ".", "CFArrayAppendValue", "(", "cert_array", ",", "cert", ")", "CoreFoundation", ".", "CFRelease", "(", "cert", ")", "except", "Exception", ":", "# We need to free the array before the exception bubbles further.", "# We only want to do that if an error occurs: otherwise, the caller", "# should free.", "CoreFoundation", ".", "CFRelease", "(", "cert_array", ")", "return", "cert_array" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/urllib3/contrib/_securetransport/low_level.py#L109-L149
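The PEM-to-DER step at the top of _cert_array_from_pem can be exercised without CoreFoundation; the regex below is an assumption mirroring urllib3's _PEM_CERTS_RE, which this record references but does not define.

import base64
import re

# Assumed shape of urllib3's _PEM_CERTS_RE (bytes pattern, DOTALL).
_PEM_CERTS_RE = re.compile(
    b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL)

pem = b"-----BEGIN CERTIFICATE-----\nAAAA\n-----END CERTIFICATE-----\n"
der_certs = [base64.b64decode(m.group(1)) for m in _PEM_CERTS_RE.finditer(pem)]
print(len(der_certs), "DER blob(s) decoded")  # 1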
glinscott/leela-chess
481f1de6c0d2ad7f4e27df551ac5fc754e684f69
training/tf/chunkparser.py
python
ChunkParser.batch_gen
(self, gen)
Pack multiple records into a single batch
Pack multiple records into a single batch
[ "Pack", "multiple", "records", "into", "a", "single", "batch" ]
def batch_gen(self, gen): """ Pack multiple records into a single batch """ # Get N records. We flatten the returned generator to # a list because we need to reuse it. while True: s = list(itertools.islice(gen, self.batch_size)) if not len(s): return yield ( b''.join([x[0] for x in s]), b''.join([x[1] for x in s]), b''.join([x[2] for x in s]) )
[ "def", "batch_gen", "(", "self", ",", "gen", ")", ":", "# Get N records. We flatten the returned generator to", "# a list because we need to reuse it.", "while", "True", ":", "s", "=", "list", "(", "itertools", ".", "islice", "(", "gen", ",", "self", ".", "batch_size", ")", ")", "if", "not", "len", "(", "s", ")", ":", "return", "yield", "(", "b''", ".", "join", "(", "[", "x", "[", "0", "]", "for", "x", "in", "s", "]", ")", ",", "b''", ".", "join", "(", "[", "x", "[", "1", "]", "for", "x", "in", "s", "]", ")", ",", "b''", ".", "join", "(", "[", "x", "[", "2", "]", "for", "x", "in", "s", "]", ")", ")" ]
https://github.com/glinscott/leela-chess/blob/481f1de6c0d2ad7f4e27df551ac5fc754e684f69/training/tf/chunkparser.py#L251-L263
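The itertools.islice pattern in batch_gen generalizes to any generator; a standalone sketch of the same batching logic:

import itertools

def batches(gen, batch_size):
    # Pull up to batch_size items at a time; stop when the source is empty.
    while True:
        s = list(itertools.islice(gen, batch_size))
        if not s:
            return
        yield s

print(list(batches(iter(range(7)), 3)))  # [[0, 1, 2], [3, 4, 5], [6]]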
qgis/QGIS
15a77662d4bb712184f6aa60d0bd663010a76a75
python/plugins/db_manager/db_plugins/postgis/connector.py
python
PostGisDBConnector.setTableColumnType
(self, table, column, data_type)
return self.updateTableColumn(table, column, None, data_type)
Changes column type
Changes column type
[ "Changes", "column", "type" ]
def setTableColumnType(self, table, column, data_type): """Changes column type """ return self.updateTableColumn(table, column, None, data_type)
[ "def", "setTableColumnType", "(", "self", ",", "table", ",", "column", ",", "data_type", ")", ":", "return", "self", ".", "updateTableColumn", "(", "table", ",", "column", ",", "None", ",", "data_type", ")" ]
https://github.com/qgis/QGIS/blob/15a77662d4bb712184f6aa60d0bd663010a76a75/python/plugins/db_manager/db_plugins/postgis/connector.py#L1132-L1134
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/wsgiref/handlers.py
python
BaseHandler.run
(self, application)
Invoke the application
Invoke the application
[ "Invoke", "the", "application" ]
def run(self, application): """Invoke the application""" # Note to self: don't move the close()! Asynchronous servers shouldn't # call close() from finish_response(), so if you close() anywhere but # the double-error branch here, you'll break asynchronous servers by # prematurely closing. Async servers must return from 'run()' without # closing if there might still be output to iterate over. try: self.setup_environ() self.result = application(self.environ, self.start_response) self.finish_response() except: try: self.handle_error() except: # If we get an error handling an error, just give up already! self.close() raise
[ "def", "run", "(", "self", ",", "application", ")", ":", "# Note to self: don't move the close()! Asynchronous servers shouldn't", "# call close() from finish_response(), so if you close() anywhere but", "# the double-error branch here, you'll break asynchronous servers by", "# prematurely closing. Async servers must return from 'run()' without", "# closing if there might still be output to iterate over.", "try", ":", "self", ".", "setup_environ", "(", ")", "self", ".", "result", "=", "application", "(", "self", ".", "environ", ",", "self", ".", "start_response", ")", "self", ".", "finish_response", "(", ")", "except", ":", "try", ":", "self", ".", "handle_error", "(", ")", "except", ":", "# If we get an error handling an error, just give up already!", "self", ".", "close", "(", ")", "raise" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/wsgiref/handlers.py#L76-L93
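run() is normally driven indirectly through a wsgiref server; a minimal sketch of the application it invokes, using Python 3 stdlib naming:

from wsgiref.simple_server import make_server

def app(environ, start_response):
    # start_response is the callable that BaseHandler.run wires up.
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'hello']

srv = make_server('127.0.0.1', 0, app)
print('listening on port', srv.server_port)
# srv.handle_request() would route one request through BaseHandler.run(app)
srv.server_close()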
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
tools/valgrind/gdb_helper.py
python
AddressTable.Add
(self, binary, address)
Register a lookup request.
Register a lookup request.
[ "Register", "a", "lookup", "request", "." ]
def Add(self, binary, address): ''' Register a lookup request. ''' if binary == '': logging.warn('adding address %s in empty binary?' % address) if binary in self._binaries: self._binaries[binary].append(address) else: self._binaries[binary] = [address] self._all_resolved = False
[ "def", "Add", "(", "self", ",", "binary", ",", "address", ")", ":", "if", "binary", "==", "''", ":", "logging", ".", "warn", "(", "'adding address %s in empty binary?'", "%", "address", ")", "if", "binary", "in", "self", ".", "_binaries", ":", "self", ".", "_binaries", "[", "binary", "]", ".", "append", "(", "address", ")", "else", ":", "self", ".", "_binaries", "[", "binary", "]", "=", "[", "address", "]", "self", ".", "_all_resolved", "=", "False" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/valgrind/gdb_helper.py#L58-L66
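The dict-of-lists accumulation in Add is equivalent to a setdefault one-liner; a small standalone sketch of the same bookkeeping:

binaries = {}
for binary, address in [('libfoo.so', '0x1000'), ('libfoo.so', '0x2000')]:
    # Same effect as the if/else branch in Add above.
    binaries.setdefault(binary, []).append(address)
print(binaries)  # {'libfoo.so': ['0x1000', '0x2000']}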
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/frame.py
python
DataFrame._get_agg_axis
(self, axis_num)
Let's be explicit about this.
Let's be explicit about this.
[ "Let", "s", "be", "explicit", "about", "this", "." ]
def _get_agg_axis(self, axis_num): """ Let's be explicit about this. """ if axis_num == 0: return self.columns elif axis_num == 1: return self.index else: raise ValueError(f"Axis must be 0 or 1 (got {repr(axis_num)})")
[ "def", "_get_agg_axis", "(", "self", ",", "axis_num", ")", ":", "if", "axis_num", "==", "0", ":", "return", "self", ".", "columns", "elif", "axis_num", "==", "1", ":", "return", "self", ".", "index", "else", ":", "raise", "ValueError", "(", "f\"Axis must be 0 or 1 (got {repr(axis_num)})\"", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/frame.py#L8084-L8093
BlzFans/wke
b0fa21158312e40c5fbd84682d643022b6c34a93
cygwin/lib/python2.6/urllib.py
python
localhost
()
return _localhost
Return the IP address of the magic hostname 'localhost'.
Return the IP address of the magic hostname 'localhost'.
[ "Return", "the", "IP", "address", "of", "the", "magic", "hostname", "localhost", "." ]
def localhost(): """Return the IP address of the magic hostname 'localhost'.""" global _localhost if _localhost is None: _localhost = socket.gethostbyname('localhost') return _localhost
[ "def", "localhost", "(", ")", ":", "global", "_localhost", "if", "_localhost", "is", "None", ":", "_localhost", "=", "socket", ".", "gethostbyname", "(", "'localhost'", ")", "return", "_localhost" ]
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/urllib.py#L802-L807
devpack/android-python27
d42dd67565e104cf7b0b50eb473f615db3e69901
python-build-with-qt/PyQt-x11-gpl-4.8/pyqtconfig.py
python
phononModuleMakefile.__init__
(self, *args, **kw)
Initialise an instance of a module Makefile.
Initialise an instance of a module Makefile.
[ "Initialise", "an", "instance", "of", "a", "module", "Makefile", "." ]
def __init__(self, *args, **kw): """Initialise an instance of a module Makefile. """ if "qt" not in kw: kw["qt"] = ["QtCore", "QtGui", "phonon"] QtGuiModuleMakefile.__init__(self, *args, **kw)
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "if", "\"qt\"", "not", "in", "kw", ":", "kw", "[", "\"qt\"", "]", "=", "[", "\"QtCore\"", ",", "\"QtGui\"", ",", "\"phonon\"", "]", "QtGuiModuleMakefile", ".", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kw", ")" ]
https://github.com/devpack/android-python27/blob/d42dd67565e104cf7b0b50eb473f615db3e69901/python-build-with-qt/PyQt-x11-gpl-4.8/pyqtconfig.py#L367-L373
LiquidPlayer/LiquidCore
9405979363f2353ac9a71ad8ab59685dd7f919c9
deps/node-10.15.3/deps/v8/src/PRESUBMIT.py
python
PostUploadHook
(cl, change, output_api)
return output_api.EnsureCQIncludeTrybotsAreAdded( cl, [ 'luci.chromium.try:linux_chromium_rel_ng' ], 'Automatically added layout test trybots to run tests on CQ.')
git cl upload will call this hook after the issue is created/modified. This hook adds extra try bots to the CL description in order to run layout tests in addition to CQ try bots.
git cl upload will call this hook after the issue is created/modified.
[ "git", "cl", "upload", "will", "call", "this", "hook", "after", "the", "issue", "is", "created", "/", "modified", "." ]
def PostUploadHook(cl, change, output_api): """git cl upload will call this hook after the issue is created/modified. This hook adds extra try bots to the CL description in order to run layout tests in addition to CQ try bots. """ def is_api_cc(f): return 'api.cc' == os.path.split(f.LocalPath())[1] if not change.AffectedFiles(file_filter=is_api_cc): return [] return output_api.EnsureCQIncludeTrybotsAreAdded( cl, [ 'luci.chromium.try:linux_chromium_rel_ng' ], 'Automatically added layout test trybots to run tests on CQ.')
[ "def", "PostUploadHook", "(", "cl", ",", "change", ",", "output_api", ")", ":", "def", "is_api_cc", "(", "f", ")", ":", "return", "'api.cc'", "==", "os", ".", "path", ".", "split", "(", "f", ".", "LocalPath", "(", ")", ")", "[", "1", "]", "if", "not", "change", ".", "AffectedFiles", "(", "file_filter", "=", "is_api_cc", ")", ":", "return", "[", "]", "return", "output_api", ".", "EnsureCQIncludeTrybotsAreAdded", "(", "cl", ",", "[", "'luci.chromium.try:linux_chromium_rel_ng'", "]", ",", "'Automatically added layout test trybots to run tests on CQ.'", ")" ]
https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/deps/v8/src/PRESUBMIT.py#L14-L29
panda3d/panda3d
833ad89ebad58395d0af0b7ec08538e5e4308265
direct/src/directtools/DirectGeometry.py
python
LineNodePath.drawArrow
(self, sv, ev, arrowAngle, arrowLength)
Do the work of moving the cursor around to draw an arrow from sv to ev. Hack: the arrows take the z value of the end point
Do the work of moving the cursor around to draw an arrow from sv to ev. Hack: the arrows take the z value of the end point
[ "Do", "the", "work", "of", "moving", "the", "cursor", "around", "to", "draw", "an", "arrow", "from", "sv", "to", "ev", ".", "Hack", ":", "the", "arrows", "take", "the", "z", "value", "of", "the", "end", "point" ]
def drawArrow(self, sv, ev, arrowAngle, arrowLength): """ Do the work of moving the cursor around to draw an arrow from sv to ev. Hack: the arrows take the z value of the end point """ self.moveTo(sv) self.drawTo(ev) v = sv - ev # Find the angle of the line angle = math.atan2(v[1], v[0]) # Get the arrow angles a1 = angle + deg2Rad(arrowAngle) a2 = angle - deg2Rad(arrowAngle) # Get the arrow points a1x = arrowLength * math.cos(a1) a1y = arrowLength * math.sin(a1) a2x = arrowLength * math.cos(a2) a2y = arrowLength * math.sin(a2) z = ev[2] self.moveTo(ev) self.drawTo(Point3(ev + Point3(a1x, a1y, z))) self.moveTo(ev) self.drawTo(Point3(ev + Point3(a2x, a2y, z)))
[ "def", "drawArrow", "(", "self", ",", "sv", ",", "ev", ",", "arrowAngle", ",", "arrowLength", ")", ":", "self", ".", "moveTo", "(", "sv", ")", "self", ".", "drawTo", "(", "ev", ")", "v", "=", "sv", "-", "ev", "# Find the angle of the line", "angle", "=", "math", ".", "atan2", "(", "v", "[", "1", "]", ",", "v", "[", "0", "]", ")", "# Get the arrow angles", "a1", "=", "angle", "+", "deg2Rad", "(", "arrowAngle", ")", "a2", "=", "angle", "-", "deg2Rad", "(", "arrowAngle", ")", "# Get the arrow points", "a1x", "=", "arrowLength", "*", "math", ".", "cos", "(", "a1", ")", "a1y", "=", "arrowLength", "*", "math", ".", "sin", "(", "a1", ")", "a2x", "=", "arrowLength", "*", "math", ".", "cos", "(", "a2", ")", "a2y", "=", "arrowLength", "*", "math", ".", "sin", "(", "a2", ")", "z", "=", "ev", "[", "2", "]", "self", ".", "moveTo", "(", "ev", ")", "self", ".", "drawTo", "(", "Point3", "(", "ev", "+", "Point3", "(", "a1x", ",", "a1y", ",", "z", ")", ")", ")", "self", ".", "moveTo", "(", "ev", ")", "self", ".", "drawTo", "(", "Point3", "(", "ev", "+", "Point3", "(", "a2x", ",", "a2y", ",", "z", ")", ")", ")" ]
https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/direct/src/directtools/DirectGeometry.py#L70-L92
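The arrowhead math in drawArrow reduces to two points offset from the end point ev by arrowLength at plus/minus arrowAngle around the line direction; a standalone check with plain floats:

import math

sv, ev = (0.0, 0.0), (1.0, 0.0)
arrowAngle, arrowLength = 30.0, 0.2
angle = math.atan2(sv[1] - ev[1], sv[0] - ev[0])  # direction of v = sv - ev
for a in (angle + math.radians(arrowAngle), angle - math.radians(arrowAngle)):
    # Each barb lands arrowLength away from ev, rotated off the line.
    print(round(ev[0] + arrowLength * math.cos(a), 3),
          round(ev[1] + arrowLength * math.sin(a), 3))
# prints 0.827 -0.1 and 0.827 0.1: both barbs trail behind the tip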
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/python/framework/random_seed.py
python
get_seed
(op_seed)
return seeds
Returns the local seeds an operation should use given an op-specific seed. Given operation-specific seed, `op_seed`, this helper function returns two seeds derived from graph-level and op-level seeds. Many random operations internally use the two seeds to allow user to change the seed globally for a graph, or for only specific operations. For details on how the graph-level seed interacts with op seeds, see @{tf.set_random_seed}. Args: op_seed: integer. Returns: A tuple of two integers that should be used for the local seed of this operation.
Returns the local seeds an operation should use given an op-specific seed.
[ "Returns", "the", "local", "seeds", "an", "operation", "should", "use", "given", "an", "op", "-", "specific", "seed", "." ]
def get_seed(op_seed): """Returns the local seeds an operation should use given an op-specific seed. Given operation-specific seed, `op_seed`, this helper function returns two seeds derived from graph-level and op-level seeds. Many random operations internally use the two seeds to allow user to change the seed globally for a graph, or for only specific operations. For details on how the graph-level seed interacts with op seeds, see @{tf.set_random_seed}. Args: op_seed: integer. Returns: A tuple of two integers that should be used for the local seed of this operation. """ is_graph_mode = context.in_graph_mode() if is_graph_mode: global_seed = ops.get_default_graph().seed else: global_seed = context.global_seed() if global_seed is not None: if op_seed is None: # pylint: disable=protected-access if is_graph_mode: op_seed = ops.get_default_graph()._last_id else: op_seed = context.internal_operation_seed() seeds = _truncate_seed(global_seed), _truncate_seed(op_seed) else: if op_seed is not None: seeds = DEFAULT_GRAPH_SEED, _truncate_seed(op_seed) else: seeds = None, None # Avoid (0, 0) as the C++ ops interpret it as nondeterminism, which would # be unexpected since Python docs say nondeterminism is (None, None). if seeds == (0, 0): return (0, _MAXINT32) return seeds
[ "def", "get_seed", "(", "op_seed", ")", ":", "is_graph_mode", "=", "context", ".", "in_graph_mode", "(", ")", "if", "is_graph_mode", ":", "global_seed", "=", "ops", ".", "get_default_graph", "(", ")", ".", "seed", "else", ":", "global_seed", "=", "context", ".", "global_seed", "(", ")", "if", "global_seed", "is", "not", "None", ":", "if", "op_seed", "is", "None", ":", "# pylint: disable=protected-access", "if", "is_graph_mode", ":", "op_seed", "=", "ops", ".", "get_default_graph", "(", ")", ".", "_last_id", "else", ":", "op_seed", "=", "context", ".", "internal_operation_seed", "(", ")", "seeds", "=", "_truncate_seed", "(", "global_seed", ")", ",", "_truncate_seed", "(", "op_seed", ")", "else", ":", "if", "op_seed", "is", "not", "None", ":", "seeds", "=", "DEFAULT_GRAPH_SEED", ",", "_truncate_seed", "(", "op_seed", ")", "else", ":", "seeds", "=", "None", ",", "None", "# Avoid (0, 0) as the C++ ops interpret it as nondeterminism, which would", "# be unexpected since Python docs say nondeterminism is (None, None).", "if", "seeds", "==", "(", "0", ",", "0", ")", ":", "return", "(", "0", ",", "_MAXINT32", ")", "return", "seeds" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/framework/random_seed.py#L35-L78
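A sketch of the two-level seeding that get_seed implements, written with TF 1.x-era API names consistent with this record; availability depends on the installed build, so treat it as illustrative.

import tensorflow as tf

tf.set_random_seed(1234)            # graph-level seed
a = tf.random_uniform([1])          # op seed filled in via get_seed
b = tf.random_uniform([1], seed=7)  # explicit op seed, combined with 1234
# With both seeds fixed, each op repeats its sequence across sessions.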
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/command/bdist_egg.py
python
bdist_egg.call_command
(self, cmdname, **kw)
return cmd
Invoke reinitialized command `cmdname` with keyword args
Invoke reinitialized command `cmdname` with keyword args
[ "Invoke", "reinitialized", "command", "cmdname", "with", "keyword", "args" ]
def call_command(self, cmdname, **kw): """Invoke reinitialized command `cmdname` with keyword args""" for dirname in INSTALL_DIRECTORY_ATTRS: kw.setdefault(dirname, self.bdist_dir) kw.setdefault('skip_build', self.skip_build) kw.setdefault('dry_run', self.dry_run) cmd = self.reinitialize_command(cmdname, **kw) self.run_command(cmdname) return cmd
[ "def", "call_command", "(", "self", ",", "cmdname", ",", "*", "*", "kw", ")", ":", "for", "dirname", "in", "INSTALL_DIRECTORY_ATTRS", ":", "kw", ".", "setdefault", "(", "dirname", ",", "self", ".", "bdist_dir", ")", "kw", ".", "setdefault", "(", "'skip_build'", ",", "self", ".", "skip_build", ")", "kw", ".", "setdefault", "(", "'dry_run'", ",", "self", ".", "dry_run", ")", "cmd", "=", "self", ".", "reinitialize_command", "(", "cmdname", ",", "*", "*", "kw", ")", "self", ".", "run_command", "(", "cmdname", ")", "return", "cmd" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/command/bdist_egg.py#L152-L160
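The setdefault pattern in call_command lets explicit keyword arguments win over per-command defaults; the behavior in isolation:

def call(**kw):
    kw.setdefault('skip_build', True)   # applied only if the caller omitted it
    kw.setdefault('dry_run', False)
    return kw

print(call(dry_run=True))  # {'dry_run': True, 'skip_build': True}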
CRYTEK/CRYENGINE
232227c59a220cbbd311576f0fbeba7bb53b2a8c
Editor/Python/windows/Lib/site-packages/pip/req/req_set.py
python
RequirementSet.__init__
(self, build_dir, src_dir, download_dir, upgrade=False, ignore_installed=False, as_egg=False, target_dir=None, ignore_dependencies=False, force_reinstall=False, use_user_site=False, session=None, pycompile=True, isolated=False, wheel_download_dir=None, wheel_cache=None)
Create a RequirementSet. :param wheel_download_dir: Where still-packed .whl files should be written to. If None they are written to the download_dir parameter. Separate to download_dir to permit only keeping wheel archives for pip wheel. :param download_dir: Where still packed archives should be written to. If None they are not saved, and are deleted immediately after unpacking. :param wheel_cache: The pip wheel cache, for passing to InstallRequirement.
Create a RequirementSet.
[ "Create", "a", "RequirementSet", "." ]
def __init__(self, build_dir, src_dir, download_dir, upgrade=False, ignore_installed=False, as_egg=False, target_dir=None, ignore_dependencies=False, force_reinstall=False, use_user_site=False, session=None, pycompile=True, isolated=False, wheel_download_dir=None, wheel_cache=None): """Create a RequirementSet. :param wheel_download_dir: Where still-packed .whl files should be written to. If None they are written to the download_dir parameter. Separate to download_dir to permit only keeping wheel archives for pip wheel. :param download_dir: Where still packed archives should be written to. If None they are not saved, and are deleted immediately after unpacking. :param wheel_cache: The pip wheel cache, for passing to InstallRequirement. """ if session is None: raise TypeError( "RequirementSet() missing 1 required keyword argument: " "'session'" ) self.build_dir = build_dir self.src_dir = src_dir # XXX: download_dir and wheel_download_dir overlap semantically and may # be combined if we're willing to have non-wheel archives present in # the wheelhouse output by 'pip wheel'. self.download_dir = download_dir self.upgrade = upgrade self.ignore_installed = ignore_installed self.force_reinstall = force_reinstall self.requirements = Requirements() # Mapping of alias: real_name self.requirement_aliases = {} self.unnamed_requirements = [] self.ignore_dependencies = ignore_dependencies self.successfully_downloaded = [] self.successfully_installed = [] self.reqs_to_cleanup = [] self.as_egg = as_egg self.use_user_site = use_user_site self.target_dir = target_dir # set from --target option self.session = session self.pycompile = pycompile self.isolated = isolated if wheel_download_dir: wheel_download_dir = normalize_path(wheel_download_dir) self.wheel_download_dir = wheel_download_dir self._wheel_cache = wheel_cache # Maps from install_req -> dependencies_of_install_req self._dependencies = defaultdict(list)
[ "def", "__init__", "(", "self", ",", "build_dir", ",", "src_dir", ",", "download_dir", ",", "upgrade", "=", "False", ",", "ignore_installed", "=", "False", ",", "as_egg", "=", "False", ",", "target_dir", "=", "None", ",", "ignore_dependencies", "=", "False", ",", "force_reinstall", "=", "False", ",", "use_user_site", "=", "False", ",", "session", "=", "None", ",", "pycompile", "=", "True", ",", "isolated", "=", "False", ",", "wheel_download_dir", "=", "None", ",", "wheel_cache", "=", "None", ")", ":", "if", "session", "is", "None", ":", "raise", "TypeError", "(", "\"RequirementSet() missing 1 required keyword argument: \"", "\"'session'\"", ")", "self", ".", "build_dir", "=", "build_dir", "self", ".", "src_dir", "=", "src_dir", "# XXX: download_dir and wheel_download_dir overlap semantically and may", "# be combined if we're willing to have non-wheel archives present in", "# the wheelhouse output by 'pip wheel'.", "self", ".", "download_dir", "=", "download_dir", "self", ".", "upgrade", "=", "upgrade", "self", ".", "ignore_installed", "=", "ignore_installed", "self", ".", "force_reinstall", "=", "force_reinstall", "self", ".", "requirements", "=", "Requirements", "(", ")", "# Mapping of alias: real_name", "self", ".", "requirement_aliases", "=", "{", "}", "self", ".", "unnamed_requirements", "=", "[", "]", "self", ".", "ignore_dependencies", "=", "ignore_dependencies", "self", ".", "successfully_downloaded", "=", "[", "]", "self", ".", "successfully_installed", "=", "[", "]", "self", ".", "reqs_to_cleanup", "=", "[", "]", "self", ".", "as_egg", "=", "as_egg", "self", ".", "use_user_site", "=", "use_user_site", "self", ".", "target_dir", "=", "target_dir", "# set from --target option", "self", ".", "session", "=", "session", "self", ".", "pycompile", "=", "pycompile", "self", ".", "isolated", "=", "isolated", "if", "wheel_download_dir", ":", "wheel_download_dir", "=", "normalize_path", "(", "wheel_download_dir", ")", "self", ".", "wheel_download_dir", "=", "wheel_download_dir", "self", ".", "_wheel_cache", "=", "wheel_cache", "# Maps from install_req -> dependencies_of_install_req", "self", ".", "_dependencies", "=", "defaultdict", "(", "list", ")" ]
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/pip/req/req_set.py#L138-L190
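The explicit session check above means constructing a RequirementSet without one fails fast. A hedged sketch against this pip-era internal API; the import path is an assumption and varies by pip version.

from pip.req import RequirementSet  # internal API; path differs across versions

try:
    RequirementSet(build_dir='build', src_dir='src', download_dir=None)
except TypeError as err:
    print(err)  # RequirementSet() missing 1 required keyword argument: 'session'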
continental/ecal
204dab80a24fe01abca62541133b311bf0c09608
lang/python/core/ecal/core/publisher.py
python
MessagePublisher.set_qos
(self, qos)
return self.c_publisher.set_qos(qos)
set publisher quality of service :param qos: 0 = default, 1 = best effort, 2 = reliable :type qos: int
set publisher quality of service
[ "set", "publisher", "quality", "of", "service" ]
def set_qos(self, qos): """ set publisher quality of service :param qos: 0 = default, 1 = best effort, 2 = reliable :type qos: int """ return self.c_publisher.set_qos(qos)
[ "def", "set_qos", "(", "self", ",", "qos", ")", ":", "return", "self", ".", "c_publisher", ".", "set_qos", "(", "qos", ")" ]
https://github.com/continental/ecal/blob/204dab80a24fe01abca62541133b311bf0c09608/lang/python/core/ecal/core/publisher.py#L52-L59
fasiondog/hikyuu
842751aa25283f9fdafc6f560ea262f79e67a307
hikyuu/shell/hkucmd.py
python
HKUShell.do_record
(self, arg)
Save future commands to filename: RECORD rose.cmd
Save future commands to filename: RECORD rose.cmd
[ "Save", "future", "commands", "to", "filename", ":", "RECORD", "rose", ".", "cmd" ]
def do_record(self, arg): 'Save future commands to filename: RECORD rose.cmd' self.file = open(arg, 'w')
[ "def", "do_record", "(", "self", ",", "arg", ")", ":", "self", ".", "file", "=", "open", "(", "arg", ",", "'w'", ")" ]
https://github.com/fasiondog/hikyuu/blob/842751aa25283f9fdafc6f560ea262f79e67a307/hikyuu/shell/hkucmd.py#L91-L93
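do_record pairs with a precmd hook that echoes each line to self.file; a self-contained sketch of the pattern (it mirrors the stdlib cmd example, not hikyuu's exact shell):

import cmd

class Shell(cmd.Cmd):
    file = None

    def do_record(self, arg):
        'Save future commands to filename: record session.cmd'
        self.file = open(arg, 'w')

    def precmd(self, line):
        if self.file and 'record' not in line:
            print(line, file=self.file)  # replay later with a playback command
        return line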
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/multiprocessing/process.py
python
Process.start
(self)
Start child process
Start child process
[ "Start", "child", "process" ]
def start(self): ''' Start child process ''' assert self._popen is None, 'cannot start a process twice' assert self._parent_pid == os.getpid(), \ 'can only start a process object created by current process' assert not _current_process._daemonic, \ 'daemonic processes are not allowed to have children' _cleanup() if self._Popen is not None: Popen = self._Popen else: from .forking import Popen self._popen = Popen(self) _current_process._children.add(self)
[ "def", "start", "(", "self", ")", ":", "assert", "self", ".", "_popen", "is", "None", ",", "'cannot start a process twice'", "assert", "self", ".", "_parent_pid", "==", "os", ".", "getpid", "(", ")", ",", "'can only start a process object created by current process'", "assert", "not", "_current_process", ".", "_daemonic", ",", "'daemonic processes are not allowed to have children'", "_cleanup", "(", ")", "if", "self", ".", "_Popen", "is", "not", "None", ":", "Popen", "=", "self", ".", "_Popen", "else", ":", "from", ".", "forking", "import", "Popen", "self", ".", "_popen", "=", "Popen", "(", "self", ")", "_current_process", ".", "_children", ".", "add", "(", "self", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/multiprocessing/process.py#L116-L131
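Usage sketch for Process.start; the asserts above are why start() is once-only and must be called from the process that created the object:

import multiprocessing

def work(n):
    print('working on', n)

if __name__ == '__main__':
    p = multiprocessing.Process(target=work, args=(1,))
    p.start()   # a second p.start() trips the 'cannot start a process twice' assert
    p.join()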
intel/llvm
e6d0547e9d99b5a56430c4749f6c7e328bf221ab
lldb/third_party/Python/module/pexpect-4.6/pexpect/screen.py
python
screen.clear_tab
(self)
Clears tab at the current position.
Clears tab at the current position.
[ "Clears", "tab", "at", "the", "current", "position", "." ]
def clear_tab (self): # <ESC>[g '''Clears tab at the current position.''' pass
[ "def", "clear_tab", "(", "self", ")", ":", "# <ESC>[g", "pass" ]
https://github.com/intel/llvm/blob/e6d0547e9d99b5a56430c4749f6c7e328bf221ab/lldb/third_party/Python/module/pexpect-4.6/pexpect/screen.py#L417-L420
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/cython/Cython/Distutils/old_build_ext.py
python
old_build_ext.cython_sources
(self, sources, extension)
return new_sources
Walk the list of source files in 'sources', looking for Cython source files (.pyx and .py). Run Cython on all that are found, and return a modified 'sources' list with Cython source files replaced by the generated C (or C++) files.
Walk the list of source files in 'sources', looking for Cython source files (.pyx and .py). Run Cython on all that are found, and return a modified 'sources' list with Cython source files replaced by the generated C (or C++) files.
[ "Walk", "the", "list", "of", "source", "files", "in", "sources", "looking", "for", "Cython", "source", "files", "(", ".", "pyx", "and", ".", "py", ")", ".", "Run", "Cython", "on", "all", "that", "are", "found", "and", "return", "a", "modified", "sources", "list", "with", "Cython", "source", "files", "replaced", "by", "the", "generated", "C", "(", "or", "C", "++", ")", "files", "." ]
def cython_sources(self, sources, extension): """ Walk the list of source files in 'sources', looking for Cython source files (.pyx and .py). Run Cython on all that are found, and return a modified 'sources' list with Cython source files replaced by the generated C (or C++) files. """ try: from Cython.Compiler.Main \ import CompilationOptions, \ default_options as cython_default_options, \ compile as cython_compile from Cython.Compiler.Errors import PyrexError except ImportError: e = sys.exc_info()[1] print("failed to import Cython: %s" % e) raise DistutilsPlatformError("Cython does not appear to be installed") new_sources = [] cython_sources = [] cython_targets = {} # Setup create_list and cplus from the extension options if # Cython.Distutils.extension.Extension is used, otherwise just # use what was parsed from the command-line or the configuration file. # cplus will also be set to true is extension.language is equal to # 'C++' or 'c++'. #try: # create_listing = self.cython_create_listing or \ # extension.cython_create_listing # cplus = self.cython_cplus or \ # extension.cython_cplus or \ # (extension.language != None and \ # extension.language.lower() == 'c++') #except AttributeError: # create_listing = self.cython_create_listing # cplus = self.cython_cplus or \ # (extension.language != None and \ # extension.language.lower() == 'c++') create_listing = self.cython_create_listing or \ getattr(extension, 'cython_create_listing', 0) line_directives = self.cython_line_directives or \ getattr(extension, 'cython_line_directives', 0) no_c_in_traceback = self.no_c_in_traceback or \ getattr(extension, 'no_c_in_traceback', 0) cplus = self.cython_cplus or getattr(extension, 'cython_cplus', 0) or \ (extension.language and extension.language.lower() == 'c++') cython_gen_pxi = self.cython_gen_pxi or getattr(extension, 'cython_gen_pxi', 0) cython_gdb = self.cython_gdb or getattr(extension, 'cython_gdb', False) cython_compile_time_env = self.cython_compile_time_env or \ getattr(extension, 'cython_compile_time_env', None) # Set up the include_path for the Cython compiler: # 1. Start with the command line option. # 2. Add in any (unique) paths from the extension # cython_include_dirs (if Cython.Distutils.extension is used). # 3. Add in any (unique) paths from the extension include_dirs includes = self.cython_include_dirs try: for i in extension.cython_include_dirs: if not i in includes: includes.append(i) except AttributeError: pass # In case extension.include_dirs is a generator, evaluate it and keep # result extension.include_dirs = list(extension.include_dirs) for i in extension.include_dirs: if not i in includes: includes.append(i) # Set up Cython compiler directives: # 1. Start with the command line option. # 2. Add in any (unique) entries from the extension # cython_directives (if Cython.Distutils.extension is used). directives = self.cython_directives if hasattr(extension, "cython_directives"): directives.update(extension.cython_directives) # Set the target_ext to '.c'. Cython will change this to '.cpp' if # needed. if cplus: target_ext = '.cpp' else: target_ext = '.c' # Decide whether to drop the generated C files into the temp dir # or the source tree. if not self.inplace and (self.cython_c_in_temp or getattr(extension, 'cython_c_in_temp', 0)): target_dir = os.path.join(self.build_temp, "pyrex") for package_name in extension.name.split('.')[:-1]: target_dir = os.path.join(target_dir, package_name) else: target_dir = None newest_dependency = None for source in sources: (base, ext) = os.path.splitext(os.path.basename(source)) if ext == ".py": # FIXME: we might want to special case this some more ext = '.pyx' if ext == ".pyx": # Cython source file output_dir = target_dir or os.path.dirname(source) new_sources.append(os.path.join(output_dir, base + target_ext)) cython_sources.append(source) cython_targets[source] = new_sources[-1] elif ext == '.pxi' or ext == '.pxd': if newest_dependency is None \ or newer(source, newest_dependency): newest_dependency = source else: new_sources.append(source) if not cython_sources: return new_sources module_name = extension.name for source in cython_sources: target = cython_targets[source] depends = [source] + list(extension.depends or ()) if(source[-4:].lower()==".pyx" and os.path.isfile(source[:-3]+"pxd")): depends += [source[:-3]+"pxd"] rebuild = self.force or newer_group(depends, target, 'newer') if not rebuild and newest_dependency is not None: rebuild = newer(newest_dependency, target) if rebuild: log.info("cythoning %s to %s", source, target) self.mkpath(os.path.dirname(target)) if self.inplace: output_dir = os.curdir else: output_dir = self.build_lib options = CompilationOptions(cython_default_options, use_listing_file = create_listing, include_path = includes, compiler_directives = directives, output_file = target, cplus = cplus, emit_linenums = line_directives, c_line_in_traceback = not no_c_in_traceback, generate_pxi = cython_gen_pxi, output_dir = output_dir, gdb_debug = cython_gdb, compile_time_env = cython_compile_time_env) result = cython_compile(source, options=options, full_module_name=module_name) else: log.info("skipping '%s' Cython extension (up-to-date)", target) return new_sources
[ "def", "cython_sources", "(", "self", ",", "sources", ",", "extension", ")", ":", "try", ":", "from", "Cython", ".", "Compiler", ".", "Main", "import", "CompilationOptions", ",", "default_options", "as", "cython_default_options", ",", "compile", "as", "cython_compile", "from", "Cython", ".", "Compiler", ".", "Errors", "import", "PyrexError", "except", "ImportError", ":", "e", "=", "sys", ".", "exc_info", "(", ")", "[", "1", "]", "print", "(", "\"failed to import Cython: %s\"", "%", "e", ")", "raise", "DistutilsPlatformError", "(", "\"Cython does not appear to be installed\"", ")", "new_sources", "=", "[", "]", "cython_sources", "=", "[", "]", "cython_targets", "=", "{", "}", "# Setup create_list and cplus from the extension options if", "# Cython.Distutils.extension.Extension is used, otherwise just", "# use what was parsed from the command-line or the configuration file.", "# cplus will also be set to true is extension.language is equal to", "# 'C++' or 'c++'.", "#try:", "# create_listing = self.cython_create_listing or \\", "# extension.cython_create_listing", "# cplus = self.cython_cplus or \\", "# extension.cython_cplus or \\", "# (extension.language != None and \\", "# extension.language.lower() == 'c++')", "#except AttributeError:", "# create_listing = self.cython_create_listing", "# cplus = self.cython_cplus or \\", "# (extension.language != None and \\", "# extension.language.lower() == 'c++')", "create_listing", "=", "self", ".", "cython_create_listing", "or", "getattr", "(", "extension", ",", "'cython_create_listing'", ",", "0", ")", "line_directives", "=", "self", ".", "cython_line_directives", "or", "getattr", "(", "extension", ",", "'cython_line_directives'", ",", "0", ")", "no_c_in_traceback", "=", "self", ".", "no_c_in_traceback", "or", "getattr", "(", "extension", ",", "'no_c_in_traceback'", ",", "0", ")", "cplus", "=", "self", ".", "cython_cplus", "or", "getattr", "(", "extension", ",", "'cython_cplus'", ",", "0", ")", "or", "(", "extension", ".", "language", "and", "extension", ".", "language", ".", "lower", "(", ")", "==", "'c++'", ")", "cython_gen_pxi", "=", "self", ".", "cython_gen_pxi", "or", "getattr", "(", "extension", ",", "'cython_gen_pxi'", ",", "0", ")", "cython_gdb", "=", "self", ".", "cython_gdb", "or", "getattr", "(", "extension", ",", "'cython_gdb'", ",", "False", ")", "cython_compile_time_env", "=", "self", ".", "cython_compile_time_env", "or", "getattr", "(", "extension", ",", "'cython_compile_time_env'", ",", "None", ")", "# Set up the include_path for the Cython compiler:", "# 1. Start with the command line option.", "# 2. Add in any (unique) paths from the extension", "# cython_include_dirs (if Cython.Distutils.extension is used).", "# 3. Add in any (unique) paths from the extension include_dirs", "includes", "=", "self", ".", "cython_include_dirs", "try", ":", "for", "i", "in", "extension", ".", "cython_include_dirs", ":", "if", "not", "i", "in", "includes", ":", "includes", ".", "append", "(", "i", ")", "except", "AttributeError", ":", "pass", "# In case extension.include_dirs is a generator, evaluate it and keep", "# result", "extension", ".", "include_dirs", "=", "list", "(", "extension", ".", "include_dirs", ")", "for", "i", "in", "extension", ".", "include_dirs", ":", "if", "not", "i", "in", "includes", ":", "includes", ".", "append", "(", "i", ")", "# Set up Cython compiler directives:", "# 1. Start with the command line option.", "# 2. Add in any (unique) entries from the extension", "# cython_directives (if Cython.Distutils.extension is used).", "directives", "=", "self", ".", "cython_directives", "if", "hasattr", "(", "extension", ",", "\"cython_directives\"", ")", ":", "directives", ".", "update", "(", "extension", ".", "cython_directives", ")", "# Set the target_ext to '.c'. Cython will change this to '.cpp' if", "# needed.", "if", "cplus", ":", "target_ext", "=", "'.cpp'", "else", ":", "target_ext", "=", "'.c'", "# Decide whether to drop the generated C files into the temp dir", "# or the source tree.", "if", "not", "self", ".", "inplace", "and", "(", "self", ".", "cython_c_in_temp", "or", "getattr", "(", "extension", ",", "'cython_c_in_temp'", ",", "0", ")", ")", ":", "target_dir", "=", "os", ".", "path", ".", "join", "(", "self", ".", "build_temp", ",", "\"pyrex\"", ")", "for", "package_name", "in", "extension", ".", "name", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ":", "target_dir", "=", "os", ".", "path", ".", "join", "(", "target_dir", ",", "package_name", ")", "else", ":", "target_dir", "=", "None", "newest_dependency", "=", "None", "for", "source", "in", "sources", ":", "(", "base", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "source", ")", ")", "if", "ext", "==", "\".py\"", ":", "# FIXME: we might want to special case this some more", "ext", "=", "'.pyx'", "if", "ext", "==", "\".pyx\"", ":", "# Cython source file", "output_dir", "=", "target_dir", "or", "os", ".", "path", ".", "dirname", "(", "source", ")", "new_sources", ".", "append", "(", "os", ".", "path", ".", "join", "(", "output_dir", ",", "base", "+", "target_ext", ")", ")", "cython_sources", ".", "append", "(", "source", ")", "cython_targets", "[", "source", "]", "=", "new_sources", "[", "-", "1", "]", "elif", "ext", "==", "'.pxi'", "or", "ext", "==", "'.pxd'", ":", "if", "newest_dependency", "is", "None", "or", "newer", "(", "source", ",", "newest_dependency", ")", ":", "newest_dependency", "=", "source", "else", ":", "new_sources", ".", "append", "(", "source", ")", "if", "not", "cython_sources", ":", "return", "new_sources", "module_name", "=", "extension", ".", "name", "for", "source", "in", "cython_sources", ":", "target", "=", "cython_targets", "[", "source", "]", "depends", "=", "[", "source", "]", "+", "list", "(", "extension", ".", "depends", "or", "(", ")", ")", "if", "(", "source", "[", "-", "4", ":", "]", ".", "lower", "(", ")", "==", "\".pyx\"", "and", "os", ".", "path", ".", "isfile", "(", "source", "[", ":", "-", "3", "]", "+", "\"pxd\"", ")", ")", ":", "depends", "+=", "[", "source", "[", ":", "-", "3", "]", "+", "\"pxd\"", "]", "rebuild", "=", "self", ".", "force", "or", "newer_group", "(", "depends", ",", "target", ",", "'newer'", ")", "if", "not", "rebuild", "and", "newest_dependency", "is", "not", "None", ":", "rebuild", "=", "newer", "(", "newest_dependency", ",", "target", ")", "if", "rebuild", ":", "log", ".", "info", "(", "\"cythoning %s to %s\"", ",", "source", ",", "target", ")", "self", ".", "mkpath", "(", "os", ".", "path", ".", "dirname", "(", "target", ")", ")", "if", "self", ".", "inplace", ":", "output_dir", "=", "os", ".", "curdir", "else", ":", "output_dir", "=", "self", ".", "build_lib", "options", "=", "CompilationOptions", "(", "cython_default_options", ",", "use_listing_file", "=", "create_listing", ",", "include_path", "=", "includes", ",", "compiler_directives", "=", "directives", ",", "output_file", "=", "target", ",", "cplus", "=", "cplus", ",", "emit_linenums", "=", "line_directives", ",", "c_line_in_traceback", "=", "not", "no_c_in_traceback", ",", "generate_pxi", "=", "cython_gen_pxi", ",", "output_dir", "=", "output_dir", ",", "gdb_debug", "=", "cython_gdb", ",", "compile_time_env", "=", "cython_compile_time_env", ")", "result", "=", "cython_compile", "(", "source", ",", "options", "=", "options", ",", "full_module_name", "=", "module_name", ")", "else", ":", "log", ".", "info", "(", "\"skipping '%s' Cython extension (up-to-date)\"", ",", "target", ")", "return", "new_sources" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/cython/Cython/Distutils/old_build_ext.py#L197-L351
apache/mesos
97d9a4063332aae3825d78de71611657e05cf5e2
src/python/interface/src/mesos/interface/__init__.py
python
Scheduler.offerRescinded
(self, driver, offerId)
Invoked when an offer is no longer valid (e.g., the slave was lost or another framework used resources in the offer.) If for whatever reason an offer is never rescinded (e.g., dropped message, failing over framework, etc.), a framework that attempts to launch tasks using an invalid offer will receive TASK_LOST status updates for those tasks (see Scheduler.resourceOffers).
Invoked when an offer is no longer valid (e.g., the slave was lost or another framework used resources in the offer.) If for whatever reason an offer is never rescinded (e.g., dropped message, failing over framework, etc.), a framework that attempts to launch tasks using an invalid offer will receive TASK_LOST status updates for those tasks (see Scheduler.resourceOffers).
[ "Invoked", "when", "an", "offer", "is", "no", "longer", "valid", "(", "e", ".", "g", ".", "the", "slave", "was", "lost", "or", "another", "framework", "used", "resources", "in", "the", "offer", ".", ")", "If", "for", "whatever", "reason", "an", "offer", "is", "never", "rescinded", "(", "e", ".", "g", ".", "dropped", "message", "failing", "over", "framework", "etc", ".", ")", "a", "framework", "that", "attempts", "to", "launch", "tasks", "using", "an", "invalid", "offer", "will", "receive", "TASK_LOST", "status", "updates", "for", "those", "tasks", "(", "see", "Scheduler", ".", "resourceOffers", ")", "." ]
def offerRescinded(self, driver, offerId): """ Invoked when an offer is no longer valid (e.g., the slave was lost or another framework used resources in the offer.) If for whatever reason an offer is never rescinded (e.g., dropped message, failing over framework, etc.), a framework that attempts to launch tasks using an invalid offer will receive TASK_LOST status updates for those tasks (see Scheduler.resourceOffers). """
[ "def", "offerRescinded", "(", "self", ",", "driver", ",", "offerId", ")", ":" ]
https://github.com/apache/mesos/blob/97d9a4063332aae3825d78de71611657e05cf5e2/src/python/interface/src/mesos/interface/__init__.py#L78-L86
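Frameworks typically react to this callback by dropping any cached copy of the offer; a minimal override sketch (import path taken from this record; the .value field on the protobuf OfferID is an assumption):

from mesos.interface import Scheduler

class MyScheduler(Scheduler):
    def __init__(self):
        self.cached_offers = {}

    def offerRescinded(self, driver, offerId):
        # Launching on a rescinded offer yields TASK_LOST, so forget it now.
        self.cached_offers.pop(offerId.value, None)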
root-project/root
fcd3583bb14852bf2e8cd2415717cbaac0e75896
main/python/cmdLineUtils.py
python
write
(string,indent=0,end="")
Use sys.stdout.write to write the string with an indentation equal to indent and specifying the end character
Use sys.stdout.write to write the string with an indentation equal to indent and specifying the end character
[ "Use", "sys", ".", "stdout", ".", "write", "to", "write", "the", "string", "with", "an", "indentation", "equal", "to", "indent", "and", "specifying", "the", "end", "character" ]
def write(string,indent=0,end=""): """Use sys.stdout.write to write the string with an indentation equal to indent and specifying the end character""" sys.stdout.write(" "*indent+string+end)
[ "def", "write", "(", "string", ",", "indent", "=", "0", ",", "end", "=", "\"\"", ")", ":", "sys", ".", "stdout", ".", "write", "(", "\" \"", "*", "indent", "+", "string", "+", "end", ")" ]
https://github.com/root-project/root/blob/fcd3583bb14852bf2e8cd2415717cbaac0e75896/main/python/cmdLineUtils.py#L908-L911
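Behavior sketch for write(): the indent spaces are prepended and end is appended verbatim, with no newline unless requested.

import sys

def write(string, indent=0, end=""):
    sys.stdout.write(" " * indent + string + end)

write("value", indent=4, end="\n")  # four spaces, then "value", then a newline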
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python/src/Lib/logging/config.py
python
BaseConfigurator.cfg_convert
(self, value)
return d
Default converter for the cfg:// protocol.
Default converter for the cfg:// protocol.
[ "Default", "converter", "for", "the", "cfg", ":", "//", "protocol", "." ]
def cfg_convert(self, value): """Default converter for the cfg:// protocol.""" rest = value m = self.WORD_PATTERN.match(rest) if m is None: raise ValueError("Unable to convert %r" % value) else: rest = rest[m.end():] d = self.config[m.groups()[0]] #print d, rest while rest: m = self.DOT_PATTERN.match(rest) if m: d = d[m.groups()[0]] else: m = self.INDEX_PATTERN.match(rest) if m: idx = m.groups()[0] if not self.DIGIT_PATTERN.match(idx): d = d[idx] else: try: n = int(idx) # try as number first (most likely) d = d[n] except TypeError: d = d[idx] if m: rest = rest[m.end():] else: raise ValueError('Unable to convert ' '%r at %r' % (value, rest)) #rest should be empty return d
[ "def", "cfg_convert", "(", "self", ",", "value", ")", ":", "rest", "=", "value", "m", "=", "self", ".", "WORD_PATTERN", ".", "match", "(", "rest", ")", "if", "m", "is", "None", ":", "raise", "ValueError", "(", "\"Unable to convert %r\"", "%", "value", ")", "else", ":", "rest", "=", "rest", "[", "m", ".", "end", "(", ")", ":", "]", "d", "=", "self", ".", "config", "[", "m", ".", "groups", "(", ")", "[", "0", "]", "]", "#print d, rest", "while", "rest", ":", "m", "=", "self", ".", "DOT_PATTERN", ".", "match", "(", "rest", ")", "if", "m", ":", "d", "=", "d", "[", "m", ".", "groups", "(", ")", "[", "0", "]", "]", "else", ":", "m", "=", "self", ".", "INDEX_PATTERN", ".", "match", "(", "rest", ")", "if", "m", ":", "idx", "=", "m", ".", "groups", "(", ")", "[", "0", "]", "if", "not", "self", ".", "DIGIT_PATTERN", ".", "match", "(", "idx", ")", ":", "d", "=", "d", "[", "idx", "]", "else", ":", "try", ":", "n", "=", "int", "(", "idx", ")", "# try as number first (most likely)", "d", "=", "d", "[", "n", "]", "except", "TypeError", ":", "d", "=", "d", "[", "idx", "]", "if", "m", ":", "rest", "=", "rest", "[", "m", ".", "end", "(", ")", ":", "]", "else", ":", "raise", "ValueError", "(", "'Unable to convert '", "'%r at %r'", "%", "(", "value", ",", "rest", ")", ")", "#rest should be empty", "return", "d" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/logging/config.py#L407-L439
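cfg_convert backs the cfg:// protocol of logging configuration. It can be exercised directly through BaseConfigurator.convert, which strips the cfg:// prefix before delegating; the config keys below are illustrative.

from logging.config import BaseConfigurator

cfg = BaseConfigurator({'handlers': {'console': {'level': 'INFO'}},
                        'targets': ['a', 'b']})
print(cfg.convert('cfg://handlers.console.level'))  # 'INFO' (dotted access)
print(cfg.convert('cfg://targets[1]'))              # 'b' (index access)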
widelands/widelands
e9f047d46a23d81312237d52eabf7d74e8de52d6
utils/glossary_checks.py
python
load_glossary
(glossary_file, locale)
return result
Build a glossary from the given Transifex glossary csv file for the given locale.
Build a glossary from the given Transifex glossary csv file for the given locale.
[ "Build", "a", "glossary", "from", "the", "given", "Transifex", "glossary", "csv", "file", "for", "the", "given", "locale", "." ]
def load_glossary(glossary_file, locale): """Build a glossary from the given Transifex glossary csv file for the given locale.""" result = [] counter = 0 term_index = 0 term_comment_index = 0 wordclass_index = 0 translation_index = 0 comment_index = 0 for row in read_csv_file(glossary_file): # Detect the column indices if counter == 0: colum_counter = 0 for header in row: if header == 'term': term_index = colum_counter elif header == 'comment': term_comment_index = colum_counter elif header == 'pos': wordclass_index = colum_counter elif header in ('translation_' + locale, locale): translation_index = colum_counter elif header == 'comment_' + locale: comment_index = colum_counter colum_counter = colum_counter + 1 # If there is a translation, parse the entry # We also have some obsolete terms in the glossary that we want to # filter out. elif len(row[translation_index].strip()) > 0 and not row[term_comment_index].startswith('OBSOLETE'): if translation_index == 0: raise Exception( 'Locale %s is missing from glossary file.' % locale) if comment_index == 0: raise Exception( 'Comment field for locale %s is missing from glossary file.' % locale) entry = GlossaryEntry() entry.terms.append(row[term_index].strip()) if row[wordclass_index] == 'Noun': plural = make_english_plural(entry.terms[0]) if len(plural) > 0: entry.terms.append(plural) elif row[wordclass_index] == 'Verb': verb_forms = make_english_verb_forms(entry.terms[0]) for verb_form in verb_forms: entry.terms.append(verb_form) entry.translations.append(row[translation_index].strip()) # Misuse the comment field to provide a list of inflected forms. # Otherwise, we would get tons of false positive hits in the checks # later on and the translators would have our heads on a platter. delimiter = '|' if len(row[comment_index].strip()) > 1 and delimiter in row[comment_index]: inflections = row[comment_index].split(delimiter) for inflection in inflections: entry.translations.append(inflection.strip()) result.append(entry) counter = counter + 1 return result
[ "def", "load_glossary", "(", "glossary_file", ",", "locale", ")", ":", "result", "=", "[", "]", "counter", "=", "0", "term_index", "=", "0", "term_comment_index", "=", "0", "wordclass_index", "=", "0", "translation_index", "=", "0", "comment_index", "=", "0", "for", "row", "in", "read_csv_file", "(", "glossary_file", ")", ":", "# Detect the column indices", "if", "counter", "==", "0", ":", "colum_counter", "=", "0", "for", "header", "in", "row", ":", "if", "header", "==", "'term'", ":", "term_index", "=", "colum_counter", "elif", "header", "==", "'comment'", ":", "term_comment_index", "=", "colum_counter", "elif", "header", "==", "'pos'", ":", "wordclass_index", "=", "colum_counter", "elif", "header", "in", "(", "'translation_'", "+", "locale", ",", "locale", ")", ":", "translation_index", "=", "colum_counter", "elif", "header", "==", "'comment_'", "+", "locale", ":", "comment_index", "=", "colum_counter", "colum_counter", "=", "colum_counter", "+", "1", "# If there is a translation, parse the entry", "# We also have some obsolete terms in the glossary that we want to", "# filter out.", "elif", "len", "(", "row", "[", "translation_index", "]", ".", "strip", "(", ")", ")", ">", "0", "and", "not", "row", "[", "term_comment_index", "]", ".", "startswith", "(", "'OBSOLETE'", ")", ":", "if", "translation_index", "==", "0", ":", "raise", "Exception", "(", "'Locale %s is missing from glossary file.'", "%", "locale", ")", "if", "comment_index", "==", "0", ":", "raise", "Exception", "(", "'Comment field for locale %s is missing from glossary file.'", "%", "locale", ")", "entry", "=", "GlossaryEntry", "(", ")", "entry", ".", "terms", ".", "append", "(", "row", "[", "term_index", "]", ".", "strip", "(", ")", ")", "if", "row", "[", "wordclass_index", "]", "==", "'Noun'", ":", "plural", "=", "make_english_plural", "(", "entry", ".", "terms", "[", "0", "]", ")", "if", "len", "(", "plural", ")", ">", "0", ":", "entry", ".", "terms", ".", "append", "(", "plural", ")", "elif", "row", "[", "wordclass_index", "]", "==", "'Verb'", ":", "verb_forms", "=", "make_english_verb_forms", "(", "entry", ".", "terms", "[", "0", "]", ")", "for", "verb_form", "in", "verb_forms", ":", "entry", ".", "terms", ".", "append", "(", "verb_form", ")", "entry", ".", "translations", ".", "append", "(", "row", "[", "translation_index", "]", ".", "strip", "(", ")", ")", "# Misuse the comment field to provide a list of inflected forms.", "# Otherwise, we would get tons of false positive hits in the checks", "# later on and the translators would have our heads on a platter.", "delimiter", "=", "'|'", "if", "len", "(", "row", "[", "comment_index", "]", ".", "strip", "(", ")", ")", ">", "1", "and", "delimiter", "in", "row", "[", "comment_index", "]", ":", "inflections", "=", "row", "[", "comment_index", "]", ".", "split", "(", "delimiter", ")", "for", "inflection", "in", "inflections", ":", "entry", ".", "translations", ".", "append", "(", "inflection", ".", "strip", "(", ")", ")", "result", ".", "append", "(", "entry", ")", "counter", "=", "counter", "+", "1", "return", "result" ]
https://github.com/widelands/widelands/blob/e9f047d46a23d81312237d52eabf7d74e8de52d6/utils/glossary_checks.py#L208-L268
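The header scan in load_glossary is the usual name-to-index mapping over a csv header row; a compact standalone version with made-up sample data:

import csv
import io

rows = csv.reader(io.StringIO('term,pos,translation_de\nship,Noun,Schiff\n'))
idx = {name: i for i, name in enumerate(next(rows))}  # header -> column index
for row in rows:
    print(row[idx['term']], '->', row[idx['translation_de']])  # ship -> Schiff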
KratosMultiphysics/Kratos
0000833054ed0503424eb28205d6508d9ca6cbbc
applications/CoSimulationApplication/python_scripts/base_classes/co_simulation_io.py
python
CoSimulationIO.ImportCouplingInterface
(self, interface_config)
Imports coupling interface from an external solver External solver sends, CoSimulation receives @param interface_config <python dictionary> : configuration of the interface to be imported
Imports coupling interface from an external solver External solver sends, CoSimulation receives
[ "Imports", "coupling", "interface", "from", "an", "external", "solver", "External", "solver", "sends", "CoSimulation", "receives" ]
def ImportCouplingInterface(self, interface_config): """Imports coupling interface from an external solver External solver sends, CoSimulation receives @param interface_config <python dictionary> : configuration of the interface to be imported """ raise NotImplementedError("This function has to be implemented in the derived class!")
[ "def", "ImportCouplingInterface", "(", "self", ",", "interface_config", ")", ":", "raise", "NotImplementedError", "(", "\"This function has to be implemented in the derived class!\"", ")" ]
https://github.com/KratosMultiphysics/Kratos/blob/0000833054ed0503424eb28205d6508d9ca6cbbc/applications/CoSimulationApplication/python_scripts/base_classes/co_simulation_io.py#L26-L32
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/urllib/request.py
python
localhost
()
return _localhost
Return the IP address of the magic hostname 'localhost'.
Return the IP address of the magic hostname 'localhost'.
[ "Return", "the", "IP", "address", "of", "the", "magic", "hostname", "localhost", "." ]
def localhost(): """Return the IP address of the magic hostname 'localhost'.""" global _localhost if _localhost is None: _localhost = socket.gethostbyname('localhost') return _localhost
[ "def", "localhost", "(", ")", ":", "global", "_localhost", "if", "_localhost", "is", "None", ":", "_localhost", "=", "socket", ".", "gethostbyname", "(", "'localhost'", ")", "return", "_localhost" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/urllib/request.py#L2353-L2358
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/ops/linalg/sparse/sparse_csr_matrix_ops.py
python
matmul
(a, b, transpose_a=False, transpose_b=False, adjoint_a=False, adjoint_b=False, name=None)
Perform a sparse matrix matmul between `a` and `b`. Performs a contraction between `a` and `b` along the two innermost dimensions. If both `a` and `b` are instances of `SparseMatrix`, returns a new instance of `SparseMatrix` (same type as `a`). If one is not an instance of `SparseMatrix`, returns a dense `Tensor`: ``` c = opA(a) . opB(b) ``` where `opA` (resp. `opB`) is the transpose or hermitian transpose depending on the values of `transpose_a` (resp. `transpose_b`) and `adjoint_a` (resp. `adjoint_b`). Args: a: `Tensor` or `SparseMatrix`, having rank `2` or `3`. b: `Tensor` or `SparseMatrix`, having rank `2` or `3`. transpose_a: Python `bool`. transpose_b: Python `bool`. adjoint_a: Python `bool`. adjoint_b: Python `bool`. name: Optional name to use when creating ops. Returns: A `SparseMatrix` if both `a` and `b` are instances of `SparseMatrix`, otherwise a dense `Tensor`.
Perform a sparse matrix matmul between `a` and `b`.
[ "Perform", "a", "sparse", "matrix", "matmul", "between", "a", "and", "b", "." ]
def matmul(a, b, transpose_a=False, transpose_b=False, adjoint_a=False, adjoint_b=False, name=None): """Perform a sparse matrix matmul between `a` and `b`. Performs a contraction between `a` and `b` along the two innermost dimensions. If both `a` and `b` are instances of `SparseMatrix`, returns a new instance of `SparseMatrix` (same type as `a`). If one is not an instance of `SparseMatrix`, returns a dense `Tensor`: ``` c = opA(a) . opB(b) ``` where `opA` (resp. `opB`) is the transpose or hermitian transpose depending on the values of `transpose_a` (resp. `transpose_b`) and `adjoint_a` (resp. `adjoint_b`). Args: a: `Tensor` or `SparseMatrix`, having rank `2` or `3`. b: `Tensor` or `SparseMatrix`, having rank `2` or `3`. transpose_a: Python `bool`. transpose_b: Python `bool`. adjoint_a: Python `bool`. adjoint_b: Python `bool`. name: Optional name to use when creating ops. Returns: A `SparseMatrix` if both `a` and `b` are instances of `SparseMatrix`, otherwise a dense `Tensor`. """ if not isinstance(a, SparseMatrix) and not isinstance(b, SparseMatrix): return math_ops.matmul( a, b, transpose_a=transpose_a, transpose_b=transpose_b, adjoint_a=adjoint_a, adjoint_b=adjoint_b, name=name) # pylint: disable=protected-access a_matrix = a._matrix if isinstance(a, SparseMatrix) else a b_matrix = b._matrix if isinstance(b, SparseMatrix) else b with ops.name_scope(name, "SparseMatrixMatMul", [a_matrix, b_matrix]): if isinstance(a, SparseMatrix) and isinstance(b, SparseMatrix): if not (isinstance(a, type(b)) or isinstance(b, type(a))): raise TypeError("SparseMatrix types don't inherit from each other: " "%s and %s" % (type(a), type(b))) c = sm_ops.sparse_matrix_sparse_mat_mul( a_matrix, b_matrix, transpose_a=transpose_a, transpose_b=transpose_b, adjoint_a=adjoint_a, adjoint_b=adjoint_b, type=a.dtype) # In eager mode, shape inference functions are not called, and the output # shape is not set. We have to infer the output shape here. # TODO(penporn): Set this from the C++ kernel instead. c_handle = matmul_shape_inference(a_matrix, b_matrix, c, transpose_a, transpose_b, adjoint_a, adjoint_b) return a._from_matrix(c, handle_data=c_handle) elif isinstance(a, SparseMatrix): return sm_ops.sparse_matrix_mat_mul( a_matrix, b, transpose_a=transpose_a, transpose_b=transpose_b, adjoint_a=adjoint_a, adjoint_b=adjoint_b) else: # opA(A) . opB(B) = t(nopB(B) . nopA(A)) if not adjoint_a and not adjoint_b: return sm_ops.sparse_matrix_mat_mul( b_matrix, a, transpose_a=not transpose_b, transpose_b=not transpose_a, transpose_output=True) elif not transpose_a and not transpose_b: return sm_ops.sparse_matrix_mat_mul( b_matrix, a, adjoint_a=not adjoint_b, adjoint_b=not adjoint_a, transpose_output=True, conjugate_output=True) else: return sm_ops.sparse_matrix_mat_mul( b_matrix, math_ops.conj(a), transpose_output=True, conjugate_output=adjoint_b)
[ "def", "matmul", "(", "a", ",", "b", ",", "transpose_a", "=", "False", ",", "transpose_b", "=", "False", ",", "adjoint_a", "=", "False", ",", "adjoint_b", "=", "False", ",", "name", "=", "None", ")", ":", "if", "not", "isinstance", "(", "a", ",", "SparseMatrix", ")", "and", "not", "isinstance", "(", "b", ",", "SparseMatrix", ")", ":", "return", "math_ops", ".", "matmul", "(", "a", ",", "b", ",", "transpose_a", "=", "transpose_a", ",", "transpose_b", "=", "transpose_b", ",", "adjoint_a", "=", "adjoint_a", ",", "adjoint_b", "=", "adjoint_b", ",", "name", "=", "name", ")", "# pylint: disable=protected-access", "a_matrix", "=", "a", ".", "_matrix", "if", "isinstance", "(", "a", ",", "SparseMatrix", ")", "else", "a", "b_matrix", "=", "b", ".", "_matrix", "if", "isinstance", "(", "b", ",", "SparseMatrix", ")", "else", "b", "with", "ops", ".", "name_scope", "(", "name", ",", "\"SparseMatrixMatMul\"", ",", "[", "a_matrix", ",", "b_matrix", "]", ")", ":", "if", "isinstance", "(", "a", ",", "SparseMatrix", ")", "and", "isinstance", "(", "b", ",", "SparseMatrix", ")", ":", "if", "not", "(", "isinstance", "(", "a", ",", "type", "(", "b", ")", ")", "or", "isinstance", "(", "b", ",", "type", "(", "a", ")", ")", ")", ":", "raise", "TypeError", "(", "\"SparseMatrix types don't inherit from each other: \"", "\"%s and %s\"", "%", "(", "type", "(", "a", ")", ",", "type", "(", "b", ")", ")", ")", "c", "=", "sm_ops", ".", "sparse_matrix_sparse_mat_mul", "(", "a_matrix", ",", "b_matrix", ",", "transpose_a", "=", "transpose_a", ",", "transpose_b", "=", "transpose_b", ",", "adjoint_a", "=", "adjoint_a", ",", "adjoint_b", "=", "adjoint_b", ",", "type", "=", "a", ".", "dtype", ")", "# In eager mode, shape inference functions are not called, and the output", "# shape is not set. We have to infer the output shape here.", "# TODO(penporn): Set this from the C++ kernel instead.", "c_handle", "=", "matmul_shape_inference", "(", "a_matrix", ",", "b_matrix", ",", "c", ",", "transpose_a", ",", "transpose_b", ",", "adjoint_a", ",", "adjoint_b", ")", "return", "a", ".", "_from_matrix", "(", "c", ",", "handle_data", "=", "c_handle", ")", "elif", "isinstance", "(", "a", ",", "SparseMatrix", ")", ":", "return", "sm_ops", ".", "sparse_matrix_mat_mul", "(", "a_matrix", ",", "b", ",", "transpose_a", "=", "transpose_a", ",", "transpose_b", "=", "transpose_b", ",", "adjoint_a", "=", "adjoint_a", ",", "adjoint_b", "=", "adjoint_b", ")", "else", ":", "# opA(A) . opB(B) = t(nopB(B) . nopA(A))", "if", "not", "adjoint_a", "and", "not", "adjoint_b", ":", "return", "sm_ops", ".", "sparse_matrix_mat_mul", "(", "b_matrix", ",", "a", ",", "transpose_a", "=", "not", "transpose_b", ",", "transpose_b", "=", "not", "transpose_a", ",", "transpose_output", "=", "True", ")", "elif", "not", "transpose_a", "and", "not", "transpose_b", ":", "return", "sm_ops", ".", "sparse_matrix_mat_mul", "(", "b_matrix", ",", "a", ",", "adjoint_a", "=", "not", "adjoint_b", ",", "adjoint_b", "=", "not", "adjoint_a", ",", "transpose_output", "=", "True", ",", "conjugate_output", "=", "True", ")", "else", ":", "return", "sm_ops", ".", "sparse_matrix_mat_mul", "(", "b_matrix", ",", "math_ops", ".", "conj", "(", "a", ")", ",", "transpose_output", "=", "True", ",", "conjugate_output", "=", "adjoint_b", ")" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/linalg/sparse/sparse_csr_matrix_ops.py#L141-L239
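A hedged usage sketch for the `matmul` wrapper in this record. It assumes the internal module path from the record and the `CSRSparseMatrix` class (a `SparseMatrix` subclass defined in the same file); neither is a stable public API, so treat the import and constructor as assumptions for illustration only.

```
import tensorflow as tf
# Internal module path taken from this record's URL; an assumption, not a
# supported public import.
from tensorflow.python.ops.linalg.sparse import sparse_csr_matrix_ops as sm

a_dense = tf.constant([[1.0, 0.0], [0.0, 2.0]])
b_dense = tf.constant([[0.0, 3.0], [4.0, 0.0]])

# CSRSparseMatrix is assumed to accept a dense Tensor, converting it to CSR.
a_sparse = sm.CSRSparseMatrix(a_dense)
b_sparse = sm.CSRSparseMatrix(b_dense)

c_sparse = sm.matmul(a_sparse, b_sparse)  # both sparse -> SparseMatrix result
c_dense = sm.matmul(a_sparse, b_dense)    # mixed operands -> dense Tensor
```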
msitt/blpapi-python
bebcf43668c9e5f5467b1f685f9baebbfc45bc87
src/blpapi/exception.py
python
_ExceptionUtil.raiseOnError
(errorCode, description=None)
Throw the appropriate exception for the specified 'errorCode' if the 'errorCode != 0'.
Throw the appropriate exception for the specified 'errorCode' if the 'errorCode != 0'.
[ "Throw", "the", "appropriate", "exception", "for", "the", "specified", "errorCode", "if", "the", "errorCode", "!", "=", "0", "." ]
def raiseOnError(errorCode, description=None): """Throw the appropriate exception for the specified 'errorCode' if the 'errorCode != 0'. """ if errorCode: _ExceptionUtil.raiseException(errorCode, description)
[ "def", "raiseOnError", "(", "errorCode", ",", "description", "=", "None", ")", ":", "if", "errorCode", ":", "_ExceptionUtil", ".", "raiseException", "(", "errorCode", ",", "description", ")" ]
https://github.com/msitt/blpapi-python/blob/bebcf43668c9e5f5467b1f685f9baebbfc45bc87/src/blpapi/exception.py#L141-L146
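A hedged sketch of the guard pattern this helper supports: call a C-layer function that returns an integer status, then let `raiseOnError` translate any nonzero code into the matching blpapi exception. `fake_c_call` is a hypothetical stand-in for an `internals.*` function, not a real blpapi API, and `_ExceptionUtil` is an internal helper per this record's path.

```
from blpapi.exception import _ExceptionUtil  # internal helper, per this record

def fake_c_call():
    # Hypothetical stand-in for a blpapi C-layer call; 0 means success.
    return 0

errorCode = fake_c_call()
# No-op when errorCode is 0; otherwise raises the mapped blpapi exception.
_ExceptionUtil.raiseOnError(errorCode, "session failed to start")
```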
gimli-org/gimli
17aa2160de9b15ababd9ef99e89b1bc3277bbb23
pygimli/physics/sNMR/mrs.py
python
MRS.loadMRSD
(self, filename, usereal=False, mint=0., maxt=2.0)
Load mrsd (MRS data) file: not really used as in MRSD.
Load mrsd (MRS data) file: not really used as in MRSD.
[ "Load", "mrsd", "(", "MRS", "data", ")", "file", ":", "not", "really", "used", "as", "in", "MRSD", "." ]
def loadMRSD(self, filename, usereal=False, mint=0., maxt=2.0): """Load mrsd (MRS data) file: not really used as in MRSD.""" from scipy.io import loadmat # loading Matlab mat files print("Currently not using mint/maxt & usereal:", mint, maxt, usereal) pl = loadmat(filename, struct_as_record=False, squeeze_me=True)['proclog'] self.q = np.array([q.q for q in pl.Q]) self.t = pl.Q[0].rx.sig[0].t + pl.Q[0].timing.tau_dead1 nq = len(pl.Q) nt = len(self.t) self.dcube = np.zeros((nq, nt)) for i in range(nq): self.dcube[i, :] = np.abs(pl.Q[i].rx.sig[1].V) self.ecube = np.ones((nq, nt))*20e-9
[ "def", "loadMRSD", "(", "self", ",", "filename", ",", "usereal", "=", "False", ",", "mint", "=", "0.", ",", "maxt", "=", "2.0", ")", ":", "from", "scipy", ".", "io", "import", "loadmat", "# loading Matlab mat files", "print", "(", "\"Currently not using mint/maxt & usereal:\"", ",", "mint", ",", "maxt", ",", "usereal", ")", "pl", "=", "loadmat", "(", "filename", ",", "struct_as_record", "=", "False", ",", "squeeze_me", "=", "True", ")", "[", "'proclog'", "]", "self", ".", "q", "=", "np", ".", "array", "(", "[", "q", ".", "q", "for", "q", "in", "pl", ".", "Q", "]", ")", "self", ".", "t", "=", "pl", ".", "Q", "[", "0", "]", ".", "rx", ".", "sig", "[", "0", "]", ".", "t", "+", "pl", ".", "Q", "[", "0", "]", ".", "timing", ".", "tau_dead1", "nq", "=", "len", "(", "pl", ".", "Q", ")", "nt", "=", "len", "(", "self", ".", "t", ")", "self", ".", "dcube", "=", "np", ".", "zeros", "(", "(", "nq", ",", "nt", ")", ")", "for", "i", "in", "range", "(", "nq", ")", ":", "self", ".", "dcube", "[", "i", ",", ":", "]", "=", "np", ".", "abs", "(", "pl", ".", "Q", "[", "i", "]", ".", "rx", ".", "sig", "[", "1", "]", ".", "V", ")", "self", ".", "ecube", "=", "np", ".", "ones", "(", "(", "nq", ",", "nt", ")", ")", "*", "20e-9" ]
https://github.com/gimli-org/gimli/blob/17aa2160de9b15ababd9ef99e89b1bc3277bbb23/pygimli/physics/sNMR/mrs.py#L241-L255
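A hedged usage sketch for `MRS.loadMRSD`. Constructing `MRS()` with no arguments and the file name `"sounding.mrsd"` are both assumptions for illustration; per the code above, the method needs scipy's `loadmat` and fills `q`, `t`, `dcube`, and `ecube` from the mat-file's `proclog` struct.

```
from pygimli.physics.sNMR.mrs import MRS  # import path per this record

mrs = MRS()                    # assumed no-argument construction
mrs.loadMRSD("sounding.mrsd")  # hypothetical file name
print(mrs.dcube.shape)         # (number of pulse moments, number of time gates)
```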
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/tornado/tornado-6/tornado/template.py
python
Template.__init__
( self, template_string: Union[str, bytes], name: str = "<string>", loader: Optional["BaseLoader"] = None, compress_whitespace: Union[bool, _UnsetMarker] = _UNSET, autoescape: Optional[Union[str, _UnsetMarker]] = _UNSET, whitespace: Optional[str] = None, )
Construct a Template. :arg str template_string: the contents of the template file. :arg str name: the filename from which the template was loaded (used for error message). :arg tornado.template.BaseLoader loader: the `~tornado.template.BaseLoader` responsible for this template, used to resolve ``{% include %}`` and ``{% extend %}`` directives. :arg bool compress_whitespace: Deprecated since Tornado 4.3. Equivalent to ``whitespace="single"`` if true and ``whitespace="all"`` if false. :arg str autoescape: The name of a function in the template namespace, or ``None`` to disable escaping by default. :arg str whitespace: A string specifying treatment of whitespace; see `filter_whitespace` for options. .. versionchanged:: 4.3 Added ``whitespace`` parameter; deprecated ``compress_whitespace``.
Construct a Template.
[ "Construct", "a", "Template", "." ]
def __init__( self, template_string: Union[str, bytes], name: str = "<string>", loader: Optional["BaseLoader"] = None, compress_whitespace: Union[bool, _UnsetMarker] = _UNSET, autoescape: Optional[Union[str, _UnsetMarker]] = _UNSET, whitespace: Optional[str] = None, ) -> None: """Construct a Template. :arg str template_string: the contents of the template file. :arg str name: the filename from which the template was loaded (used for error message). :arg tornado.template.BaseLoader loader: the `~tornado.template.BaseLoader` responsible for this template, used to resolve ``{% include %}`` and ``{% extend %}`` directives. :arg bool compress_whitespace: Deprecated since Tornado 4.3. Equivalent to ``whitespace="single"`` if true and ``whitespace="all"`` if false. :arg str autoescape: The name of a function in the template namespace, or ``None`` to disable escaping by default. :arg str whitespace: A string specifying treatment of whitespace; see `filter_whitespace` for options. .. versionchanged:: 4.3 Added ``whitespace`` parameter; deprecated ``compress_whitespace``. """ self.name = escape.native_str(name) if compress_whitespace is not _UNSET: # Convert deprecated compress_whitespace (bool) to whitespace (str). if whitespace is not None: raise Exception("cannot set both whitespace and compress_whitespace") whitespace = "single" if compress_whitespace else "all" if whitespace is None: if loader and loader.whitespace: whitespace = loader.whitespace else: # Whitespace defaults by filename. if name.endswith(".html") or name.endswith(".js"): whitespace = "single" else: whitespace = "all" # Validate the whitespace setting. assert whitespace is not None filter_whitespace(whitespace, "") if not isinstance(autoescape, _UnsetMarker): self.autoescape = autoescape # type: Optional[str] elif loader: self.autoescape = loader.autoescape else: self.autoescape = _DEFAULT_AUTOESCAPE self.namespace = loader.namespace if loader else {} reader = _TemplateReader(name, escape.native_str(template_string), whitespace) self.file = _File(self, _parse(reader, self)) self.code = self._generate_python(loader) self.loader = loader try: # Under python2.5, the fake filename used here must match # the module name used in __name__ below. # The dont_inherit flag prevents template.py's future imports # from being applied to the generated code. self.compiled = compile( escape.to_unicode(self.code), "%s.generated.py" % self.name.replace(".", "_"), "exec", dont_inherit=True, ) except Exception: formatted_code = _format_code(self.code).rstrip() app_log.error("%s code:\n%s", self.name, formatted_code) raise
[ "def", "__init__", "(", "self", ",", "template_string", ":", "Union", "[", "str", ",", "bytes", "]", ",", "name", ":", "str", "=", "\"<string>\"", ",", "loader", ":", "Optional", "[", "\"BaseLoader\"", "]", "=", "None", ",", "compress_whitespace", ":", "Union", "[", "bool", ",", "_UnsetMarker", "]", "=", "_UNSET", ",", "autoescape", ":", "Optional", "[", "Union", "[", "str", ",", "_UnsetMarker", "]", "]", "=", "_UNSET", ",", "whitespace", ":", "Optional", "[", "str", "]", "=", "None", ",", ")", "->", "None", ":", "self", ".", "name", "=", "escape", ".", "native_str", "(", "name", ")", "if", "compress_whitespace", "is", "not", "_UNSET", ":", "# Convert deprecated compress_whitespace (bool) to whitespace (str).", "if", "whitespace", "is", "not", "None", ":", "raise", "Exception", "(", "\"cannot set both whitespace and compress_whitespace\"", ")", "whitespace", "=", "\"single\"", "if", "compress_whitespace", "else", "\"all\"", "if", "whitespace", "is", "None", ":", "if", "loader", "and", "loader", ".", "whitespace", ":", "whitespace", "=", "loader", ".", "whitespace", "else", ":", "# Whitespace defaults by filename.", "if", "name", ".", "endswith", "(", "\".html\"", ")", "or", "name", ".", "endswith", "(", "\".js\"", ")", ":", "whitespace", "=", "\"single\"", "else", ":", "whitespace", "=", "\"all\"", "# Validate the whitespace setting.", "assert", "whitespace", "is", "not", "None", "filter_whitespace", "(", "whitespace", ",", "\"\"", ")", "if", "not", "isinstance", "(", "autoescape", ",", "_UnsetMarker", ")", ":", "self", ".", "autoescape", "=", "autoescape", "# type: Optional[str]", "elif", "loader", ":", "self", ".", "autoescape", "=", "loader", ".", "autoescape", "else", ":", "self", ".", "autoescape", "=", "_DEFAULT_AUTOESCAPE", "self", ".", "namespace", "=", "loader", ".", "namespace", "if", "loader", "else", "{", "}", "reader", "=", "_TemplateReader", "(", "name", ",", "escape", ".", "native_str", "(", "template_string", ")", ",", "whitespace", ")", "self", ".", "file", "=", "_File", "(", "self", ",", "_parse", "(", "reader", ",", "self", ")", ")", "self", ".", "code", "=", "self", ".", "_generate_python", "(", "loader", ")", "self", ".", "loader", "=", "loader", "try", ":", "# Under python2.5, the fake filename used here must match", "# the module name used in __name__ below.", "# The dont_inherit flag prevents template.py's future imports", "# from being applied to the generated code.", "self", ".", "compiled", "=", "compile", "(", "escape", ".", "to_unicode", "(", "self", ".", "code", ")", ",", "\"%s.generated.py\"", "%", "self", ".", "name", ".", "replace", "(", "\".\"", ",", "\"_\"", ")", ",", "\"exec\"", ",", "dont_inherit", "=", "True", ",", ")", "except", "Exception", ":", "formatted_code", "=", "_format_code", "(", "self", ".", "code", ")", ".", "rstrip", "(", ")", "app_log", ".", "error", "(", "\"%s code:\\n%s\"", ",", "self", ".", "name", ",", "formatted_code", ")", "raise" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/tornado/tornado-6/tornado/template.py#L262-L335
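A minimal sketch of the constructor in use, following the documented parameters; `Template` and its `generate()` method are the standard `tornado.template` API, and `generate()` returns bytes.

```
from tornado.template import Template

# whitespace="single" collapses runs of whitespace, as described above.
t = Template("Hello {{ name }}!", whitespace="single")
print(t.generate(name="world"))  # b'Hello world!'
```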