Dataset schema (one row per column, as reported by the dataset viewer):

| column | type | lengths / values |
|---|---|---|
| nwo | stringlengths | 5–86 |
| sha | stringlengths | 40–40 |
| path | stringlengths | 4–189 |
| language | stringclasses | 1 value |
| identifier | stringlengths | 1–94 |
| parameters | stringlengths | 2–4.03k |
| argument_list | stringclasses | 1 value |
| return_statement | stringlengths | 0–11.5k |
| docstring | stringlengths | 1–33.2k |
| docstring_summary | stringlengths | 0–5.15k |
| docstring_tokens | sequence | |
| function | stringlengths | 34–151k |
| function_tokens | sequence | |
| url | stringlengths | 90–278 |

nwo: catboost/catboost
sha: 167f64f237114a4d10b2b4ee42adb4569137debe
path: contrib/python/scikit-learn/py3/sklearn/feature_extraction/_dict_vectorizer.py
language: python
identifier: DictVectorizer.get_feature_names
parameters: (self)
return_statement: return self.feature_names_
docstring: Returns a list of feature names, ordered by their indices. If one-of-K coding is applied to categorical features, this will include the constructed feature names but not the original ones.
docstring_summary: Returns a list of feature names, ordered by their indices.
docstring_tokens:
[ "Returns", "a", "list", "of", "feature", "names", "ordered", "by", "their", "indices", "." ]
function:

```python
def get_feature_names(self):
    """Returns a list of feature names, ordered by their indices.

    If one-of-K coding is applied to categorical features, this will
    include the constructed feature names but not the original ones.
    """
    return self.feature_names_
```

function_tokens:
[ "def", "get_feature_names", "(", "self", ")", ":", "return", "self", ".", "feature_names_" ]
url: https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py3/sklearn/feature_extraction/_dict_vectorizer.py#L309-L315
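
A quick usage sketch for the `DictVectorizer.get_feature_names` record above (the sample data is made up; in later scikit-learn releases this method was replaced by `get_feature_names_out`):

```python
from sklearn.feature_extraction import DictVectorizer

# One-of-K coding: the categorical "city" feature expands into one
# constructed feature per observed value, per the docstring above.
v = DictVectorizer(sparse=False)
v.fit_transform([{"city": "London", "temp": 12.0},
                 {"city": "Paris", "temp": 18.0}])
print(v.get_feature_names())  # ['city=London', 'city=Paris', 'temp']
```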

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/windows/Lib/idlelib/hyperparser.py
language: python
identifier: HyperParser.__init__
parameters: (self, editwin, index)
docstring: To initialize, analyze the surroundings of the given index.
docstring_summary: To initialize, analyze the surroundings of the given index.
docstring_tokens:
[ "To", "initialize", "analyze", "the", "surroundings", "of", "the", "given", "index", "." ]
function:

```python
def __init__(self, editwin, index):
    "To initialize, analyze the surroundings of the given index."
    self.editwin = editwin
    self.text = text = editwin.text
    parser = pyparse.Parser(editwin.indentwidth, editwin.tabwidth)

    def index2line(index):
        return int(float(index))
    lno = index2line(text.index(index))

    if not editwin.prompt_last_line:
        for context in editwin.num_context_lines:
            startat = max(lno - context, 1)
            startatindex = repr(startat) + ".0"
            stopatindex = "%d.end" % lno
            # We add the newline because PyParse requires a newline
            # at end. We add a space so that index won't be at end
            # of line, so that its status will be the same as the
            # char before it, if should.
            parser.set_code(text.get(startatindex, stopatindex)+' \n')
            bod = parser.find_good_parse_start(
                editwin._build_char_in_string_func(startatindex))
            if bod is not None or startat == 1:
                break
        parser.set_lo(bod or 0)
    else:
        r = text.tag_prevrange("console", index)
        if r:
            startatindex = r[1]
        else:
            startatindex = "1.0"
        stopatindex = "%d.end" % lno
        # We add the newline because PyParse requires it. We add a
        # space so that index won't be at end of line, so that its
        # status will be the same as the char before it, if should.
        parser.set_code(text.get(startatindex, stopatindex)+' \n')
        parser.set_lo(0)

    # We want what the parser has, minus the last newline and space.
    self.rawtext = parser.code[:-2]
    # Parser.code apparently preserves the statement we are in, so
    # that stopatindex can be used to synchronize the string with
    # the text box indices.
    self.stopatindex = stopatindex
    self.bracketing = parser.get_last_stmt_bracketing()
    # find which pairs of bracketing are openers. These always
    # correspond to a character of rawtext.
    self.isopener = [i>0 and self.bracketing[i][1] > self.bracketing[i-1][1]
                     for i in range(len(self.bracketing))]

    self.set_index(index)
```

function_tokens:
[ "def", "__init__", "(", "self", ",", "editwin", ",", "index", ")", ":", "self", ".", "editwin", "=", "editwin", "self", ".", "text", "=", "text", "=", "editwin", ".", "text", "parser", "=", "pyparse", ".", "Parser", "(", "editwin", ".", "indentwidth", ",", "editwin", ".", "tabwidth", ")", "def", "index2line", "(", "index", ")", ":", "return", "int", "(", "float", "(", "index", ")", ")", "lno", "=", "index2line", "(", "text", ".", "index", "(", "index", ")", ")", "if", "not", "editwin", ".", "prompt_last_line", ":", "for", "context", "in", "editwin", ".", "num_context_lines", ":", "startat", "=", "max", "(", "lno", "-", "context", ",", "1", ")", "startatindex", "=", "repr", "(", "startat", ")", "+", "\".0\"", "stopatindex", "=", "\"%d.end\"", "%", "lno", "# We add the newline because PyParse requires a newline", "# at end. We add a space so that index won't be at end", "# of line, so that its status will be the same as the", "# char before it, if should.", "parser", ".", "set_code", "(", "text", ".", "get", "(", "startatindex", ",", "stopatindex", ")", "+", "' \\n'", ")", "bod", "=", "parser", ".", "find_good_parse_start", "(", "editwin", ".", "_build_char_in_string_func", "(", "startatindex", ")", ")", "if", "bod", "is", "not", "None", "or", "startat", "==", "1", ":", "break", "parser", ".", "set_lo", "(", "bod", "or", "0", ")", "else", ":", "r", "=", "text", ".", "tag_prevrange", "(", "\"console\"", ",", "index", ")", "if", "r", ":", "startatindex", "=", "r", "[", "1", "]", "else", ":", "startatindex", "=", "\"1.0\"", "stopatindex", "=", "\"%d.end\"", "%", "lno", "# We add the newline because PyParse requires it. We add a", "# space so that index won't be at end of line, so that its", "# status will be the same as the char before it, if should.", "parser", ".", "set_code", "(", "text", ".", "get", "(", "startatindex", ",", "stopatindex", ")", "+", "' \\n'", ")", "parser", ".", "set_lo", "(", "0", ")", "# We want what the parser has, minus the last newline and space.", "self", ".", "rawtext", "=", "parser", ".", "code", "[", ":", "-", "2", "]", "# Parser.code apparently preserves the statement we are in, so", "# that stopatindex can be used to synchronize the string with", "# the text box indices.", "self", ".", "stopatindex", "=", "stopatindex", "self", ".", "bracketing", "=", "parser", ".", "get_last_stmt_bracketing", "(", ")", "# find which pairs of bracketing are openers. These always", "# correspond to a character of rawtext.", "self", ".", "isopener", "=", "[", "i", ">", "0", "and", "self", ".", "bracketing", "[", "i", "]", "[", "1", "]", ">", "self", ".", "bracketing", "[", "i", "-", "1", "]", "[", "1", "]", "for", "i", "in", "range", "(", "len", "(", "self", ".", "bracketing", ")", ")", "]", "self", ".", "set_index", "(", "index", ")" ]
url: https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/idlelib/hyperparser.py#L26-L79

nwo: catboost/catboost
sha: 167f64f237114a4d10b2b4ee42adb4569137debe
path: contrib/tools/python/src/Lib/SocketServer.py
language: python
identifier: BaseServer.process_request
parameters: (self, request, client_address)
docstring: Call finish_request. Overridden by ForkingMixIn and ThreadingMixIn.
docstring_summary: Call finish_request.
docstring_tokens:
[ "Call", "finish_request", "." ]
function:

```python
def process_request(self, request, client_address):
    """Call finish_request.

    Overridden by ForkingMixIn and ThreadingMixIn.
    """
    self.finish_request(request, client_address)
    self.shutdown_request(request)
```

function_tokens:
[ "def", "process_request", "(", "self", ",", "request", ",", "client_address", ")", ":", "self", ".", "finish_request", "(", "request", ",", "client_address", ")", "self", ".", "shutdown_request", "(", "request", ")" ]
url: https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/SocketServer.py#L315-L322
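
A minimal sketch of where `process_request` sits in the Python 2 `SocketServer` request flow; the echo handler is hypothetical, and `ThreadingMixIn` is exactly the override the docstring mentions:

```python
import SocketServer  # Python 2 spelling; the module is `socketserver` on Python 3

class EchoHandler(SocketServer.BaseRequestHandler):
    def handle(self):
        # finish_request() constructs this handler once per request.
        self.request.sendall(self.request.recv(1024))

# ThreadingMixIn overrides process_request() so that finish_request()
# runs in a new thread instead of blocking the accept loop.
class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    pass

server = ThreadedTCPServer(("127.0.0.1", 0), EchoHandler)
# server.serve_forever()  # every accepted connection passes through process_request()
```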

nwo: wlanjie/AndroidFFmpeg
sha: 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
path: tools/fdk-aac-build/x86/toolchain/lib/python2.7/telnetlib.py
language: python
identifier: Telnet.fill_rawq
parameters: (self)
docstring: Fill raw queue from exactly one recv() system call. Block if no data is immediately available. Set self.eof when connection is closed.
docstring_summary: Fill raw queue from exactly one recv() system call.
docstring_tokens:
[ "Fill", "raw", "queue", "from", "exactly", "one", "recv", "()", "system", "call", "." ]
function:

```python
def fill_rawq(self):
    """Fill raw queue from exactly one recv() system call.

    Block if no data is immediately available.  Set self.eof when
    connection is closed.
    """
    if self.irawq >= len(self.rawq):
        self.rawq = ''
        self.irawq = 0
    # The buffer size should be fairly small so as to avoid quadratic
    # behavior in process_rawq() above
    buf = self.sock.recv(50)
    self.msg("recv %r", buf)
    self.eof = (not buf)
    self.rawq = self.rawq + buf
```

function_tokens:
[ "def", "fill_rawq", "(", "self", ")", ":", "if", "self", ".", "irawq", ">=", "len", "(", "self", ".", "rawq", ")", ":", "self", ".", "rawq", "=", "''", "self", ".", "irawq", "=", "0", "# The buffer size should be fairly small so as to avoid quadratic", "# behavior in process_rawq() above", "buf", "=", "self", ".", "sock", ".", "recv", "(", "50", ")", "self", ".", "msg", "(", "\"recv %r\"", ",", "buf", ")", "self", ".", "eof", "=", "(", "not", "buf", ")", "self", ".", "rawq", "=", "self", ".", "rawq", "+", "buf" ]
url: https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/telnetlib.py#L561-L576

nwo: cinder/Cinder
sha: e83f5bb9c01a63eec20168d02953a0879e5100f7
path: docs/libs/markdown/blockparser.py
language: python
identifier: BlockParser.parseDocument
parameters: (self, lines)
return_statement: return util.etree.ElementTree(self.root)
docstring: Parse a markdown document into an ElementTree. Given a list of lines, an ElementTree object (not just a parent Element) is created and the root element is passed to the parser as the parent. The ElementTree object is returned. This should only be called on an entire document, not pieces.
docstring_summary: Parse a markdown document into an ElementTree.
docstring_tokens:
[ "Parse", "a", "markdown", "document", "into", "an", "ElementTree", "." ]
function:

```python
def parseDocument(self, lines):
    """ Parse a markdown document into an ElementTree.

    Given a list of lines, an ElementTree object (not just a parent
    Element) is created and the root element is passed to the parser
    as the parent. The ElementTree object is returned.

    This should only be called on an entire document, not pieces.
    """
    # Create a ElementTree from the lines
    self.root = util.etree.Element(self.markdown.doc_tag)
    self.parseChunk(self.root, '\n'.join(lines))
    return util.etree.ElementTree(self.root)
```

function_tokens:
[ "def", "parseDocument", "(", "self", ",", "lines", ")", ":", "# Create a ElementTree from the lines", "self", ".", "root", "=", "util", ".", "etree", ".", "Element", "(", "self", ".", "markdown", ".", "doc_tag", ")", "self", ".", "parseChunk", "(", "self", ".", "root", ",", "'\\n'", ".", "join", "(", "lines", ")", ")", "return", "util", ".", "etree", ".", "ElementTree", "(", "self", ".", "root", ")" ]
url: https://github.com/cinder/Cinder/blob/e83f5bb9c01a63eec20168d02953a0879e5100f7/docs/libs/markdown/blockparser.py#L53-L66

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/AWSPythonSDK/1.5.8/docutils/nodes.py
language: python
identifier: Element.replace_self
parameters: (self, new)
docstring: Replace `self` node with `new`, where `new` is a node or a list of nodes.
docstring_summary: Replace `self` node with `new`, where `new` is a node or a list of nodes.
docstring_tokens:
[ "Replace", "self", "node", "with", "new", "where", "new", "is", "a", "node", "or", "a", "list", "of", "nodes", "." ]
function:

```python
def replace_self(self, new):
    """
    Replace `self` node with `new`, where `new` is a node or a
    list of nodes.
    """
    update = new
    if not isinstance(new, Node):
        # `new` is a list; update first child.
        try:
            update = new[0]
        except IndexError:
            update = None
    if isinstance(update, Element):
        update.update_basic_atts(self)
    else:
        # `update` is a Text node or `new` is an empty list.
        # Assert that we aren't losing any attributes.
        for att in self.basic_attributes:
            assert not self[att], \
                   'Losing "%s" attribute: %s' % (att, self[att])
    self.parent.replace(self, new)
```

function_tokens:
[ "def", "replace_self", "(", "self", ",", "new", ")", ":", "update", "=", "new", "if", "not", "isinstance", "(", "new", ",", "Node", ")", ":", "# `new` is a list; update first child.", "try", ":", "update", "=", "new", "[", "0", "]", "except", "IndexError", ":", "update", "=", "None", "if", "isinstance", "(", "update", ",", "Element", ")", ":", "update", ".", "update_basic_atts", "(", "self", ")", "else", ":", "# `update` is a Text node or `new` is an empty list.", "# Assert that we aren't losing any attributes.", "for", "att", "in", "self", ".", "basic_attributes", ":", "assert", "not", "self", "[", "att", "]", ",", "'Losing \"%s\" attribute: %s'", "%", "(", "att", ",", "self", "[", "att", "]", ")", "self", ".", "parent", ".", "replace", "(", "self", ",", "new", ")" ]
url: https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/docutils/nodes.py#L935-L955

nwo: cmu-db/noisepage
sha: 79276e68fe83322f1249e8a8be96bd63c583ae56
path: build-support/cpplint.py
language: python
identifier: FindStartOfExpressionInLine
parameters: (line, endpos, stack)
return_statement: return (-1, stack)
docstring: Find position at the matching start of current expression. This is almost the reverse of FindEndOfExpressionInLine, but note that the input position and returned position differs by 1. Args: line: a CleansedLines line. endpos: start searching at this position. stack: nesting stack at endpos. Returns: On finding matching start: (index at matching start, None) On finding an unclosed expression: (-1, None) Otherwise: (-1, new stack at beginning of this line)
docstring_summary: Find position at the matching start of current expression.
docstring_tokens:
[ "Find", "position", "at", "the", "matching", "start", "of", "current", "expression", "." ]
function:

```python
def FindStartOfExpressionInLine(line, endpos, stack):
  """Find position at the matching start of current expression.

  This is almost the reverse of FindEndOfExpressionInLine, but note
  that the input position and returned position differs by 1.

  Args:
    line: a CleansedLines line.
    endpos: start searching at this position.
    stack: nesting stack at endpos.

  Returns:
    On finding matching start: (index at matching start, None)
    On finding an unclosed expression: (-1, None)
    Otherwise: (-1, new stack at beginning of this line)
  """
  i = endpos
  while i >= 0:
    char = line[i]
    if char in ')]}':
      # Found end of expression, push to expression stack
      stack.append(char)
    elif char == '>':
      # Found potential end of template argument list.
      #
      # Ignore it if it's a "->" or ">=" or "operator>"
      if (i > 0 and
          (line[i - 1] == '-' or
           Match(r'\s>=\s', line[i - 1:]) or
           Search(r'\boperator\s*$', line[0:i]))):
        i -= 1
      else:
        stack.append('>')
    elif char == '<':
      # Found potential start of template argument list
      if i > 0 and line[i - 1] == '<':
        # Left shift operator
        i -= 1
      else:
        # If there is a matching '>', we can pop the expression stack.
        # Otherwise, ignore this '<' since it must be an operator.
        if stack and stack[-1] == '>':
          stack.pop()
          if not stack:
            return (i, None)
    elif char in '([{':
      # Found start of expression.
      #
      # If there are any unmatched '>' on the stack, they must be
      # operators.  Remove those.
      while stack and stack[-1] == '>':
        stack.pop()
      if not stack:
        return (-1, None)
      if ((char == '(' and stack[-1] == ')') or
          (char == '[' and stack[-1] == ']') or
          (char == '{' and stack[-1] == '}')):
        stack.pop()
        if not stack:
          return (i, None)
      else:
        # Mismatched parentheses
        return (-1, None)
    elif char == ';':
      # Found something that look like end of statements.  If we are currently
      # expecting a '<', the matching '>' must have been an operator, since
      # template argument list should not contain statements.
      while stack and stack[-1] == '>':
        stack.pop()
      if not stack:
        return (-1, None)

    i -= 1

  return (-1, stack)
```

function_tokens:
[ "def", "FindStartOfExpressionInLine", "(", "line", ",", "endpos", ",", "stack", ")", ":", "i", "=", "endpos", "while", "i", ">=", "0", ":", "char", "=", "line", "[", "i", "]", "if", "char", "in", "')]}'", ":", "# Found end of expression, push to expression stack", "stack", ".", "append", "(", "char", ")", "elif", "char", "==", "'>'", ":", "# Found potential end of template argument list.", "#", "# Ignore it if it's a \"->\" or \">=\" or \"operator>\"", "if", "(", "i", ">", "0", "and", "(", "line", "[", "i", "-", "1", "]", "==", "'-'", "or", "Match", "(", "r'\\s>=\\s'", ",", "line", "[", "i", "-", "1", ":", "]", ")", "or", "Search", "(", "r'\\boperator\\s*$'", ",", "line", "[", "0", ":", "i", "]", ")", ")", ")", ":", "i", "-=", "1", "else", ":", "stack", ".", "append", "(", "'>'", ")", "elif", "char", "==", "'<'", ":", "# Found potential start of template argument list", "if", "i", ">", "0", "and", "line", "[", "i", "-", "1", "]", "==", "'<'", ":", "# Left shift operator", "i", "-=", "1", "else", ":", "# If there is a matching '>', we can pop the expression stack.", "# Otherwise, ignore this '<' since it must be an operator.", "if", "stack", "and", "stack", "[", "-", "1", "]", "==", "'>'", ":", "stack", ".", "pop", "(", ")", "if", "not", "stack", ":", "return", "(", "i", ",", "None", ")", "elif", "char", "in", "'([{'", ":", "# Found start of expression.", "#", "# If there are any unmatched '>' on the stack, they must be", "# operators. Remove those.", "while", "stack", "and", "stack", "[", "-", "1", "]", "==", "'>'", ":", "stack", ".", "pop", "(", ")", "if", "not", "stack", ":", "return", "(", "-", "1", ",", "None", ")", "if", "(", "(", "char", "==", "'('", "and", "stack", "[", "-", "1", "]", "==", "')'", ")", "or", "(", "char", "==", "'['", "and", "stack", "[", "-", "1", "]", "==", "']'", ")", "or", "(", "char", "==", "'{'", "and", "stack", "[", "-", "1", "]", "==", "'}'", ")", ")", ":", "stack", ".", "pop", "(", ")", "if", "not", "stack", ":", "return", "(", "i", ",", "None", ")", "else", ":", "# Mismatched parentheses", "return", "(", "-", "1", ",", "None", ")", "elif", "char", "==", "';'", ":", "# Found something that look like end of statements. If we are currently", "# expecting a '<', the matching '>' must have been an operator, since", "# template argument list should not contain statements.", "while", "stack", "and", "stack", "[", "-", "1", "]", "==", "'>'", ":", "stack", ".", "pop", "(", ")", "if", "not", "stack", ":", "return", "(", "-", "1", ",", "None", ")", "i", "-=", "1", "return", "(", "-", "1", ",", "stack", ")" ]
url: https://github.com/cmu-db/noisepage/blob/79276e68fe83322f1249e8a8be96bd63c583ae56/build-support/cpplint.py#L1866-L1940
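
To make the return contract above concrete, a few hypothetical calls (this assumes `FindStartOfExpressionInLine` is importable from cpplint; indices are 0-based):

```python
# Scanning backwards from the ')' at index 14 finds the matching '('.
print(FindStartOfExpressionInLine("a = foo(bar[0])", 14, []))  # (7, None)

# Hitting an opener with nothing left to match: unclosed expression.
print(FindStartOfExpressionInLine("foo(bar", 6, []))           # (-1, None)

# No match on this line: the stack comes back so the caller can keep
# scanning the previous line.
print(FindStartOfExpressionInLine("x + y", 4, [')']))          # (-1, [')'])
```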

nwo: etternagame/etterna
sha: 8775f74ac9c353320128609d4b4150672e9a6d04
path: extern/crashpad/buildtools/checkdeps/cpp_checker.py
language: python
identifier: CppChecker.ShouldCheck
parameters: (self, file_path)
return_statement: return self.IsCppFile(file_path)
docstring: Check if the new #include file path should be presubmit checked. Args: file_path: file path to be checked Return: bool: True if the file should be checked; False otherwise.
docstring_summary: Check if the new #include file path should be presubmit checked.
docstring_tokens:
[ "Check", "if", "the", "new", "#include", "file", "path", "should", "be", "presubmit", "checked", "." ]
function:

```python
def ShouldCheck(self, file_path):
  """Check if the new #include file path should be presubmit checked.

  Args:
    file_path: file path to be checked

  Return:
    bool: True if the file should be checked; False otherwise.
  """
  return self.IsCppFile(file_path)
```

function_tokens:
[ "def", "ShouldCheck", "(", "self", ",", "file_path", ")", ":", "return", "self", ".", "IsCppFile", "(", "file_path", ")" ]
url: https://github.com/etternagame/etterna/blob/8775f74ac9c353320128609d4b4150672e9a6d04/extern/crashpad/buildtools/checkdeps/cpp_checker.py#L122-L131

nwo: pmq20/node-packer
sha: 12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
path: current/tools/gyp/pylib/gyp/xcode_ninja.py
language: python
identifier: IsValidTargetForWrapper
parameters: (target_extras, executable_target_pattern, spec)
return_statement: return False
docstring: Limit targets for Xcode wrapper. Xcode sometimes performs poorly with too many targets, so only include proper executable targets, with filters to customize. Arguments: target_extras: Regular expression to always add, matching any target. executable_target_pattern: Regular expression limiting executable targets. spec: Specifications for target.
docstring_summary: Limit targets for Xcode wrapper.
docstring_tokens:
[ "Limit", "targets", "for", "Xcode", "wrapper", "." ]
function:

```python
def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
  """Limit targets for Xcode wrapper.

  Xcode sometimes performs poorly with too many targets, so only include
  proper executable targets, with filters to customize.
  Arguments:
    target_extras: Regular expression to always add, matching any target.
    executable_target_pattern: Regular expression limiting executable targets.
    spec: Specifications for target.
  """
  target_name = spec.get('target_name')
  # Always include targets matching target_extras.
  if target_extras is not None and re.search(target_extras, target_name):
    return True

  # Otherwise just show executable targets and xc_tests.
  if (int(spec.get('mac_xctest_bundle', 0)) != 0 or
      (spec.get('type', '') == 'executable' and
       spec.get('product_extension', '') != 'bundle')):

    # If there is a filter and the target does not match, exclude the target.
    if executable_target_pattern is not None:
      if not re.search(executable_target_pattern, target_name):
        return False
    return True
  return False
```

function_tokens:
[ "def", "IsValidTargetForWrapper", "(", "target_extras", ",", "executable_target_pattern", ",", "spec", ")", ":", "target_name", "=", "spec", ".", "get", "(", "'target_name'", ")", "# Always include targets matching target_extras.", "if", "target_extras", "is", "not", "None", "and", "re", ".", "search", "(", "target_extras", ",", "target_name", ")", ":", "return", "True", "# Otherwise just show executable targets and xc_tests.", "if", "(", "int", "(", "spec", ".", "get", "(", "'mac_xctest_bundle'", ",", "0", ")", ")", "!=", "0", "or", "(", "spec", ".", "get", "(", "'type'", ",", "''", ")", "==", "'executable'", "and", "spec", ".", "get", "(", "'product_extension'", ",", "''", ")", "!=", "'bundle'", ")", ")", ":", "# If there is a filter and the target does not match, exclude the target.", "if", "executable_target_pattern", "is", "not", "None", ":", "if", "not", "re", ".", "search", "(", "executable_target_pattern", ",", "target_name", ")", ":", "return", "False", "return", "True", "return", "False" ]
url: https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/tools/gyp/pylib/gyp/xcode_ninja.py#L131-L156
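
A hypothetical set of gyp specs run through the filter above (assuming the function and its `re` import are in scope) to show which branch each one takes:

```python
specs = [
    {'target_name': 'my_app', 'type': 'executable'},      # executable -> kept
    {'target_name': 'my_lib', 'type': 'static_library'},  # neither branch -> dropped
    {'target_name': 'my_tests', 'mac_xctest_bundle': 1},  # xctest -> kept
]
for spec in specs:
    print(spec['target_name'], IsValidTargetForWrapper(None, None, spec))
# my_app True / my_lib False / my_tests True
```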

nwo: baidu-research/tensorflow-allreduce
sha: 66d5b855e90b0949e9fa5cca5599fd729a70e874
path: tensorflow/python/ops/math_grad.py
language: python
identifier: _SegmentSumGrad
parameters: (op, grad)
return_statement: return array_ops.gather(grad, op.inputs[1]), None
docstring: Gradient for SegmentSum.
docstring_summary: Gradient for SegmentSum.
docstring_tokens:
[ "Gradient", "for", "SegmentSum", "." ]
function:

```python
def _SegmentSumGrad(op, grad):
  """Gradient for SegmentSum."""
  return array_ops.gather(grad, op.inputs[1]), None
```

function_tokens:
[ "def", "_SegmentSumGrad", "(", "op", ",", "grad", ")", ":", "return", "array_ops", ".", "gather", "(", "grad", ",", "op", ".", "inputs", "[", "1", "]", ")", ",", "None" ]
url: https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/math_grad.py#L149-L151
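
The gradient rule above is easy to sanity-check outside TensorFlow: segment_sum adds each input row into its segment's output, so the gradient for an input row is just the upstream gradient of its segment, i.e. a gather by segment id. A NumPy sketch (names illustrative, not TF API):

```python
import numpy as np

data = np.array([1.0, 2.0, 3.0, 4.0])
segment_ids = np.array([0, 0, 1, 1])
# Forward: y[s] = sum of data[i] where segment_ids[i] == s  ->  [3., 7.]
# Backward: dy[s]/d(data[i]) == 1 for every i in segment s, so the input
# gradient is the upstream gradient gathered by segment id.
grad_y = np.array([10.0, 20.0])   # upstream gradient, one entry per segment
grad_data = grad_y[segment_ids]   # == array_ops.gather(grad, op.inputs[1])
print(grad_data)                  # [10. 10. 20. 20.]
```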

nwo: tensorflow/deepmath
sha: b5b721f54de1d5d6a02d78f5da5995237f9995f9
path: deepmath/guidance/wavenet.py
language: python
identifier: wavenet_layer
parameters: (inp, depth, width=3, rate=1, context=None, scope=None, reuse=None)
docstring: Single wavenet layer. This assumes that the input is a rank 4 tensor of shape: [batch, reduced_text_dimension, auxilliary_text_dimension, feature_depth] If rate is more than one, this will be reshaped to [B, R//(2**(rate-1)), A*(2**(rate-1)), D] Then a conv2d will be applied with kernel size [width, 1]. The rest of the wavenet activations will be applied and the result will be returned without reshaping, this allows a multilayer wavenet to be implemented by subsequent calls to wavenet_layer and rate=2. Arguments: inp: input tensor depth: depth of the intermediate nonlinear activations before reduced. width: the width of the conv filter, 2 by default. rate: the dilation, use 1 in the first layer and 2 in subsequent layers. context: Optional 2-D [batch, dim] tensor on which to condition each node. scope: name of scope if given. reuse: reuse for variable scope if given. Returns: output: output tensor.
docstring_summary: Single wavenet layer.
docstring_tokens:
[ "Single", "wavenet", "layer", "." ]
function:

```python
def wavenet_layer(inp, depth, width=3, rate=1, context=None,
                  scope=None, reuse=None):
  """Single wavenet layer.

  This assumes that the input is a rank 4 tensor of shape:
    [batch, reduced_text_dimension, auxilliary_text_dimension, feature_depth]

  If rate is more than one, this will be reshaped to
    [B, R//(2**(rate-1)), A*(2**(rate-1)), D]
  Then a conv2d will be applied with kernel size [width, 1].

  The rest of the wavenet activations will be applied and the result
  will be returned without reshaping, this allows a multilayer wavenet
  to be implemented by subsequent calls to wavenet_layer and rate=2.

  Arguments:
    inp: input tensor
    depth: depth of the intermediate nonlinear activations before reduced.
    width: the width of the conv filter, 2 by default.
    rate: the dilation, use 1 in the first layer and 2 in subsequent layers.
    context: Optional 2-D [batch, dim] tensor on which to condition each node.
    scope: name of scope if given.
    reuse: reuse for variable scope if given.

  Returns:
    output: output tensor.
  """
  tf.logging.info('Creating wavenet layer d=%d w=%d r=%d', depth, width, rate)
  with tf.variable_scope(scope, 'wavenet_layer', [inp], reuse=reuse):
    current_shape = inp.get_shape()
    true_shape = tf.shape(inp)
    in_depth = current_shape[3].value
    mul = 2**(rate - 1)
    reshaped = tf.reshape(inp,
                          [true_shape[0],
                           true_shape[1] // mul,
                           mul * true_shape[2],
                           in_depth])
    conved = slim.conv2d(reshaped, 2 * depth, [width, 1], rate=1,
                         padding='SAME', activation_fn=None)
    if context is not None:
      conved += layers.linear(context, 2 * depth)[:, None, None, :]
    act = tf.nn.tanh(conved[:, :, :, :depth])
    gate = tf.nn.sigmoid(conved[:, :, :, depth:])
    z = act * gate
    if in_depth != depth:
      z = slim.conv2d(z, in_depth, [1, 1], padding='SAME', activation_fn=None)
    return z + reshaped
```

function_tokens:
[ "def", "wavenet_layer", "(", "inp", ",", "depth", ",", "width", "=", "3", ",", "rate", "=", "1", ",", "context", "=", "None", ",", "scope", "=", "None", ",", "reuse", "=", "None", ")", ":", "tf", ".", "logging", ".", "info", "(", "'Creating wavenet layer d=%d w=%d r=%d'", ",", "depth", ",", "width", ",", "rate", ")", "with", "tf", ".", "variable_scope", "(", "scope", ",", "'wavenet_layer'", ",", "[", "inp", "]", ",", "reuse", "=", "reuse", ")", ":", "current_shape", "=", "inp", ".", "get_shape", "(", ")", "true_shape", "=", "tf", ".", "shape", "(", "inp", ")", "in_depth", "=", "current_shape", "[", "3", "]", ".", "value", "mul", "=", "2", "**", "(", "rate", "-", "1", ")", "reshaped", "=", "tf", ".", "reshape", "(", "inp", ",", "[", "true_shape", "[", "0", "]", ",", "true_shape", "[", "1", "]", "//", "mul", ",", "mul", "*", "true_shape", "[", "2", "]", ",", "in_depth", "]", ")", "conved", "=", "slim", ".", "conv2d", "(", "reshaped", ",", "2", "*", "depth", ",", "[", "width", ",", "1", "]", ",", "rate", "=", "1", ",", "padding", "=", "'SAME'", ",", "activation_fn", "=", "None", ")", "if", "context", "is", "not", "None", ":", "conved", "+=", "layers", ".", "linear", "(", "context", ",", "2", "*", "depth", ")", "[", ":", ",", "None", ",", "None", ",", ":", "]", "act", "=", "tf", ".", "nn", ".", "tanh", "(", "conved", "[", ":", ",", ":", ",", ":", ",", ":", "depth", "]", ")", "gate", "=", "tf", ".", "nn", ".", "sigmoid", "(", "conved", "[", ":", ",", ":", ",", ":", ",", "depth", ":", "]", ")", "z", "=", "act", "*", "gate", "if", "in_depth", "!=", "depth", ":", "z", "=", "slim", ".", "conv2d", "(", "z", ",", "in_depth", ",", "[", "1", ",", "1", "]", ",", "padding", "=", "'SAME'", ",", "activation_fn", "=", "None", ")", "return", "z", "+", "reshaped" ]
url: https://github.com/tensorflow/deepmath/blob/b5b721f54de1d5d6a02d78f5da5995237f9995f9/deepmath/guidance/wavenet.py#L27-L85
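
The reshape in the layer above is the entire dilation mechanism: folding groups of `2**(rate-1)` rows into the auxiliary axis means a `[width, 1]` convolution over the new row axis reaches inputs that were `2**(rate-1)` rows apart in the original layout. A NumPy sketch of just that shape bookkeeping (illustrative, no TF needed):

```python
import numpy as np

B, R, A, D = 2, 8, 3, 4            # batch, reduced text, auxiliary text, depth
inp = np.arange(B * R * A * D).reshape(B, R, A, D)

rate = 2
mul = 2 ** (rate - 1)              # fold factor: 2
reshaped = inp.reshape(B, R // mul, mul * A, D)
print(reshaped.shape)              # (2, 4, 6, 4)
# One step along the new axis 1 spans `mul` rows of the original axis 1,
# so a [width, 1] kernel on `reshaped` behaves like a dilation-2 kernel
# on `inp`.
print(np.array_equal(reshaped[0, 1, 0], inp[0, 2, 0]))  # True
```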

nwo: idaholab/moose
sha: 9eeebc65e098b4c30f8205fb41591fd5b61eb6ff
path: python/peacock/base/Preferences.py
language: python
identifier: BasePreferenceWidget.setValue
parameters: (self, val)
docstring: Function that child classes need to inherit that sets the value of the widget
docstring_summary: Function that child classes need to inherit that sets the value of the widget
docstring_tokens:
[ "Function", "that", "child", "classes", "need", "to", "inherit", "that", "sets", "the", "value", "of", "the", "widget" ]
function:

```python
def setValue(self, val):
    """
    Function that child classes need to inherit that sets the value of the widget
    """
```

function_tokens:
[ "def", "setValue", "(", "self", ",", "val", ")", ":" ]
url: https://github.com/idaholab/moose/blob/9eeebc65e098b4c30f8205fb41591fd5b61eb6ff/python/peacock/base/Preferences.py#L47-L50

nwo: thalium/icebox
sha: 99d147d5b9269222225443ce171b4fd46d8985d4
path: third_party/expected-lite/script/update-version.py
language: python
identifier: editFilesToVersionFromCommandLine
parameters: ()
docstring: Update version number given on command line in paths from configuration table.
docstring_summary: Update version number given on command line in paths from configuration table.
docstring_tokens:
[ "Update", "version", "number", "given", "on", "command", "line", "in", "paths", "from", "configuration", "table", "." ]
function:

```python
def editFilesToVersionFromCommandLine():
    """Update version number given on command line in paths from configuration table."""
    parser = argparse.ArgumentParser(
        description='Update version number in files.',
        epilog="""""",
        formatter_class=argparse.RawTextHelpFormatter)

    parser.add_argument(
        'version',
        metavar='version',
        type=str,
        nargs=1,
        help='new version number, like 1.2.3')

    parser.add_argument(
        '-v', '--verbose',
        action='store_true',
        help='report the name of the file being processed')

    args = parser.parse_args()

    editFilesToVersion(args.version[0], table, args.verbose)
```

function_tokens:
[ "def", "editFilesToVersionFromCommandLine", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "'Update version number in files.'", ",", "epilog", "=", "\"\"\"\"\"\"", ",", "formatter_class", "=", "argparse", ".", "RawTextHelpFormatter", ")", "parser", ".", "add_argument", "(", "'version'", ",", "metavar", "=", "'version'", ",", "type", "=", "str", ",", "nargs", "=", "1", ",", "help", "=", "'new version number, like 1.2.3'", ")", "parser", ".", "add_argument", "(", "'-v'", ",", "'--verbose'", ",", "action", "=", "'store_true'", ",", "help", "=", "'report the name of the file being processed'", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "editFilesToVersion", "(", "args", ".", "version", "[", "0", "]", ",", "table", ",", "args", ".", "verbose", ")" ]
url: https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/expected-lite/script/update-version.py#L101-L123

nwo: catboost/catboost
sha: 167f64f237114a4d10b2b4ee42adb4569137debe
path: contrib/python/pandas/py2/pandas/core/arrays/datetimes.py
language: python
identifier: DatetimeArray.__iter__
parameters: (self)
docstring: Return an iterator over the boxed values Yields ------- tstamp : Timestamp
docstring_summary: Return an iterator over the boxed values
docstring_tokens:
[ "Return", "an", "iterator", "over", "the", "boxed", "values" ]
function:

```python
def __iter__(self):
    """
    Return an iterator over the boxed values

    Yields
    -------
    tstamp : Timestamp
    """

    # convert in chunks of 10k for efficiency
    data = self.asi8
    length = len(self)
    chunksize = 10000
    chunks = int(length / chunksize) + 1
    for i in range(chunks):
        start_i = i * chunksize
        end_i = min((i + 1) * chunksize, length)
        converted = tslib.ints_to_pydatetime(data[start_i:end_i],
                                             tz=self.tz, freq=self.freq,
                                             box="timestamp")
        for v in converted:
            yield v
```

function_tokens:
[ "def", "__iter__", "(", "self", ")", ":", "# convert in chunks of 10k for efficiency", "data", "=", "self", ".", "asi8", "length", "=", "len", "(", "self", ")", "chunksize", "=", "10000", "chunks", "=", "int", "(", "length", "/", "chunksize", ")", "+", "1", "for", "i", "in", "range", "(", "chunks", ")", ":", "start_i", "=", "i", "*", "chunksize", "end_i", "=", "min", "(", "(", "i", "+", "1", ")", "*", "chunksize", ",", "length", ")", "converted", "=", "tslib", ".", "ints_to_pydatetime", "(", "data", "[", "start_i", ":", "end_i", "]", ",", "tz", "=", "self", ".", "tz", ",", "freq", "=", "self", ".", "freq", ",", "box", "=", "\"timestamp\"", ")", "for", "v", "in", "converted", ":", "yield", "v" ]
url: https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/arrays/datetimes.py#L592-L613
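
The pattern above (convert fixed-size chunks, then yield items) keeps peak memory bounded while still presenting a flat iterator. The same shape of generator in plain Python (illustrative names, no pandas required):

```python
def iter_in_chunks(values, convert, chunksize=10000):
    """Yield convert(chunk) items one by one, one chunk at a time."""
    for start in range(0, len(values), chunksize):
        for v in convert(values[start:start + chunksize]):
            yield v

# Boxing raw integers lazily, two at a time:
print(list(iter_in_chunks(range(5), lambda c: [x * 2 for x in c], 2)))
# [0, 2, 4, 6, 8]
```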

nwo: MythTV/mythtv
sha: d282a209cb8be85d036f85a62a8ec971b67d45f4
path: mythtv/contrib/imports/mirobridge/mirobridge/metadata.py
language: python
identifier: OutStreamEncoder.__getattr__
parameters: (self, attr)
return_statement: return getattr(self.out, attr)
docstring: Delegate everything but write to the stream
docstring_summary: Delegate everything but write to the stream
docstring_tokens:
[ "Delegate", "everything", "but", "write", "to", "the", "stream" ]
function:

```python
def __getattr__(self, attr):
    """Delegate everything but write to the stream"""
    return getattr(self.out, attr)
```

function_tokens:
[ "def", "__getattr__", "(", "self", ",", "attr", ")", ":", "return", "getattr", "(", "self", ".", "out", ",", "attr", ")" ]
url: https://github.com/MythTV/mythtv/blob/d282a209cb8be85d036f85a62a8ec971b67d45f4/mythtv/contrib/imports/mirobridge/mirobridge/metadata.py#L67-L69
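
The record above is the standard delegation idiom: `__getattr__` is only invoked for names not found through normal lookup, so a wrapper can define `write` itself and forward everything else. A self-contained sketch of the same idiom (hypothetical class, not the MythTV code):

```python
import sys

class UpperStream(object):
    """Wrap a stream: override write(), delegate everything else."""
    def __init__(self, out):
        self.out = out

    def write(self, s):
        self.out.write(s.upper())

    def __getattr__(self, attr):
        # Reached only for names UpperStream does not define itself.
        return getattr(self.out, attr)

stream = UpperStream(sys.stdout)
stream.write("hello\n")  # prints HELLO
stream.flush()           # delegated to sys.stdout.flush via __getattr__
```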

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/ctypes/_aix.py
language: python
identifier: get_version
parameters: (name, members)
return_statement: return None
docstring: Sort list of members and return highest numbered version - if it exists. This function is called when an unversioned libFOO.a(libFOO.so) has not been found. Versioning for the member name is expected to follow GNU LIBTOOL conventions: the highest version (x, then X.y, then X.Y.z) * find [libFoo.so.X] * find [libFoo.so.X.Y] * find [libFoo.so.X.Y.Z] Before the GNU convention became the standard scheme regardless of binary size AIX packagers used GNU convention "as-is" for 32-bit archive members but used an "distinguishing" name for 64-bit members. This scheme inserted either 64 or _64 between libFOO and .so - generally libFOO_64.so, but occasionally libFOO64.so
docstring_summary: Sort list of members and return highest numbered version - if it exists. This function is called when an unversioned libFOO.a(libFOO.so) has not been found.
docstring_tokens:
[ "Sort", "list", "of", "members", "and", "return", "highest", "numbered", "version", "-", "if", "it", "exists", ".", "This", "function", "is", "called", "when", "an", "unversioned", "libFOO", ".", "a", "(", "libFOO", ".", "so", ")", "has", "not", "been", "found", "." ]
function:

```python
def get_version(name, members):
    """
    Sort list of members and return highest numbered version - if it exists.
    This function is called when an unversioned libFOO.a(libFOO.so) has
    not been found.

    Versioning for the member name is expected to follow
    GNU LIBTOOL conventions: the highest version (x, then X.y, then X.Y.z)
     * find [libFoo.so.X]
     * find [libFoo.so.X.Y]
     * find [libFoo.so.X.Y.Z]

    Before the GNU convention became the standard scheme regardless of
    binary size AIX packagers used GNU convention "as-is" for 32-bit
    archive members but used an "distinguishing" name for 64-bit members.
    This scheme inserted either 64 or _64 between libFOO and .so
    - generally libFOO_64.so, but occasionally libFOO64.so
    """
    # the expression ending for versions must start as
    # '.so.[0-9]', i.e., *.so.[at least one digit]
    # while multiple, more specific expressions could be specified
    # to search for .so.X, .so.X.Y and .so.X.Y.Z
    # after the first required 'dot' digit
    # any combination of additional 'dot' digits pairs are accepted
    # anything more than libFOO.so.digits.digits.digits
    # should be seen as a member name outside normal expectations
    exprs = [rf'lib{name}\.so\.[0-9]+[0-9.]*',
             rf'lib{name}_?64\.so\.[0-9]+[0-9.]*']
    for expr in exprs:
        versions = []
        for line in members:
            m = re.search(expr, line)
            if m:
                versions.append(m.group(0))
        if versions:
            return _last_version(versions, '.')
    return None
```

function_tokens:
[ "def", "get_version", "(", "name", ",", "members", ")", ":", "# the expression ending for versions must start as", "# '.so.[0-9]', i.e., *.so.[at least one digit]", "# while multiple, more specific expressions could be specified", "# to search for .so.X, .so.X.Y and .so.X.Y.Z", "# after the first required 'dot' digit", "# any combination of additional 'dot' digits pairs are accepted", "# anything more than libFOO.so.digits.digits.digits", "# should be seen as a member name outside normal expectations", "exprs", "=", "[", "rf'lib{name}\\.so\\.[0-9]+[0-9.]*'", ",", "rf'lib{name}_?64\\.so\\.[0-9]+[0-9.]*'", "]", "for", "expr", "in", "exprs", ":", "versions", "=", "[", "]", "for", "line", "in", "members", ":", "m", "=", "re", ".", "search", "(", "expr", ",", "line", ")", "if", "m", ":", "versions", ".", "append", "(", "m", ".", "group", "(", "0", ")", ")", "if", "versions", ":", "return", "_last_version", "(", "versions", ",", "'.'", ")", "return", "None" ]
url: https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/ctypes/_aix.py#L174-L210
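
The version-matching logic above is easy to exercise stand-alone; the regexes below are copied from the function, and `_last_version` (which picks the highest match) is skipped for brevity:

```python
import re

name = "Foo"
members = ["libFoo.so.1", "libFoo.so.1.2.3", "libFoo_64.so.2", "libbar.so.9"]

expr = rf'lib{name}\.so\.[0-9]+[0-9.]*'
print([m.group(0) for line in members if (m := re.search(expr, line))])
# ['libFoo.so.1', 'libFoo.so.1.2.3']

expr64 = rf'lib{name}_?64\.so\.[0-9]+[0-9.]*'
print([m.group(0) for line in members if (m := re.search(expr64, line))])
# ['libFoo_64.so.2']
```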

nwo: aws/lumberyard
sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/monkey.py
language: python
identifier: _patch_distribution_metadata
parameters: ()
docstring: Patch write_pkg_file and read_pkg_file for higher metadata standards
docstring_summary: Patch write_pkg_file and read_pkg_file for higher metadata standards
docstring_tokens:
[ "Patch", "write_pkg_file", "and", "read_pkg_file", "for", "higher", "metadata", "standards" ]
function:

```python
def _patch_distribution_metadata():
    """Patch write_pkg_file and read_pkg_file for higher metadata standards"""
    for attr in ('write_pkg_file', 'read_pkg_file', 'get_metadata_version'):
        new_val = getattr(setuptools.dist, attr)
        setattr(distutils.dist.DistributionMetadata, attr, new_val)
```

function_tokens:
[ "def", "_patch_distribution_metadata", "(", ")", ":", "for", "attr", "in", "(", "'write_pkg_file'", ",", "'read_pkg_file'", ",", "'get_metadata_version'", ")", ":", "new_val", "=", "getattr", "(", "setuptools", ".", "dist", ",", "attr", ")", "setattr", "(", "distutils", ".", "dist", ".", "DistributionMetadata", ",", "attr", ",", "new_val", ")" ]
url: https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/monkey.py#L104-L108
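
The patch above is plain attribute replacement on a class object; the same idiom in miniature (hypothetical classes, no setuptools/distutils involved):

```python
class LegacyMetadata(object):
    def write_summary(self):
        return "legacy format"

class ModernMetadata(object):
    def write_summary(self):
        return "modern format"

# Monkey-patch: copy the newer implementations onto the legacy class,
# mirroring how setuptools swaps its methods into distutils' class.
for attr in ('write_summary',):
    setattr(LegacyMetadata, attr, getattr(ModernMetadata, attr))

print(LegacyMetadata().write_summary())  # modern format
```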

nwo: wlanjie/AndroidFFmpeg
sha: 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
path: tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib-tk/Tkinter.py
language: python
identifier: Wm.wm_colormapwindows
parameters: (self, *wlist)
return_statement: return map(self._nametowidget, self.tk.call(args))
docstring: Store list of window names (WLIST) into WM_COLORMAPWINDOWS property of this widget. This list contains windows whose colormaps differ from their parents. Return current list of widgets if WLIST is empty.
docstring_summary: Store list of window names (WLIST) into WM_COLORMAPWINDOWS property of this widget. This list contains windows whose colormaps differ from their parents. Return current list of widgets if WLIST is empty.
docstring_tokens:
[ "Store", "list", "of", "window", "names", "(", "WLIST", ")", "into", "WM_COLORMAPWINDOWS", "property", "of", "this", "widget", ".", "This", "list", "contains", "windows", "whose", "colormaps", "differ", "from", "their", "parents", ".", "Return", "current", "list", "of", "widgets", "if", "WLIST", "is", "empty", "." ]
function:

```python
def wm_colormapwindows(self, *wlist):
    """Store list of window names (WLIST) into WM_COLORMAPWINDOWS property
    of this widget. This list contains windows whose colormaps differ from
    their parents. Return current list of widgets if WLIST is empty."""
    if len(wlist) > 1:
        wlist = (wlist,)  # Tk needs a list of windows here
    args = ('wm', 'colormapwindows', self._w) + wlist
    return map(self._nametowidget, self.tk.call(args))
```

function_tokens:
[ "def", "wm_colormapwindows", "(", "self", ",", "*", "wlist", ")", ":", "if", "len", "(", "wlist", ")", ">", "1", ":", "wlist", "=", "(", "wlist", ",", ")", "# Tk needs a list of windows here", "args", "=", "(", "'wm'", ",", "'colormapwindows'", ",", "self", ".", "_w", ")", "+", "wlist", "return", "map", "(", "self", ".", "_nametowidget", ",", "self", ".", "tk", ".", "call", "(", "args", ")", ")" ]
url: https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib-tk/Tkinter.py#L1561-L1568

nwo: tensorflow/tensorflow
sha: 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
path: tensorflow/python/ops/ragged/ragged_getitem.py
language: python
identifier: _ragged_getitem_inner_dimensions
parameters: (rt_input, key_list)
docstring: Retrieve inner dimensions, keeping outermost dimension unchanged. Args: rt_input: The `RaggedTensor` or `Tensor` from which a piece should be extracted. key_list: The __getitem__ keys for slicing the inner dimensions. Returns: A `RaggedTensor`. Raises: ValueError: If key_list is not supported.
docstring_summary: Retrieve inner dimensions, keeping outermost dimension unchanged.
docstring_tokens:
[ "Retrieve", "inner", "dimensions", "keeping", "outermost", "dimension", "unchanged", "." ]
function:

```python
def _ragged_getitem_inner_dimensions(rt_input, key_list):
  """Retrieve inner dimensions, keeping outermost dimension unchanged.

  Args:
    rt_input: The `RaggedTensor` or `Tensor` from which a piece should be
      extracted.
    key_list: The __getitem__ keys for slicing the inner dimensions.

  Returns:
    A `RaggedTensor`.

  Raises:
    ValueError: If key_list is not supported.
  """
  if not key_list:
    return rt_input

  if isinstance(rt_input, ops.Tensor):
    return rt_input.__getitem__([slice(None, None, None)] + key_list)

  column_key = key_list[0]
  if column_key is Ellipsis:
    expanded_key_list = _expand_ellipsis(key_list, rt_input.values.shape.ndims)
    return _ragged_getitem_inner_dimensions(rt_input, expanded_key_list)

  # Adding a new axis to a ragged inner dimension: recursively get the inner
  # dimensions of rt_input with key_list[1:], and then wrap the result in a
  # RaggedTensor that puts each value in its own row.
  if column_key is array_ops.newaxis:
    inner_rt = _ragged_getitem_inner_dimensions(rt_input, key_list[1:])
    nsplits = tensor_shape.dimension_at_index(inner_rt.row_splits.shape, 0)
    if nsplits.value is not None:
      nsplits = nsplits.value
    else:
      nsplits = array_ops.shape(inner_rt.row_splits,
                                out_type=inner_rt.row_splits.dtype)[0]
    return ragged_tensor.RaggedTensor.from_uniform_row_length(
        inner_rt, 1, nrows=nsplits - 1, validate=False)

  # Slicing a range of columns in a ragged inner dimension.  We use a
  # recursive call to process the values, and then assemble a RaggedTensor
  # with those values.
  if isinstance(column_key, slice):
    if (column_key.start is None and column_key.stop is None and
        column_key.step is None):
      # Trivial slice: recursively process all values, & splits is unchanged.
      return rt_input.with_values(
          _ragged_getitem_inner_dimensions(rt_input.values, key_list[1:]))
    else:
      if not (isinstance(column_key.start, (ops.Tensor, int, type(None))) and
              isinstance(column_key.stop, (ops.Tensor, int, type(None)))):
        raise TypeError("slice offsets must be integers or None")
      # Nontrivial slice: use ragged_gather to extract the indicated slice as
      # a new RaggedTensor (inner_rt), and then recursively process its values.
      starts = rt_input.row_splits[:-1]
      limits = rt_input.row_splits[1:]
      step = 1 if column_key.step is None else column_key.step
      lower_bound = _if_ge_zero(step, lambda: starts, lambda: starts - 1)
      upper_bound = _if_ge_zero(step, lambda: limits, lambda: limits - 1)
      # inner_rt_starts[i] = index to start gathering for row i.
      if column_key.start is None:
        inner_rt_starts = _if_ge_zero(step, lambda: starts, lambda: limits - 1)
      else:
        start_offset = math_ops.cast(column_key.start, starts.dtype)
        inner_rt_starts = _if_ge_zero(
            column_key.start,
            lambda: math_ops.minimum(starts + start_offset, upper_bound),
            lambda: math_ops.maximum(limits + start_offset, lower_bound))
      # inner_rt_limits[i] = index to stop gathering for row i.
      if column_key.stop is None:
        inner_rt_limits = _if_ge_zero(step, lambda: limits, lambda: starts - 1)
      else:
        stop_offset = math_ops.cast(column_key.stop, starts.dtype)
        inner_rt_limits = _if_ge_zero(
            column_key.stop,
            lambda: math_ops.minimum(starts + stop_offset, upper_bound),
            lambda: math_ops.maximum(limits + stop_offset, lower_bound))
      inner_rt = _build_ragged_tensor_from_value_ranges(
          inner_rt_starts, inner_rt_limits, column_key.step, rt_input.values)
      # If the row dimension is uniform, then calculate the new
      # uniform_row_length, and rebuild inner_rt using that uniform_row_lengths.
      if rt_input.uniform_row_length is not None:
        new_row_length = _slice_length(rt_input.uniform_row_length, column_key)
        inner_rt = ragged_tensor.RaggedTensor.from_uniform_row_length(
            inner_rt.values, new_row_length, rt_input.nrows())
      return inner_rt.with_values(
          _ragged_getitem_inner_dimensions(inner_rt.values, key_list[1:]))

  # Indexing a single column in a ragged inner dimension: raise an Exception.
  # See RaggedTensor.__getitem__.__doc__ for an explanation of why indexing
  # into a ragged inner dimension is problematic.
  if rt_input.uniform_row_length is None:
    raise ValueError("Cannot index into an inner ragged dimension.")

  # Indexing a single column in a uniform inner dimension: check that the
  # given index is in-bounds, and then use a strided slice over rt_input.values
  # to take the indicated element from each row.
  row_length = rt_input.uniform_row_length
  column_key = math_ops.cast(column_key, row_length.dtype)
  oob_err_msg = "Index out of bounds when indexing into a ragged tensor"
  oob_checks = [
      check_ops.assert_greater_equal(
          column_key, -row_length, message=oob_err_msg),
      check_ops.assert_less(column_key, row_length, message=oob_err_msg),
  ]
  with ops.control_dependencies(oob_checks):
    offset = _if_ge_zero(column_key, lambda: column_key,
                         lambda: row_length + column_key)
    sliced_rt = rt_input.values[offset::row_length]
    return _ragged_getitem_inner_dimensions(sliced_rt, key_list[1:])
```

function_tokens:
[ "def", "_ragged_getitem_inner_dimensions", "(", "rt_input", ",", "key_list", ")", ":", "if", "not", "key_list", ":", "return", "rt_input", "if", "isinstance", "(", "rt_input", ",", "ops", ".", "Tensor", ")", ":", "return", "rt_input", ".", "__getitem__", "(", "[", "slice", "(", "None", ",", "None", ",", "None", ")", "]", "+", "key_list", ")", "column_key", "=", "key_list", "[", "0", "]", "if", "column_key", "is", "Ellipsis", ":", "expanded_key_list", "=", "_expand_ellipsis", "(", "key_list", ",", "rt_input", ".", "values", ".", "shape", ".", "ndims", ")", "return", "_ragged_getitem_inner_dimensions", "(", "rt_input", ",", "expanded_key_list", ")", "# Adding a new axis to a ragged inner dimension: recursively get the inner", "# dimensions of rt_input with key_list[1:], and then wrap the result in a", "# RaggedTensor that puts each value in its own row.", "if", "column_key", "is", "array_ops", ".", "newaxis", ":", "inner_rt", "=", "_ragged_getitem_inner_dimensions", "(", "rt_input", ",", "key_list", "[", "1", ":", "]", ")", "nsplits", "=", "tensor_shape", ".", "dimension_at_index", "(", "inner_rt", ".", "row_splits", ".", "shape", ",", "0", ")", "if", "nsplits", ".", "value", "is", "not", "None", ":", "nsplits", "=", "nsplits", ".", "value", "else", ":", "nsplits", "=", "array_ops", ".", "shape", "(", "inner_rt", ".", "row_splits", ",", "out_type", "=", "inner_rt", ".", "row_splits", ".", "dtype", ")", "[", "0", "]", "return", "ragged_tensor", ".", "RaggedTensor", ".", "from_uniform_row_length", "(", "inner_rt", ",", "1", ",", "nrows", "=", "nsplits", "-", "1", ",", "validate", "=", "False", ")", "# Slicing a range of columns in a ragged inner dimension. We use a", "# recursive call to process the values, and then assemble a RaggedTensor", "# with those values.", "if", "isinstance", "(", "column_key", ",", "slice", ")", ":", "if", "(", "column_key", ".", "start", "is", "None", "and", "column_key", ".", "stop", "is", "None", "and", "column_key", ".", "step", "is", "None", ")", ":", "# Trivial slice: recursively process all values, & splits is unchanged.", "return", "rt_input", ".", "with_values", "(", "_ragged_getitem_inner_dimensions", "(", "rt_input", ".", "values", ",", "key_list", "[", "1", ":", "]", ")", ")", "else", ":", "if", "not", "(", "isinstance", "(", "column_key", ".", "start", ",", "(", "ops", ".", "Tensor", ",", "int", ",", "type", "(", "None", ")", ")", ")", "and", "isinstance", "(", "column_key", ".", "stop", ",", "(", "ops", ".", "Tensor", ",", "int", ",", "type", "(", "None", ")", ")", ")", ")", ":", "raise", "TypeError", "(", "\"slice offsets must be integers or None\"", ")", "# Nontrivial slice: use ragged_gather to extract the indicated slice as", "# a new RaggedTensor (inner_rt), and then recursively process its values.", "starts", "=", "rt_input", ".", "row_splits", "[", ":", "-", "1", "]", "limits", "=", "rt_input", ".", "row_splits", "[", "1", ":", "]", "step", "=", "1", "if", "column_key", ".", "step", "is", "None", "else", "column_key", ".", "step", "lower_bound", "=", "_if_ge_zero", "(", "step", ",", "lambda", ":", "starts", ",", "lambda", ":", "starts", "-", "1", ")", "upper_bound", "=", "_if_ge_zero", "(", "step", ",", "lambda", ":", "limits", ",", "lambda", ":", "limits", "-", "1", ")", "# inner_rt_starts[i] = index to start gathering for row i.", "if", "column_key", ".", "start", "is", "None", ":", "inner_rt_starts", "=", "_if_ge_zero", "(", "step", ",", "lambda", ":", "starts", ",", "lambda", ":", "limits", "-", "1", ")", "else", ":", "start_offset", "=", 
"math_ops", ".", "cast", "(", "column_key", ".", "start", ",", "starts", ".", "dtype", ")", "inner_rt_starts", "=", "_if_ge_zero", "(", "column_key", ".", "start", ",", "lambda", ":", "math_ops", ".", "minimum", "(", "starts", "+", "start_offset", ",", "upper_bound", ")", ",", "lambda", ":", "math_ops", ".", "maximum", "(", "limits", "+", "start_offset", ",", "lower_bound", ")", ")", "# inner_rt_limits[i] = index to stop gathering for row i.", "if", "column_key", ".", "stop", "is", "None", ":", "inner_rt_limits", "=", "_if_ge_zero", "(", "step", ",", "lambda", ":", "limits", ",", "lambda", ":", "starts", "-", "1", ")", "else", ":", "stop_offset", "=", "math_ops", ".", "cast", "(", "column_key", ".", "stop", ",", "starts", ".", "dtype", ")", "inner_rt_limits", "=", "_if_ge_zero", "(", "column_key", ".", "stop", ",", "lambda", ":", "math_ops", ".", "minimum", "(", "starts", "+", "stop_offset", ",", "upper_bound", ")", ",", "lambda", ":", "math_ops", ".", "maximum", "(", "limits", "+", "stop_offset", ",", "lower_bound", ")", ")", "inner_rt", "=", "_build_ragged_tensor_from_value_ranges", "(", "inner_rt_starts", ",", "inner_rt_limits", ",", "column_key", ".", "step", ",", "rt_input", ".", "values", ")", "# If the row dimension is uniform, then calculate the new", "# uniform_row_length, and rebuild inner_rt using that uniform_row_lengths.", "if", "rt_input", ".", "uniform_row_length", "is", "not", "None", ":", "new_row_length", "=", "_slice_length", "(", "rt_input", ".", "uniform_row_length", ",", "column_key", ")", "inner_rt", "=", "ragged_tensor", ".", "RaggedTensor", ".", "from_uniform_row_length", "(", "inner_rt", ".", "values", ",", "new_row_length", ",", "rt_input", ".", "nrows", "(", ")", ")", "return", "inner_rt", ".", "with_values", "(", "_ragged_getitem_inner_dimensions", "(", "inner_rt", ".", "values", ",", "key_list", "[", "1", ":", "]", ")", ")", "# Indexing a single column in a ragged inner dimension: raise an Exception.", "# See RaggedTensor.__getitem__.__doc__ for an explanation of why indexing", "# into a ragged inner dimension is problematic.", "if", "rt_input", ".", "uniform_row_length", "is", "None", ":", "raise", "ValueError", "(", "\"Cannot index into an inner ragged dimension.\"", ")", "# Indexing a single column in a uniform inner dimension: check that the", "# given index is in-bounds, and then use a strided slice over rt_input.values", "# to take the indicated element from each row.", "row_length", "=", "rt_input", ".", "uniform_row_length", "column_key", "=", "math_ops", ".", "cast", "(", "column_key", ",", "row_length", ".", "dtype", ")", "oob_err_msg", "=", "\"Index out of bounds when indexing into a ragged tensor\"", "oob_checks", "=", "[", "check_ops", ".", "assert_greater_equal", "(", "column_key", ",", "-", "row_length", ",", "message", "=", "oob_err_msg", ")", ",", "check_ops", ".", "assert_less", "(", "column_key", ",", "row_length", ",", "message", "=", "oob_err_msg", ")", ",", "]", "with", "ops", ".", "control_dependencies", "(", "oob_checks", ")", ":", "offset", "=", "_if_ge_zero", "(", "column_key", ",", "lambda", ":", "column_key", ",", "lambda", ":", "row_length", "+", "column_key", ")", "sliced_rt", "=", "rt_input", ".", "values", "[", "offset", ":", ":", "row_length", "]", "return", "_ragged_getitem_inner_dimensions", "(", "sliced_rt", ",", "key_list", "[", "1", ":", "]", ")" ]
url: https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/ragged/ragged_getitem.py#L232-L342

nwo: rsummers11/CADLab
sha: 976ed959a0b5208bb4173127a7ef732ac73a9b6f
path: MULAN_universal_lesion_analysis/maskrcnn/modeling/rpn/rpn.py
language: python
identifier: RPNModule.forward
parameters: (self, images, features, targets=None)
docstring: Arguments: images (ImageList): images for which we want to compute the predictions features (list[Tensor]): features computed from the images that are used for computing the predictions. Each tensor in the list correspond to different feature levels targets (list[BoxList): ground-truth boxes present in the image (optional) Returns: boxes (list[BoxList]): the predicted boxes from the RPN, one BoxList per image. losses (dict[Tensor]): the losses for the model during training. During testing, it is an empty dict.
docstring_summary: Arguments: images (ImageList): images for which we want to compute the predictions features (list[Tensor]): features computed from the images that are used for computing the predictions. Each tensor in the list correspond to different feature levels targets (list[BoxList): ground-truth boxes present in the image (optional)
docstring_tokens:
[ "Arguments", ":", "images", "(", "ImageList", ")", ":", "images", "for", "which", "we", "want", "to", "compute", "the", "predictions", "features", "(", "list", "[", "Tensor", "]", ")", ":", "features", "computed", "from", "the", "images", "that", "are", "used", "for", "computing", "the", "predictions", ".", "Each", "tensor", "in", "the", "list", "correspond", "to", "different", "feature", "levels", "targets", "(", "list", "[", "BoxList", ")", ":", "ground", "-", "truth", "boxes", "present", "in", "the", "image", "(", "optional", ")" ]
function:

```python
def forward(self, images, features, targets=None):
    """
    Arguments:
        images (ImageList): images for which we want to compute the predictions
        features (list[Tensor]): features computed from the images that are
            used for computing the predictions. Each tensor in the list
            correspond to different feature levels
        targets (list[BoxList): ground-truth boxes present in the image (optional)

    Returns:
        boxes (list[BoxList]): the predicted boxes from the RPN, one BoxList per
            image.
        losses (dict[Tensor]): the losses for the model during training. During
            testing, it is an empty dict.
    """
    objectness, rpn_box_regression = self.head(features)
    anchors = self.anchor_generator(images, features)

    if self.training:
        return self._forward_train(anchors, objectness,
                                   rpn_box_regression, targets)
    else:
        return self._forward_test(anchors, objectness,
                                  rpn_box_regression, targets)
```

function_tokens:
[ "def", "forward", "(", "self", ",", "images", ",", "features", ",", "targets", "=", "None", ")", ":", "objectness", ",", "rpn_box_regression", "=", "self", ".", "head", "(", "features", ")", "anchors", "=", "self", ".", "anchor_generator", "(", "images", ",", "features", ")", "if", "self", ".", "training", ":", "return", "self", ".", "_forward_train", "(", "anchors", ",", "objectness", ",", "rpn_box_regression", ",", "targets", ")", "else", ":", "return", "self", ".", "_forward_test", "(", "anchors", ",", "objectness", ",", "rpn_box_regression", ",", "targets", ")" ]
url: https://github.com/rsummers11/CADLab/blob/976ed959a0b5208bb4173127a7ef732ac73a9b6f/MULAN_universal_lesion_analysis/maskrcnn/modeling/rpn/rpn.py#L86-L107

nwo: tensorflow/tensorflow
sha: 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
path: tensorflow/python/ops/image_ops_impl.py
language: python
identifier: flip_left_right
parameters: (image)
return_statement: return _flip(image, 1, 'flip_left_right')
docstring: Flip an image horizontally (left to right). Outputs the contents of `image` flipped along the width dimension. See also `tf.reverse`. Usage Example: >>> x = [[[1.0, 2.0, 3.0], ... [4.0, 5.0, 6.0]], ... [[7.0, 8.0, 9.0], ... [10.0, 11.0, 12.0]]] >>> tf.image.flip_left_right(x) <tf.Tensor: shape=(2, 2, 3), dtype=float32, numpy= array([[[ 4., 5., 6.], [ 1., 2., 3.]], [[10., 11., 12.], [ 7., 8., 9.]]], dtype=float32)> Args: image: 4-D Tensor of shape `[batch, height, width, channels]` or 3-D Tensor of shape `[height, width, channels]`. Returns: A tensor of the same type and shape as `image`. Raises: ValueError: if the shape of `image` not supported.
docstring_summary: Flip an image horizontally (left to right).
docstring_tokens:
[ "Flip", "an", "image", "horizontally", "(", "left", "to", "right", ")", "." ]
function:

```python
def flip_left_right(image):
  """Flip an image horizontally (left to right).

  Outputs the contents of `image` flipped along the width dimension.

  See also `tf.reverse`.

  Usage Example:

  >>> x = [[[1.0, 2.0, 3.0],
  ...       [4.0, 5.0, 6.0]],
  ...      [[7.0, 8.0, 9.0],
  ...       [10.0, 11.0, 12.0]]]
  >>> tf.image.flip_left_right(x)
  <tf.Tensor: shape=(2, 2, 3), dtype=float32, numpy=
  array([[[ 4.,  5.,  6.],
          [ 1.,  2.,  3.]],
         [[10., 11., 12.],
          [ 7.,  8.,  9.]]], dtype=float32)>

  Args:
    image: 4-D Tensor of shape `[batch, height, width, channels]` or 3-D
      Tensor of shape `[height, width, channels]`.

  Returns:
    A tensor of the same type and shape as `image`.

  Raises:
    ValueError: if the shape of `image` not supported.
  """
  return _flip(image, 1, 'flip_left_right')
```

function_tokens:
[ "def", "flip_left_right", "(", "image", ")", ":", "return", "_flip", "(", "image", ",", "1", ",", "'flip_left_right'", ")" ]
url: https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/image_ops_impl.py#L540-L570
thalium/icebox
99d147d5b9269222225443ce171b4fd46d8985d4
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py
python
parserCtxt.scanName
(self)
return ret
Trickery: parse an XML name but without consuming the input flow Needed for rollback cases. Used only when parsing entities references. TODO: seems deprecated now, only used in the default part of xmlParserHandleReference [4] NameChar ::= Letter | Digit | '.' | '-' | '_' | ':' | CombiningChar | Extender [5] Name ::= (Letter | '_' | ':') (NameChar)* [6] Names ::= Name (S Name)*
Trickery: parse an XML name but without consuming the input flow Needed for rollback cases. Used only when parsing entities references. TODO: seems deprecated now, only used in the default part of xmlParserHandleReference [4] NameChar ::= Letter | Digit | '.' | '-' | '_' | ':' | CombiningChar | Extender [5] Name ::= (Letter | '_' | ':') (NameChar)* [6] Names ::= Name (S Name)*
[ "Trickery", ":", "parse", "an", "XML", "name", "but", "without", "consuming", "the", "input", "flow", "Needed", "for", "rollback", "cases", ".", "Used", "only", "when", "parsing", "entities", "references", ".", "TODO", ":", "seems", "deprecated", "now", "only", "used", "in", "the", "default", "part", "of", "xmlParserHandleReference", "[", "4", "]", "NameChar", "::", "=", "Letter", "|", "Digit", "|", ".", "|", "-", "|", "_", "|", ":", "|", "CombiningChar", "|", "Extender", "[", "5", "]", "Name", "::", "=", "(", "Letter", "|", "_", "|", ":", ")", "(", "NameChar", ")", "*", "[", "6", "]", "Names", "::", "=", "Name", "(", "S", "Name", ")", "*" ]
def scanName(self): """Trickery: parse an XML name but without consuming the input flow Needed for rollback cases. Used only when parsing entities references. TODO: seems deprecated now, only used in the default part of xmlParserHandleReference [4] NameChar ::= Letter | Digit | '.' | '-' | '_' | ':' | CombiningChar | Extender [5] Name ::= (Letter | '_' | ':') (NameChar)* [6] Names ::= Name (S Name)* """ ret = libxml2mod.xmlScanName(self._o) return ret
[ "def", "scanName", "(", "self", ")", ":", "ret", "=", "libxml2mod", ".", "xmlScanName", "(", "self", ".", "_o", ")", "return", "ret" ]
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L4744-L4753
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/propgrid.py
python
FileProperty.__init__
(self, *args, **kwargs)
__init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), String value=wxEmptyString) -> FileProperty
__init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), String value=wxEmptyString) -> FileProperty
[ "__init__", "(", "self", "String", "label", "=", "(", "*", "wxPGProperty", "::", "sm_wxPG_LABEL", ")", "String", "name", "=", "(", "*", "wxPGProperty", "::", "sm_wxPG_LABEL", ")", "String", "value", "=", "wxEmptyString", ")", "-", ">", "FileProperty" ]
def __init__(self, *args, **kwargs): """ __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), String value=wxEmptyString) -> FileProperty """ _propgrid.FileProperty_swiginit(self,_propgrid.new_FileProperty(*args, **kwargs))
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_propgrid", ".", "FileProperty_swiginit", "(", "self", ",", "_propgrid", ".", "new_FileProperty", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/propgrid.py#L3067-L3072
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Draft/draftguitools/gui_orthoarray.py
python
OrthoArray.GetResources
(self)
return {"Pixmap": "Draft_Array", "MenuText": QT_TRANSLATE_NOOP("Draft_OrthoArray", "Array"), "ToolTip": QT_TRANSLATE_NOOP("Draft_OrthoArray", "Creates copies of the selected object, and places the copies in an orthogonal pattern,\nmeaning the copies follow the specified direction in the X, Y, Z axes.\n\nThe array can be turned into a polar or a circular array by changing its type.")}
Set icon, menu and tooltip.
Set icon, menu and tooltip.
[ "Set", "icon", "menu", "and", "tooltip", "." ]
def GetResources(self): """Set icon, menu and tooltip.""" return {"Pixmap": "Draft_Array", "MenuText": QT_TRANSLATE_NOOP("Draft_OrthoArray", "Array"), "ToolTip": QT_TRANSLATE_NOOP("Draft_OrthoArray", "Creates copies of the selected object, and places the copies in an orthogonal pattern,\nmeaning the copies follow the specified direction in the X, Y, Z axes.\n\nThe array can be turned into a polar or a circular array by changing its type.")}
[ "def", "GetResources", "(", "self", ")", ":", "return", "{", "\"Pixmap\"", ":", "\"Draft_Array\"", ",", "\"MenuText\"", ":", "QT_TRANSLATE_NOOP", "(", "\"Draft_OrthoArray\"", ",", "\"Array\"", ")", ",", "\"ToolTip\"", ":", "QT_TRANSLATE_NOOP", "(", "\"Draft_OrthoArray\"", ",", "\"Creates copies of the selected object, and places the copies in an orthogonal pattern,\\nmeaning the copies follow the specified direction in the X, Y, Z axes.\\n\\nThe array can be turned into a polar or a circular array by changing its type.\"", ")", "}" ]
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftguitools/gui_orthoarray.py#L62-L66
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/pickletools.py
python
read_uint1
(f)
r""" >>> import io >>> read_uint1(io.BytesIO(b'\xff')) 255
r""" >>> import io >>> read_uint1(io.BytesIO(b'\xff')) 255
[ "r", ">>>", "import", "io", ">>>", "read_uint1", "(", "io", ".", "BytesIO", "(", "b", "\\", "xff", "))", "255" ]
def read_uint1(f): r""" >>> import io >>> read_uint1(io.BytesIO(b'\xff')) 255 """ data = f.read(1) if data: return data[0] raise ValueError("not enough data in stream to read uint1")
[ "def", "read_uint1", "(", "f", ")", ":", "data", "=", "f", ".", "read", "(", "1", ")", "if", "data", ":", "return", "data", "[", "0", "]", "raise", "ValueError", "(", "\"not enough data in stream to read uint1\"", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/pickletools.py#L212-L222
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Draft/draftobjects/dimension.py
python
AngularDimension.onChanged
(self, obj, prop)
Execute when a property is changed. It just sets some properties to be read-only or hidden, as they aren't used.
Execute when a property is changed.
[ "Execute", "when", "a", "property", "is", "changed", "." ]
def onChanged(self, obj, prop): """Execute when a property is changed. It just sets some properties to be read-only or hidden, as they aren't used. """ if hasattr(obj, "Angle"): obj.setPropertyStatus('Angle', 'ReadOnly') if hasattr(obj, "Normal"): obj.setPropertyStatus('Normal', 'Hidden') if hasattr(obj, "Support"): obj.setPropertyStatus('Support', 'Hidden') if hasattr(obj, "LinkedGeometry"): obj.setPropertyStatus('LinkedGeometry', 'Hidden')
[ "def", "onChanged", "(", "self", ",", "obj", ",", "prop", ")", ":", "if", "hasattr", "(", "obj", ",", "\"Angle\"", ")", ":", "obj", ".", "setPropertyStatus", "(", "'Angle'", ",", "'ReadOnly'", ")", "if", "hasattr", "(", "obj", ",", "\"Normal\"", ")", ":", "obj", ".", "setPropertyStatus", "(", "'Normal'", ",", "'Hidden'", ")", "if", "hasattr", "(", "obj", ",", "\"Support\"", ")", ":", "obj", ".", "setPropertyStatus", "(", "'Support'", ",", "'Hidden'", ")", "if", "hasattr", "(", "obj", ",", "\"LinkedGeometry\"", ")", ":", "obj", ".", "setPropertyStatus", "(", "'LinkedGeometry'", ",", "'Hidden'", ")" ]
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftobjects/dimension.py#L587-L601
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/http/cookiejar.py
python
CookiePolicy.return_ok
(self, cookie, request)
Return true if (and only if) cookie should be returned to server.
Return true if (and only if) cookie should be returned to server.
[ "Return", "true", "if", "(", "and", "only", "if", ")", "cookie", "should", "be", "returned", "to", "server", "." ]
def return_ok(self, cookie, request): """Return true if (and only if) cookie should be returned to server.""" raise NotImplementedError()
[ "def", "return_ok", "(", "self", ",", "cookie", ",", "request", ")", ":", "raise", "NotImplementedError", "(", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/http/cookiejar.py#L851-L853
eric612/MobileNet-YOLO
69b4441cb3ec8d553fbdef788ad033e246f901bd
python/caffe/pycaffe.py
python
_Net_forward_backward_all
(self, blobs=None, diffs=None, **kwargs)
return all_outs, all_diffs
Run net forward + backward in batches. Parameters ---------- blobs: list of blobs to extract as in forward() diffs: list of diffs to extract as in backward() kwargs: Keys are input (for forward) and output (for backward) blob names and values are ndarrays. Refer to forward() and backward(). Prefilled variants are called for lack of input or output blobs. Returns ------- all_blobs: {blob name: blob ndarray} dict. all_diffs: {blob name: diff ndarray} dict.
Run net forward + backward in batches.
[ "Run", "net", "forward", "+", "backward", "in", "batches", "." ]
def _Net_forward_backward_all(self, blobs=None, diffs=None, **kwargs): """ Run net forward + backward in batches. Parameters ---------- blobs: list of blobs to extract as in forward() diffs: list of diffs to extract as in backward() kwargs: Keys are input (for forward) and output (for backward) blob names and values are ndarrays. Refer to forward() and backward(). Prefilled variants are called for lack of input or output blobs. Returns ------- all_blobs: {blob name: blob ndarray} dict. all_diffs: {blob name: diff ndarray} dict. """ # Batch blobs and diffs. all_outs = {out: [] for out in set(self.outputs + (blobs or []))} all_diffs = {diff: [] for diff in set(self.inputs + (diffs or []))} forward_batches = self._batch({in_: kwargs[in_] for in_ in self.inputs if in_ in kwargs}) backward_batches = self._batch({out: kwargs[out] for out in self.outputs if out in kwargs}) # Collect outputs from batches (and heed lack of forward/backward batches). for fb, bb in izip_longest(forward_batches, backward_batches, fillvalue={}): batch_blobs = self.forward(blobs=blobs, **fb) batch_diffs = self.backward(diffs=diffs, **bb) for out, out_blobs in six.iteritems(batch_blobs): all_outs[out].extend(out_blobs.copy()) for diff, out_diffs in six.iteritems(batch_diffs): all_diffs[diff].extend(out_diffs.copy()) # Package in ndarray. for out, diff in zip(all_outs, all_diffs): all_outs[out] = np.asarray(all_outs[out]) all_diffs[diff] = np.asarray(all_diffs[diff]) # Discard padding at the end and package in ndarray. pad = len(six.next(six.itervalues(all_outs))) - len(six.next(six.itervalues(kwargs))) if pad: for out, diff in zip(all_outs, all_diffs): all_outs[out] = all_outs[out][:-pad] all_diffs[diff] = all_diffs[diff][:-pad] return all_outs, all_diffs
[ "def", "_Net_forward_backward_all", "(", "self", ",", "blobs", "=", "None", ",", "diffs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# Batch blobs and diffs.", "all_outs", "=", "{", "out", ":", "[", "]", "for", "out", "in", "set", "(", "self", ".", "outputs", "+", "(", "blobs", "or", "[", "]", ")", ")", "}", "all_diffs", "=", "{", "diff", ":", "[", "]", "for", "diff", "in", "set", "(", "self", ".", "inputs", "+", "(", "diffs", "or", "[", "]", ")", ")", "}", "forward_batches", "=", "self", ".", "_batch", "(", "{", "in_", ":", "kwargs", "[", "in_", "]", "for", "in_", "in", "self", ".", "inputs", "if", "in_", "in", "kwargs", "}", ")", "backward_batches", "=", "self", ".", "_batch", "(", "{", "out", ":", "kwargs", "[", "out", "]", "for", "out", "in", "self", ".", "outputs", "if", "out", "in", "kwargs", "}", ")", "# Collect outputs from batches (and heed lack of forward/backward batches).", "for", "fb", ",", "bb", "in", "izip_longest", "(", "forward_batches", ",", "backward_batches", ",", "fillvalue", "=", "{", "}", ")", ":", "batch_blobs", "=", "self", ".", "forward", "(", "blobs", "=", "blobs", ",", "*", "*", "fb", ")", "batch_diffs", "=", "self", ".", "backward", "(", "diffs", "=", "diffs", ",", "*", "*", "bb", ")", "for", "out", ",", "out_blobs", "in", "six", ".", "iteritems", "(", "batch_blobs", ")", ":", "all_outs", "[", "out", "]", ".", "extend", "(", "out_blobs", ".", "copy", "(", ")", ")", "for", "diff", ",", "out_diffs", "in", "six", ".", "iteritems", "(", "batch_diffs", ")", ":", "all_diffs", "[", "diff", "]", ".", "extend", "(", "out_diffs", ".", "copy", "(", ")", ")", "# Package in ndarray.", "for", "out", ",", "diff", "in", "zip", "(", "all_outs", ",", "all_diffs", ")", ":", "all_outs", "[", "out", "]", "=", "np", ".", "asarray", "(", "all_outs", "[", "out", "]", ")", "all_diffs", "[", "diff", "]", "=", "np", ".", "asarray", "(", "all_diffs", "[", "diff", "]", ")", "# Discard padding at the end and package in ndarray.", "pad", "=", "len", "(", "six", ".", "next", "(", "six", ".", "itervalues", "(", "all_outs", ")", ")", ")", "-", "len", "(", "six", ".", "next", "(", "six", ".", "itervalues", "(", "kwargs", ")", ")", ")", "if", "pad", ":", "for", "out", ",", "diff", "in", "zip", "(", "all_outs", ",", "all_diffs", ")", ":", "all_outs", "[", "out", "]", "=", "all_outs", "[", "out", "]", "[", ":", "-", "pad", "]", "all_diffs", "[", "diff", "]", "=", "all_diffs", "[", "diff", "]", "[", ":", "-", "pad", "]", "return", "all_outs", ",", "all_diffs" ]
https://github.com/eric612/MobileNet-YOLO/blob/69b4441cb3ec8d553fbdef788ad033e246f901bd/python/caffe/pycaffe.py#L216-L258
abyzovlab/CNVnator
c73786d6160f17b020feae928148533ca036fad2
pytools/io.py
python
IO.__del__
(self)
Class destructor Closes root file
Class destructor Closes root file
[ "Class", "destructor", "Closes", "root", "file" ]
def __del__(self): """Class destructor Closes root file""" self.file.Close()
[ "def", "__del__", "(", "self", ")", ":", "self", ".", "file", ".", "Close", "(", ")" ]
https://github.com/abyzovlab/CNVnator/blob/c73786d6160f17b020feae928148533ca036fad2/pytools/io.py#L59-L62
libornovax/master_thesis_code
6eca474ed3cae673afde010caef338cf7349f839
scripts/data/shared/bbtxt.py
python
load_bbtxt_to_list
(path_bbtxt)
Loads a BBTXT file into a list of BB2D objects. The information about filename will get lost - this is a function purely for statistical purposes. Input: path_bbtxt: Path to a BBTXT file Returns: list of BB2D objects
Loads a BBTXT file into a list of BB2D objects. The information about filename will get lost - this is a function purely for statistical purposes.
[ "Loads", "a", "BBTXT", "file", "into", "a", "list", "of", "BB2D", "objects", ".", "The", "information", "about", "filename", "will", "get", "lost", "-", "this", "is", "a", "function", "purely", "for", "statistical", "purposes", "." ]
def load_bbtxt_to_list(path_bbtxt): """ Loads a BBTXT file into a list of BB2D objects. The information about filename will get lost - this is a function purely for statistical purposes. Input: path_bbtxt: Path to a BBTXT file Returns: list of BB2D objects """ with open(path_bbtxt, 'r') as infile: # Ok, the file is open so we can start reading bb2d_list = [] for line in infile: line = line.rstrip('\n') data = line.split(' ') bb2d_list.append(BB2D(xmin=float(data[3]), ymin=float(data[4]), xmax=float(data[5]), ymax=float(data[6]), label=abs(int(data[1])), confidence=float(data[2]), required=(int(data[1]) >= 0))) return bb2d_list print('ERROR: File "%s" could not be opened!'%(path_bbtxt)) exit(1)
[ "def", "load_bbtxt_to_list", "(", "path_bbtxt", ")", ":", "with", "open", "(", "path_bbtxt", ",", "'r'", ")", "as", "infile", ":", "# Ok, the file is open so we can start reading", "bb2d_list", "=", "[", "]", "for", "line", "in", "infile", ":", "line", "=", "line", ".", "rstrip", "(", "'\\n'", ")", "data", "=", "line", ".", "split", "(", "' '", ")", "bb2d_list", ".", "append", "(", "BB2D", "(", "xmin", "=", "float", "(", "data", "[", "3", "]", ")", ",", "ymin", "=", "float", "(", "data", "[", "4", "]", ")", ",", "xmax", "=", "float", "(", "data", "[", "5", "]", ")", ",", "ymax", "=", "float", "(", "data", "[", "6", "]", ")", ",", "label", "=", "abs", "(", "int", "(", "data", "[", "1", "]", ")", ")", ",", "confidence", "=", "float", "(", "data", "[", "2", "]", ")", ",", "required", "=", "(", "int", "(", "data", "[", "1", "]", ")", ">=", "0", ")", ")", ")", "return", "bb2d_list", "print", "(", "'ERROR: File \"%s\" could not be opened!'", "%", "(", "path_bbtxt", ")", ")", "exit", "(", "1", ")" ]
https://github.com/libornovax/master_thesis_code/blob/6eca474ed3cae673afde010caef338cf7349f839/scripts/data/shared/bbtxt.py#L57-L83
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/contexts/fitting_contexts/model_fitting_context.py
python
ModelFittingContext.x_parameter_errors
(self)
return self._x_parameter_errors
Returns the available x parameter errors for the selected results table.
Returns the available x parameter errors for the selected results table.
[ "Returns", "the", "available", "x", "parameter", "errors", "for", "the", "selected", "results", "table", "." ]
def x_parameter_errors(self) -> dict: """Returns the available x parameter errors for the selected results table.""" return self._x_parameter_errors
[ "def", "x_parameter_errors", "(", "self", ")", "->", "dict", ":", "return", "self", ".", "_x_parameter_errors" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/contexts/fitting_contexts/model_fitting_context.py#L77-L79
livecode/livecode
4606a10ea10b16d5071d0f9f263ccdd7ede8b31d
gyp/pylib/gyp/generator/android.py
python
AndroidMkWriter.WriteSourceFlags
(self, spec, configs)
Write out the flags and include paths used to compile source files for the current target. Args: spec, configs: input from gyp.
Write out the flags and include paths used to compile source files for the current target.
[ "Write", "out", "the", "flags", "and", "include", "paths", "used", "to", "compile", "source", "files", "for", "the", "current", "target", "." ]
def WriteSourceFlags(self, spec, configs): """Write out the flags and include paths used to compile source files for the current target. Args: spec, configs: input from gyp. """ for configname, config in sorted(configs.iteritems()): extracted_includes = [] self.WriteLn('\n# Flags passed to both C and C++ files.') cflags, includes_from_cflags = self.ExtractIncludesFromCFlags( config.get('cflags', []) + config.get('cflags_c', [])) extracted_includes.extend(includes_from_cflags) self.WriteList(cflags, 'MY_CFLAGS_%s' % configname) self.WriteList(config.get('defines'), 'MY_DEFS_%s' % configname, prefix='-D', quoter=make.EscapeCppDefine) self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS') includes = list(config.get('include_dirs', [])) includes.extend(extracted_includes) includes = map(Sourceify, map(self.LocalPathify, includes)) includes = self.NormalizeIncludePaths(includes) self.WriteList(includes, 'LOCAL_C_INCLUDES_%s' % configname) self.WriteLn('\n# Flags passed to only C++ (and not C) files.') self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS_%s' % configname) self.WriteLn('\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) ' '$(MY_DEFS_$(GYP_CONFIGURATION))') # Undefine ANDROID for host modules # TODO: the source code should not use macro ANDROID to tell if it's host # or target module. if self.toolset == 'host': self.WriteLn('# Undefine ANDROID for host modules') self.WriteLn('LOCAL_CFLAGS += -UANDROID') self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) ' '$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))') self.WriteLn('LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))') # Android uses separate flags for assembly file invocations, but gyp expects # the same CFLAGS to be applied: self.WriteLn('LOCAL_ASFLAGS := $(LOCAL_CFLAGS)')
[ "def", "WriteSourceFlags", "(", "self", ",", "spec", ",", "configs", ")", ":", "for", "configname", ",", "config", "in", "sorted", "(", "configs", ".", "iteritems", "(", ")", ")", ":", "extracted_includes", "=", "[", "]", "self", ".", "WriteLn", "(", "'\\n# Flags passed to both C and C++ files.'", ")", "cflags", ",", "includes_from_cflags", "=", "self", ".", "ExtractIncludesFromCFlags", "(", "config", ".", "get", "(", "'cflags'", ",", "[", "]", ")", "+", "config", ".", "get", "(", "'cflags_c'", ",", "[", "]", ")", ")", "extracted_includes", ".", "extend", "(", "includes_from_cflags", ")", "self", ".", "WriteList", "(", "cflags", ",", "'MY_CFLAGS_%s'", "%", "configname", ")", "self", ".", "WriteList", "(", "config", ".", "get", "(", "'defines'", ")", ",", "'MY_DEFS_%s'", "%", "configname", ",", "prefix", "=", "'-D'", ",", "quoter", "=", "make", ".", "EscapeCppDefine", ")", "self", ".", "WriteLn", "(", "'\\n# Include paths placed before CFLAGS/CPPFLAGS'", ")", "includes", "=", "list", "(", "config", ".", "get", "(", "'include_dirs'", ",", "[", "]", ")", ")", "includes", ".", "extend", "(", "extracted_includes", ")", "includes", "=", "map", "(", "Sourceify", ",", "map", "(", "self", ".", "LocalPathify", ",", "includes", ")", ")", "includes", "=", "self", ".", "NormalizeIncludePaths", "(", "includes", ")", "self", ".", "WriteList", "(", "includes", ",", "'LOCAL_C_INCLUDES_%s'", "%", "configname", ")", "self", ".", "WriteLn", "(", "'\\n# Flags passed to only C++ (and not C) files.'", ")", "self", ".", "WriteList", "(", "config", ".", "get", "(", "'cflags_cc'", ")", ",", "'LOCAL_CPPFLAGS_%s'", "%", "configname", ")", "self", ".", "WriteLn", "(", "'\\nLOCAL_CFLAGS := $(MY_CFLAGS_$(GYP_CONFIGURATION)) '", "'$(MY_DEFS_$(GYP_CONFIGURATION))'", ")", "# Undefine ANDROID for host modules", "# TODO: the source code should not use macro ANDROID to tell if it's host", "# or target module.", "if", "self", ".", "toolset", "==", "'host'", ":", "self", ".", "WriteLn", "(", "'# Undefine ANDROID for host modules'", ")", "self", ".", "WriteLn", "(", "'LOCAL_CFLAGS += -UANDROID'", ")", "self", ".", "WriteLn", "(", "'LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) '", "'$(LOCAL_C_INCLUDES_$(GYP_CONFIGURATION))'", ")", "self", ".", "WriteLn", "(", "'LOCAL_CPPFLAGS := $(LOCAL_CPPFLAGS_$(GYP_CONFIGURATION))'", ")", "# Android uses separate flags for assembly file invocations, but gyp expects", "# the same CFLAGS to be applied:", "self", ".", "WriteLn", "(", "'LOCAL_ASFLAGS := $(LOCAL_CFLAGS)'", ")" ]
https://github.com/livecode/livecode/blob/4606a10ea10b16d5071d0f9f263ccdd7ede8b31d/gyp/pylib/gyp/generator/android.py#L454-L496
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/bdb.py
python
effective
(file, line, frame)
return (None, None)
Determine which breakpoint for this file:line is to be acted upon. Called only if we know there is a breakpoint at this location. Return the breakpoint that was triggered and a boolean that indicates if it is ok to delete a temporary breakpoint. Return (None, None) if there is no matching breakpoint.
Determine which breakpoint for this file:line is to be acted upon.
[ "Determine", "which", "breakpoint", "for", "this", "file", ":", "line", "is", "to", "be", "acted", "upon", "." ]
def effective(file, line, frame): """Determine which breakpoint for this file:line is to be acted upon. Called only if we know there is a breakpoint at this location. Return the breakpoint that was triggered and a boolean that indicates if it is ok to delete a temporary breakpoint. Return (None, None) if there is no matching breakpoint. """ possibles = Breakpoint.bplist[file, line] for b in possibles: if not b.enabled: continue if not checkfuncname(b, frame): continue # Count every hit when bp is enabled b.hits += 1 if not b.cond: # If unconditional, and ignoring go on to next, else break if b.ignore > 0: b.ignore -= 1 continue else: # breakpoint and marker that it's ok to delete if temporary return (b, True) else: # Conditional bp. # Ignore count applies only to those bpt hits where the # condition evaluates to true. try: val = eval(b.cond, frame.f_globals, frame.f_locals) if val: if b.ignore > 0: b.ignore -= 1 # continue else: return (b, True) # else: # continue except: # if eval fails, most conservative thing is to stop on # breakpoint regardless of ignore count. Don't delete # temporary, as another hint to user. return (b, False) return (None, None)
[ "def", "effective", "(", "file", ",", "line", ",", "frame", ")", ":", "possibles", "=", "Breakpoint", ".", "bplist", "[", "file", ",", "line", "]", "for", "b", "in", "possibles", ":", "if", "not", "b", ".", "enabled", ":", "continue", "if", "not", "checkfuncname", "(", "b", ",", "frame", ")", ":", "continue", "# Count every hit when bp is enabled", "b", ".", "hits", "+=", "1", "if", "not", "b", ".", "cond", ":", "# If unconditional, and ignoring go on to next, else break", "if", "b", ".", "ignore", ">", "0", ":", "b", ".", "ignore", "-=", "1", "continue", "else", ":", "# breakpoint and marker that it's ok to delete if temporary", "return", "(", "b", ",", "True", ")", "else", ":", "# Conditional bp.", "# Ignore count applies only to those bpt hits where the", "# condition evaluates to true.", "try", ":", "val", "=", "eval", "(", "b", ".", "cond", ",", "frame", ".", "f_globals", ",", "frame", ".", "f_locals", ")", "if", "val", ":", "if", "b", ".", "ignore", ">", "0", ":", "b", ".", "ignore", "-=", "1", "# continue", "else", ":", "return", "(", "b", ",", "True", ")", "# else:", "# continue", "except", ":", "# if eval fails, most conservative thing is to stop on", "# breakpoint regardless of ignore count. Don't delete", "# temporary, as another hint to user.", "return", "(", "b", ",", "False", ")", "return", "(", "None", ",", "None", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/bdb.py#L797-L840
google/or-tools
2cb85b4eead4c38e1c54b48044f92087cf165bce
examples/python/shift_scheduling_sat.py
python
negated_bounded_span
(works, start, length)
return sequence
Filters an isolated sub-sequence of variables assigned to True. Extract the span of Boolean variables [start, start + length), negate them, and if there are variables to the left/right of this span, surround the span by them in non-negated form. Args: works: a list of variables to extract the span from. start: the start of the span. length: the length of the span. Returns: a list of variables whose conjunction will be false if the sub-list is assigned to True, and correctly bounded by variables assigned to False, or by the start or end of works.
Filters an isolated sub-sequence of variables assigned to True.
[ "Filters", "an", "isolated", "sub", "-", "sequence", "of", "variables", "assined", "to", "True", "." ]
def negated_bounded_span(works, start, length): """Filters an isolated sub-sequence of variables assigned to True. Extract the span of Boolean variables [start, start + length), negate them, and if there are variables to the left/right of this span, surround the span by them in non-negated form. Args: works: a list of variables to extract the span from. start: the start of the span. length: the length of the span. Returns: a list of variables whose conjunction will be false if the sub-list is assigned to True, and correctly bounded by variables assigned to False, or by the start or end of works. """ sequence = [] # Left border (start of works, or works[start - 1]) if start > 0: sequence.append(works[start - 1]) for i in range(length): sequence.append(works[start + i].Not()) # Right border (end of works or works[start + length]) if start + length < len(works): sequence.append(works[start + length]) return sequence
[ "def", "negated_bounded_span", "(", "works", ",", "start", ",", "length", ")", ":", "sequence", "=", "[", "]", "# Left border (start of works, or works[start - 1])", "if", "start", ">", "0", ":", "sequence", ".", "append", "(", "works", "[", "start", "-", "1", "]", ")", "for", "i", "in", "range", "(", "length", ")", ":", "sequence", ".", "append", "(", "works", "[", "start", "+", "i", "]", ".", "Not", "(", ")", ")", "# Right border (end of works or works[start + length])", "if", "start", "+", "length", "<", "len", "(", "works", ")", ":", "sequence", ".", "append", "(", "works", "[", "start", "+", "length", "]", ")", "return", "sequence" ]
https://github.com/google/or-tools/blob/2cb85b4eead4c38e1c54b48044f92087cf165bce/examples/python/shift_scheduling_sat.py#L30-L56
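A minimal usage sketch for negated_bounded_span above. Hedged: the 7-day horizon, the variable names, and the forbidden-length loop are illustrative assumptions, not part of this record; it assumes ortools is installed and the function above is in scope.

from ortools.sat.python import cp_model

model = cp_model.CpModel()
# One Boolean per day: True means "works that day".
works = [model.NewBoolVar('work_%i' % i) for i in range(7)]

# Forbid isolated work stretches shorter than 2 days: for each possible
# placement of a too-short span, at least one literal of its negated
# bounded span must be true, which rules out exactly that isolated span.
hard_min = 2
for length in range(1, hard_min):
    for start in range(len(works) - length + 1):
        model.AddBoolOr(negated_bounded_span(works, start, length))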
ideawu/ssdb
f229ba277c7f7d0ca5a441c0c6fb3d1209af68e4
deps/cpy/antlr3/tokens.py
python
Token.getTokenIndex
(self)
@brief Get the index in the input stream. An index from 0..n-1 of the token object in the input stream. This must be valid in order to use the ANTLRWorks debugger. Using setter/getter methods is deprecated. Use o.index instead.
@brief Get the index in the input stream.
[ "@brief", "Get", "the", "index", "in", "the", "input", "stream", "." ]
def getTokenIndex(self): """@brief Get the index in the input stream. An index from 0..n-1 of the token object in the input stream. This must be valid in order to use the ANTLRWorks debugger. Using setter/getter methods is deprecated. Use o.index instead.""" raise NotImplementedError
[ "def", "getTokenIndex", "(", "self", ")", ":", "raise", "NotImplementedError" ]
https://github.com/ideawu/ssdb/blob/f229ba277c7f7d0ca5a441c0c6fb3d1209af68e4/deps/cpy/antlr3/tokens.py#L123-L131
Polidea/SiriusObfuscator
b0e590d8130e97856afe578869b83a209e2b19be
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py
python
SBWatchpoint.GetCondition
(self)
return _lldb.SBWatchpoint_GetCondition(self)
GetCondition(self) -> str Get the condition expression for the watchpoint.
GetCondition(self) -> str
[ "GetCondition", "(", "self", ")", "-", ">", "str" ]
def GetCondition(self): """ GetCondition(self) -> str Get the condition expression for the watchpoint. """ return _lldb.SBWatchpoint_GetCondition(self)
[ "def", "GetCondition", "(", "self", ")", ":", "return", "_lldb", ".", "SBWatchpoint_GetCondition", "(", "self", ")" ]
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L12638-L12644
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/python/training/supervisor.py
python
Supervisor.ready_op
(self)
return self._ready_op
Return the Ready Op used by the supervisor. Returns: An Op or `None`.
Return the Ready Op used by the supervisor.
[ "Return", "the", "Ready", "Op", "used", "by", "the", "supervisor", "." ]
def ready_op(self): """Return the Ready Op used by the supervisor. Returns: An Op or `None`. """ return self._ready_op
[ "def", "ready_op", "(", "self", ")", ":", "return", "self", ".", "_ready_op" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/training/supervisor.py#L532-L538
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/contrib/cluster_resolver/python/training/cluster_resolver.py
python
SimpleClusterResolver.__init__
(self, cluster_spec)
Creates a SimpleClusterResolver from a ClusterSpec.
Creates a SimpleClusterResolver from a ClusterSpec.
[ "Creates", "a", "SimpleClusterResolver", "from", "a", "ClusterSpec", "." ]
def __init__(self, cluster_spec): """Creates a SimpleClusterResolver from a ClusterSpec.""" super(SimpleClusterResolver, self).__init__() if not isinstance(cluster_spec, ClusterSpec): raise TypeError('cluster_spec must be a ClusterSpec.') self._cluster_spec = cluster_spec
[ "def", "__init__", "(", "self", ",", "cluster_spec", ")", ":", "super", "(", "SimpleClusterResolver", ",", "self", ")", ".", "__init__", "(", ")", "if", "not", "isinstance", "(", "cluster_spec", ",", "ClusterSpec", ")", ":", "raise", "TypeError", "(", "'cluster_spec must be a ClusterSpec.'", ")", "self", ".", "_cluster_spec", "=", "cluster_spec" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/cluster_resolver/python/training/cluster_resolver.py#L60-L66
tkn-tub/ns3-gym
19bfe0a583e641142609939a090a09dfc63a095f
src/visualizer/visualizer/core.py
python
Node._set_highlighted
(self, value)
! Set highlighted function. @param self: class object. @param value: selected value @return none
! Set highlighted function.
[ "!", "Set", "highlighted", "function", "." ]
def _set_highlighted(self, value): """! Set highlighted function. @param self: class object. @param value: selected value @return none """ self._highlighted = value self._update_appearance()
[ "def", "_set_highlighted", "(", "self", ",", "value", ")", ":", "self", ".", "_highlighted", "=", "value", "self", ".", "_update_appearance", "(", ")" ]
https://github.com/tkn-tub/ns3-gym/blob/19bfe0a583e641142609939a090a09dfc63a095f/src/visualizer/visualizer/core.py#L324-L333
apple/swift-lldb
d74be846ef3e62de946df343e8c234bde93a8912
scripts/Python/static-binding/lldb.py
python
SBTypeCategory.get_filters_array
(self)
return filters
An accessor function that returns a list() that contains all filters in a lldb.SBCategory object.
An accessor function that returns a list() that contains all filters in a lldb.SBCategory object.
[ "An", "accessor", "function", "that", "returns", "a", "list", "()", "that", "contains", "all", "filters", "in", "a", "lldb", ".", "SBCategory", "object", "." ]
def get_filters_array(self): '''An accessor function that returns a list() that contains all filters in a lldb.SBCategory object.''' filters = [] for idx in range(self.GetNumFilters()): filters.append(self.GetFilterAtIndex(idx)) return filters
[ "def", "get_filters_array", "(", "self", ")", ":", "filters", "=", "[", "]", "for", "idx", "in", "range", "(", "self", ".", "GetNumFilters", "(", ")", ")", ":", "filters", ".", "append", "(", "self", ".", "GetFilterAtIndex", "(", "idx", ")", ")", "return", "filters" ]
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L13273-L13278
qt/qt
0a2f2382541424726168804be2c90b91381608c6
src/3rdparty/freetype/src/tools/docmaker/content.py
python
DocBlock.get_markup
( self, tag_name )
return None
return the DocMarkup corresponding to a given tag in a block
return the DocMarkup corresponding to a given tag in a block
[ "return", "the", "DocMarkup", "corresponding", "to", "a", "given", "tag", "in", "a", "block" ]
def get_markup( self, tag_name ): """return the DocMarkup corresponding to a given tag in a block""" for m in self.markups: if m.tag == string.lower( tag_name ): return m return None
[ "def", "get_markup", "(", "self", ",", "tag_name", ")", ":", "for", "m", "in", "self", ".", "markups", ":", "if", "m", ".", "tag", "==", "string", ".", "lower", "(", "tag_name", ")", ":", "return", "m", "return", "None" ]
https://github.com/qt/qt/blob/0a2f2382541424726168804be2c90b91381608c6/src/3rdparty/freetype/src/tools/docmaker/content.py#L551-L556
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/ftplib.py
python
FTP.set_pasv
(self, val)
Use passive or active mode for data transfers. With a false argument, use the normal PORT mode, With a true argument, use the PASV command.
Use passive or active mode for data transfers. With a false argument, use the normal PORT mode, With a true argument, use the PASV command.
[ "Use", "passive", "or", "active", "mode", "for", "data", "transfers", ".", "With", "a", "false", "argument", "use", "the", "normal", "PORT", "mode", "With", "a", "true", "argument", "use", "the", "PASV", "command", "." ]
def set_pasv(self, val): '''Use passive or active mode for data transfers. With a false argument, use the normal PORT mode, With a true argument, use the PASV command.''' self.passiveserver = val
[ "def", "set_pasv", "(", "self", ",", "val", ")", ":", "self", ".", "passiveserver", "=", "val" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/ftplib.py#L174-L178
CMU-Perceptual-Computing-Lab/caffe_rtpose
a4778bb1c3eb74d7250402016047216f77b4dba6
scripts/cpp_lint.py
python
GetHeaderGuardCPPVariable
(filename)
return re.sub(r'[-./\s]', '_', file_path_from_root).upper() + '_'
Returns the CPP variable that should be used as a header guard. Args: filename: The name of a C++ header file. Returns: The CPP variable that should be used as a header guard in the named file.
Returns the CPP variable that should be used as a header guard.
[ "Returns", "the", "CPP", "variable", "that", "should", "be", "used", "as", "a", "header", "guard", "." ]
def GetHeaderGuardCPPVariable(filename): """Returns the CPP variable that should be used as a header guard. Args: filename: The name of a C++ header file. Returns: The CPP variable that should be used as a header guard in the named file. """ # Restores original filename in case that cpplint is invoked from Emacs's # flymake. filename = re.sub(r'_flymake\.h$', '.h', filename) filename = re.sub(r'/\.flymake/([^/]*)$', r'/\1', filename) fileinfo = FileInfo(filename) file_path_from_root = fileinfo.RepositoryName() if _root: file_path_from_root = re.sub('^' + _root + os.sep, '', file_path_from_root) return re.sub(r'[-./\s]', '_', file_path_from_root).upper() + '_'
[ "def", "GetHeaderGuardCPPVariable", "(", "filename", ")", ":", "# Restores original filename in case that cpplint is invoked from Emacs's", "# flymake.", "filename", "=", "re", ".", "sub", "(", "r'_flymake\\.h$'", ",", "'.h'", ",", "filename", ")", "filename", "=", "re", ".", "sub", "(", "r'/\\.flymake/([^/]*)$'", ",", "r'/\\1'", ",", "filename", ")", "fileinfo", "=", "FileInfo", "(", "filename", ")", "file_path_from_root", "=", "fileinfo", ".", "RepositoryName", "(", ")", "if", "_root", ":", "file_path_from_root", "=", "re", ".", "sub", "(", "'^'", "+", "_root", "+", "os", ".", "sep", ",", "''", ",", "file_path_from_root", ")", "return", "re", ".", "sub", "(", "r'[-./\\s]'", ",", "'_'", ",", "file_path_from_root", ")", ".", "upper", "(", ")", "+", "'_'" ]
https://github.com/CMU-Perceptual-Computing-Lab/caffe_rtpose/blob/a4778bb1c3eb74d7250402016047216f77b4dba6/scripts/cpp_lint.py#L1384-L1405
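A quick illustration of the guard construction in the record above, as a standalone sketch. Hedged: guard_from_path is a hypothetical helper name, not part of cpplint; only the final re.sub step from the record is reproduced.

import re

def guard_from_path(file_path_from_root):
    # '-', '.', '/' and whitespace all map to '_', then uppercase with a
    # trailing underscore, mirroring the return statement above.
    return re.sub(r'[-./\s]', '_', file_path_from_root).upper() + '_'

print(guard_from_path('src/foo/bar-baz.h'))  # -> SRC_FOO_BAR_BAZ_H_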
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_core.py
python
Object.Destroy
(*args, **kwargs)
return _core_.Object_Destroy(*args, **kwargs)
Destroy(self) Deletes the C++ object this Python object is a proxy for.
Destroy(self)
[ "Destroy", "(", "self", ")" ]
def Destroy(*args, **kwargs): """ Destroy(self) Deletes the C++ object this Python object is a proxy for. """ args[0].this.own(False) return _core_.Object_Destroy(*args, **kwargs)
[ "def", "Destroy", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "args", "[", "0", "]", ".", "this", ".", "own", "(", "False", ")", "return", "_core_", ".", "Object_Destroy", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L812-L819
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/heapq.py
python
heappushpop
(heap, item)
return item
Fast version of a heappush followed by a heappop.
Fast version of a heappush followed by a heappop.
[ "Fast", "version", "of", "a", "heappush", "followed", "by", "a", "heappop", "." ]
def heappushpop(heap, item): """Fast version of a heappush followed by a heappop.""" if heap and heap[0] < item: item, heap[0] = heap[0], item _siftup(heap, 0) return item
[ "def", "heappushpop", "(", "heap", ",", "item", ")", ":", "if", "heap", "and", "heap", "[", "0", "]", "<", "item", ":", "item", ",", "heap", "[", "0", "]", "=", "heap", "[", "0", "]", ",", "item", "_siftup", "(", "heap", ",", "0", ")", "return", "item" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/heapq.py#L161-L166
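A small standard-library example of the call above (the values are illustrative):

import heapq

h = [5, 3, 1]
heapq.heapify(h)
# Push 4 and pop the smallest element in one combined, faster step:
# since heap[0] == 1 < 4, the 1 comes out and 4 takes its place.
print(heapq.heappushpop(h, 4))  # -> 1; h now holds 3, 4, 5 as a heap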
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
scripts/CrystalTools/PeakReport.py
python
PeakReport.set_log_scale
(self, log_scale)
Arguments: log_scale -- True for log scaling
Arguments: log_scale -- True for log scaling
[ "Arguments", ":", "log_scale", "--", "True", "for", "log", "scaling" ]
def set_log_scale(self, log_scale): """ Arguments: log_scale -- True for log scaling """ self.__log_scale = log_scale
[ "def", "set_log_scale", "(", "self", ",", "log_scale", ")", ":", "self", ".", "__log_scale", "=", "log_scale" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/CrystalTools/PeakReport.py#L52-L57
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/ros_comm/roslaunch/src/roslaunch/xmlloader.py
python
XmlLoader._arg_tag
(self, tag, context, ros_config, verbose=True)
Process an <arg> tag.
Process an <arg> tag.
[ "Process", "an", "<arg", ">", "tag", "." ]
def _arg_tag(self, tag, context, ros_config, verbose=True): """ Process an <arg> tag. """ try: self._check_attrs(tag, context, ros_config, XmlLoader.ARG_ATTRS) (name,) = self.reqd_attrs(tag, context, ('name',)) value, default, doc = self.opt_attrs(tag, context, ('value', 'default', 'doc')) if value is not None and default is not None: raise XmlParseException( "<arg> tag must have one and only one of value/default.") context.add_arg(name, value=value, default=default, doc=doc) except substitution_args.ArgException as e: raise XmlParseException( "arg '%s' is not defined. \n\nArg xml is %s"%(e, tag.toxml())) except Exception as e: raise XmlParseException( "Invalid <arg> tag: %s. \n\nArg xml is %s"%(e, tag.toxml()))
[ "def", "_arg_tag", "(", "self", ",", "tag", ",", "context", ",", "ros_config", ",", "verbose", "=", "True", ")", ":", "try", ":", "self", ".", "_check_attrs", "(", "tag", ",", "context", ",", "ros_config", ",", "XmlLoader", ".", "ARG_ATTRS", ")", "(", "name", ",", ")", "=", "self", ".", "reqd_attrs", "(", "tag", ",", "context", ",", "(", "'name'", ",", ")", ")", "value", ",", "default", ",", "doc", "=", "self", ".", "opt_attrs", "(", "tag", ",", "context", ",", "(", "'value'", ",", "'default'", ",", "'doc'", ")", ")", "if", "value", "is", "not", "None", "and", "default", "is", "not", "None", ":", "raise", "XmlParseException", "(", "\"<arg> tag must have one and only one of value/default.\"", ")", "context", ".", "add_arg", "(", "name", ",", "value", "=", "value", ",", "default", "=", "default", ",", "doc", "=", "doc", ")", "except", "substitution_args", ".", "ArgException", "as", "e", ":", "raise", "XmlParseException", "(", "\"arg '%s' is not defined. \\n\\nArg xml is %s\"", "%", "(", "e", ",", "tag", ".", "toxml", "(", ")", ")", ")", "except", "Exception", "as", "e", ":", "raise", "XmlParseException", "(", "\"Invalid <arg> tag: %s. \\n\\nArg xml is %s\"", "%", "(", "e", ",", "tag", ".", "toxml", "(", ")", ")", ")" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/ros_comm/roslaunch/src/roslaunch/xmlloader.py#L283-L303
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/inspect.py
python
isdatadescriptor
(object)
return (hasattr(object, "__set__") and hasattr(object, "__get__"))
Return true if the object is a data descriptor. Data descriptors have both a __get__ and a __set__ attribute. Examples are properties (defined in Python) and getsets and members (defined in C). Typically, data descriptors will also have __name__ and __doc__ attributes (properties, getsets, and members have both of these attributes), but this is not guaranteed.
Return true if the object is a data descriptor.
[ "Return", "true", "if", "the", "object", "is", "a", "data", "descriptor", "." ]
def isdatadescriptor(object): """Return true if the object is a data descriptor. Data descriptors have both a __get__ and a __set__ attribute. Examples are properties (defined in Python) and getsets and members (defined in C). Typically, data descriptors will also have __name__ and __doc__ attributes (properties, getsets, and members have both of these attributes), but this is not guaranteed.""" return (hasattr(object, "__set__") and hasattr(object, "__get__"))
[ "def", "isdatadescriptor", "(", "object", ")", ":", "return", "(", "hasattr", "(", "object", ",", "\"__set__\"", ")", "and", "hasattr", "(", "object", ",", "\"__get__\"", ")", ")" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/inspect.py#L98-L106
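A quick check of the rule above (standard library only; the class name is illustrative):

import inspect

class C:
    @property
    def x(self):
        return 1

def f():
    pass

# property defines both __get__ and __set__, so it is a data descriptor.
print(inspect.isdatadescriptor(vars(C)['x']))  # -> True
# A plain function defines __get__ only, so it is not.
print(inspect.isdatadescriptor(f))  # -> False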
alibaba/AliSQL
13502c1cde1b82104c2c59f9da6f0a7a460828f4
libevent/event_rpcgen.py
python
Parse
(file)
return entities
Parses the input file and returns C code and corresponding header file.
Parses the input file and returns C code and corresponding header file.
[ "Parses", "the", "input", "file", "and", "returns", "C", "code", "and", "corresponding", "header", "file", "." ]
def Parse(file): """ Parses the input file and returns C code and corresponding header file. """ entities = [] while 1: # Just gets the whole struct nicely formatted data = GetNextStruct(file) if not data: break entities.extend(ProcessStruct(data)) return entities
[ "def", "Parse", "(", "file", ")", ":", "entities", "=", "[", "]", "while", "1", ":", "# Just gets the whole struct nicely formatted", "data", "=", "GetNextStruct", "(", "file", ")", "if", "not", "data", ":", "break", "entities", ".", "extend", "(", "ProcessStruct", "(", "data", ")", ")", "return", "entities" ]
https://github.com/alibaba/AliSQL/blob/13502c1cde1b82104c2c59f9da6f0a7a460828f4/libevent/event_rpcgen.py#L1272-L1288
perilouswithadollarsign/cstrike15_src
f82112a2388b841d72cb62ca48ab1846dfcc11c8
thirdparty/protobuf-2.5.0/python/google/protobuf/internal/cpp_message.py
python
RepeatedCompositeContainer.__eq__
(self, other)
return messages == other_messages
Compares the current instance with another one.
Compares the current instance with another one.
[ "Compares", "the", "current", "instance", "with", "another", "one", "." ]
def __eq__(self, other): """Compares the current instance with another one.""" if self is other: return True if not isinstance(other, self.__class__): raise TypeError('Can only compare repeated composite fields against ' 'other repeated composite fields.') messages = self[slice(None, None, None)] other_messages = other[slice(None, None, None)] return messages == other_messages
[ "def", "__eq__", "(", "self", ",", "other", ")", ":", "if", "self", "is", "other", ":", "return", "True", "if", "not", "isinstance", "(", "other", ",", "self", ".", "__class__", ")", ":", "raise", "TypeError", "(", "'Can only compare repeated composite fields against '", "'other repeated composite fields.'", ")", "messages", "=", "self", "[", "slice", "(", "None", ",", "None", ",", "None", ")", "]", "other_messages", "=", "other", "[", "slice", "(", "None", ",", "None", ",", "None", ")", "]", "return", "messages", "==", "other_messages" ]
https://github.com/perilouswithadollarsign/cstrike15_src/blob/f82112a2388b841d72cb62ca48ab1846dfcc11c8/thirdparty/protobuf-2.5.0/python/google/protobuf/internal/cpp_message.py#L235-L244
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/eager/python/examples/revnet/main.py
python
get_datasets
(data_dir, config)
return ds_train, ds_train_one_shot, ds_validation, ds_test
Return dataset.
Return dataset.
[ "Return", "dataset", "." ]
def get_datasets(data_dir, config): """Return dataset.""" if data_dir is None: raise ValueError("No supplied data directory") if not os.path.exists(data_dir): raise ValueError("Data directory {} does not exist".format(data_dir)) if config.dataset not in ["cifar-10", "cifar-100"]: raise ValueError("Unknown dataset {}".format(config.dataset)) print("Training on {} dataset.".format(config.dataset)) sys.stdout.flush() data_dir = os.path.join(data_dir, config.dataset) if FLAGS.validate: # 40k Training set ds_train = cifar_input.get_ds_from_tfrecords( data_dir=data_dir, split="train", data_aug=True, batch_size=config.batch_size, epochs=config.epochs, shuffle=config.shuffle, data_format=config.data_format, dtype=config.dtype, prefetch=config.batch_size) # 10k Training set ds_validation = cifar_input.get_ds_from_tfrecords( data_dir=data_dir, split="validation", data_aug=False, batch_size=config.eval_batch_size, epochs=1, shuffle=False, data_format=config.data_format, dtype=config.dtype, prefetch=config.eval_batch_size) else: # 50k Training set ds_train = cifar_input.get_ds_from_tfrecords( data_dir=data_dir, split="train_all", data_aug=True, batch_size=config.batch_size, epochs=config.epochs, shuffle=config.shuffle, data_format=config.data_format, dtype=config.dtype, prefetch=config.batch_size) ds_validation = None # Always compute loss and accuracy on whole test set ds_train_one_shot = cifar_input.get_ds_from_tfrecords( data_dir=data_dir, split="train_all", data_aug=False, batch_size=config.eval_batch_size, epochs=1, shuffle=False, data_format=config.data_format, dtype=config.dtype, prefetch=config.eval_batch_size) ds_test = cifar_input.get_ds_from_tfrecords( data_dir=data_dir, split="test", data_aug=False, batch_size=config.eval_batch_size, epochs=1, shuffle=False, data_format=config.data_format, dtype=config.dtype, prefetch=config.eval_batch_size) return ds_train, ds_train_one_shot, ds_validation, ds_test
[ "def", "get_datasets", "(", "data_dir", ",", "config", ")", ":", "if", "data_dir", "is", "None", ":", "raise", "ValueError", "(", "\"No supplied data directory\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "data_dir", ")", ":", "raise", "ValueError", "(", "\"Data directory {} does not exist\"", ".", "format", "(", "data_dir", ")", ")", "if", "config", ".", "dataset", "not", "in", "[", "\"cifar-10\"", ",", "\"cifar-100\"", "]", ":", "raise", "ValueError", "(", "\"Unknown dataset {}\"", ".", "format", "(", "config", ".", "dataset", ")", ")", "print", "(", "\"Training on {} dataset.\"", ".", "format", "(", "config", ".", "dataset", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "data_dir", "=", "os", ".", "path", ".", "join", "(", "data_dir", ",", "config", ".", "dataset", ")", "if", "FLAGS", ".", "validate", ":", "# 40k Training set", "ds_train", "=", "cifar_input", ".", "get_ds_from_tfrecords", "(", "data_dir", "=", "data_dir", ",", "split", "=", "\"train\"", ",", "data_aug", "=", "True", ",", "batch_size", "=", "config", ".", "batch_size", ",", "epochs", "=", "config", ".", "epochs", ",", "shuffle", "=", "config", ".", "shuffle", ",", "data_format", "=", "config", ".", "data_format", ",", "dtype", "=", "config", ".", "dtype", ",", "prefetch", "=", "config", ".", "batch_size", ")", "# 10k Training set", "ds_validation", "=", "cifar_input", ".", "get_ds_from_tfrecords", "(", "data_dir", "=", "data_dir", ",", "split", "=", "\"validation\"", ",", "data_aug", "=", "False", ",", "batch_size", "=", "config", ".", "eval_batch_size", ",", "epochs", "=", "1", ",", "shuffle", "=", "False", ",", "data_format", "=", "config", ".", "data_format", ",", "dtype", "=", "config", ".", "dtype", ",", "prefetch", "=", "config", ".", "eval_batch_size", ")", "else", ":", "# 50k Training set", "ds_train", "=", "cifar_input", ".", "get_ds_from_tfrecords", "(", "data_dir", "=", "data_dir", ",", "split", "=", "\"train_all\"", ",", "data_aug", "=", "True", ",", "batch_size", "=", "config", ".", "batch_size", ",", "epochs", "=", "config", ".", "epochs", ",", "shuffle", "=", "config", ".", "shuffle", ",", "data_format", "=", "config", ".", "data_format", ",", "dtype", "=", "config", ".", "dtype", ",", "prefetch", "=", "config", ".", "batch_size", ")", "ds_validation", "=", "None", "# Always compute loss and accuracy on whole test set", "ds_train_one_shot", "=", "cifar_input", ".", "get_ds_from_tfrecords", "(", "data_dir", "=", "data_dir", ",", "split", "=", "\"train_all\"", ",", "data_aug", "=", "False", ",", "batch_size", "=", "config", ".", "eval_batch_size", ",", "epochs", "=", "1", ",", "shuffle", "=", "False", ",", "data_format", "=", "config", ".", "data_format", ",", "dtype", "=", "config", ".", "dtype", ",", "prefetch", "=", "config", ".", "eval_batch_size", ")", "ds_test", "=", "cifar_input", ".", "get_ds_from_tfrecords", "(", "data_dir", "=", "data_dir", ",", "split", "=", "\"test\"", ",", "data_aug", "=", "False", ",", "batch_size", "=", "config", ".", "eval_batch_size", ",", "epochs", "=", "1", ",", "shuffle", "=", "False", ",", "data_format", "=", "config", ".", "data_format", ",", "dtype", "=", "config", ".", "dtype", ",", "prefetch", "=", "config", ".", "eval_batch_size", ")", "return", "ds_train", ",", "ds_train_one_shot", ",", "ds_validation", ",", "ds_test" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/eager/python/examples/revnet/main.py#L130-L202
yun-liu/RCF
91bfb054ad04187dbbe21e539e165ad9bd3ff00b
tools/extra/extract_seconds.py
python
get_log_created_year
(input_file)
return log_created_year
Get year from log file system timestamp
Get year from log file system timestamp
[ "Get", "year", "from", "log", "file", "system", "timestamp" ]
def get_log_created_year(input_file): """Get year from log file system timestamp """ log_created_time = os.path.getctime(input_file) log_created_year = datetime.datetime.fromtimestamp(log_created_time).year return log_created_year
[ "def", "get_log_created_year", "(", "input_file", ")", ":", "log_created_time", "=", "os", ".", "path", ".", "getctime", "(", "input_file", ")", "log_created_year", "=", "datetime", ".", "datetime", ".", "fromtimestamp", "(", "log_created_time", ")", ".", "year", "return", "log_created_year" ]
https://github.com/yun-liu/RCF/blob/91bfb054ad04187dbbe21e539e165ad9bd3ff00b/tools/extra/extract_seconds.py#L22-L28
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
tools/telemetry/telemetry/core/browser_options.py
python
BrowserOptions.UpdateFromParseResults
(self, finder_options)
Copies our options from finder_options
Copies our options from finder_options
[ "Copies", "our", "options", "from", "finder_options" ]
def UpdateFromParseResults(self, finder_options): """Copies our options from finder_options""" browser_options_list = [ 'extra_browser_args_as_string', 'extra_wpr_args_as_string', 'netsim', 'profile_dir', 'profile_type', 'show_stdout', 'synthetic_gesture_source_type', ] for o in browser_options_list: a = getattr(finder_options, o, None) if a is not None: setattr(self, o, a) delattr(finder_options, o) self.browser_type = finder_options.browser_type if hasattr(self, 'extra_browser_args_as_string'): # pylint: disable=E1101 tmp = shlex.split( self.extra_browser_args_as_string) # pylint: disable=E1101 self.AppendExtraBrowserArgs(tmp) delattr(self, 'extra_browser_args_as_string') if hasattr(self, 'extra_wpr_args_as_string'): # pylint: disable=E1101 tmp = shlex.split( self.extra_wpr_args_as_string) # pylint: disable=E1101 self.extra_wpr_args.extend(tmp) delattr(self, 'extra_wpr_args_as_string') if self.profile_type == 'default': self.dont_override_profile = True if self.profile_dir and self.profile_type != 'clean': raise Exception("It's illegal to specify both --profile-type and" " --profile-dir.") if self.profile_dir and not os.path.isdir(self.profile_dir): raise Exception("Directory specified by --profile-dir (%s) doesn't" " exist or isn't a directory." % (self.profile_dir)) if not self.profile_dir: self.profile_dir = profile_types.GetProfileDir(self.profile_type) # This deferred import is necessary because browser_options is imported in # telemetry/telemetry/__init__.py. from telemetry.core.backends.chrome import chrome_browser_options finder_options.browser_options = ( chrome_browser_options.CreateChromeBrowserOptions(self))
[ "def", "UpdateFromParseResults", "(", "self", ",", "finder_options", ")", ":", "browser_options_list", "=", "[", "'extra_browser_args_as_string'", ",", "'extra_wpr_args_as_string'", ",", "'netsim'", ",", "'profile_dir'", ",", "'profile_type'", ",", "'show_stdout'", ",", "'synthetic_gesture_source_type'", ",", "]", "for", "o", "in", "browser_options_list", ":", "a", "=", "getattr", "(", "finder_options", ",", "o", ",", "None", ")", "if", "a", "is", "not", "None", ":", "setattr", "(", "self", ",", "o", ",", "a", ")", "delattr", "(", "finder_options", ",", "o", ")", "self", ".", "browser_type", "=", "finder_options", ".", "browser_type", "if", "hasattr", "(", "self", ",", "'extra_browser_args_as_string'", ")", ":", "# pylint: disable=E1101", "tmp", "=", "shlex", ".", "split", "(", "self", ".", "extra_browser_args_as_string", ")", "# pylint: disable=E1101", "self", ".", "AppendExtraBrowserArgs", "(", "tmp", ")", "delattr", "(", "self", ",", "'extra_browser_args_as_string'", ")", "if", "hasattr", "(", "self", ",", "'extra_wpr_args_as_string'", ")", ":", "# pylint: disable=E1101", "tmp", "=", "shlex", ".", "split", "(", "self", ".", "extra_wpr_args_as_string", ")", "# pylint: disable=E1101", "self", ".", "extra_wpr_args", ".", "extend", "(", "tmp", ")", "delattr", "(", "self", ",", "'extra_wpr_args_as_string'", ")", "if", "self", ".", "profile_type", "==", "'default'", ":", "self", ".", "dont_override_profile", "=", "True", "if", "self", ".", "profile_dir", "and", "self", ".", "profile_type", "!=", "'clean'", ":", "raise", "Exception", "(", "\"It's illegal to specify both --profile-type and\"", "\" --profile-dir.\"", ")", "if", "self", ".", "profile_dir", "and", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "profile_dir", ")", ":", "raise", "Exception", "(", "\"Directory specified by --profile-dir (%s) doesn't\"", "\" exist or isn't a directory.\"", "%", "(", "self", ".", "profile_dir", ")", ")", "if", "not", "self", ".", "profile_dir", ":", "self", ".", "profile_dir", "=", "profile_types", ".", "GetProfileDir", "(", "self", ".", "profile_type", ")", "# This deferred import is necessary because browser_options is imported in", "# telemetry/telemetry/__init__.py.", "from", "telemetry", ".", "core", ".", "backends", ".", "chrome", "import", "chrome_browser_options", "finder_options", ".", "browser_options", "=", "(", "chrome_browser_options", ".", "CreateChromeBrowserOptions", "(", "self", ")", ")" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/telemetry/telemetry/core/browser_options.py#L286-L333
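A minimal sketch of the shlex.split step the record above applies to extra_browser_args_as_string; the flag string here is invented for illustration, not taken from Telemetry:

```python
import shlex

# Hypothetical flag string; shlex.split honors shell-style quoting,
# so the quoted user agent stays a single argument.
extra = '--no-sandbox --user-agent="Test Agent" --window-size=1,1'
print(shlex.split(extra))
# ['--no-sandbox', '--user-agent=Test Agent', '--window-size=1,1']
```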
google/flatbuffers
b3006913369e0a7550795e477011ac5bebb93497
python/flatbuffers/table.py
python
Table.Offset
(self, vtableOffset)
return 0
Offset provides access into the Table's vtable. Deprecated fields are ignored by checking the vtable's length.
Offset provides access into the Table's vtable.
[ "Offset", "provides", "access", "into", "the", "Table", "s", "vtable", "." ]
def Offset(self, vtableOffset): """Offset provides access into the Table's vtable. Deprecated fields are ignored by checking the vtable's length.""" vtable = self.Pos - self.Get(N.SOffsetTFlags, self.Pos) vtableEnd = self.Get(N.VOffsetTFlags, vtable) if vtableOffset < vtableEnd: return self.Get(N.VOffsetTFlags, vtable + vtableOffset) return 0
[ "def", "Offset", "(", "self", ",", "vtableOffset", ")", ":", "vtable", "=", "self", ".", "Pos", "-", "self", ".", "Get", "(", "N", ".", "SOffsetTFlags", ",", "self", ".", "Pos", ")", "vtableEnd", "=", "self", ".", "Get", "(", "N", ".", "VOffsetTFlags", ",", "vtable", ")", "if", "vtableOffset", "<", "vtableEnd", ":", "return", "self", ".", "Get", "(", "N", ".", "VOffsetTFlags", ",", "vtable", "+", "vtableOffset", ")", "return", "0" ]
https://github.com/google/flatbuffers/blob/b3006913369e0a7550795e477011ac5bebb93497/python/flatbuffers/table.py#L32-L41
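To make the vtable walk above concrete, here is the same three-read sequence done with struct over a hand-built little-endian buffer; the layout (vtable at offset 0, table at offset 6) is a contrived example, not output of the flatbuffers builder:

```python
import struct

# vtable entries: [vtable_len=6, table_len=8, field0_rel_offset=4], followed by
# the table's SOffsetT at pos 6 pointing back to the vtable (pos - vtable = 6).
buf = struct.pack('<HHH', 6, 8, 4) + struct.pack('<i', 6)

pos = 6
vtable = pos - struct.unpack_from('<i', buf, pos)[0]     # 6 - 6 = 0
vtable_end = struct.unpack_from('<H', buf, vtable)[0]    # vtable length: 6
vtable_offset = 4                                        # slot of field 0
if vtable_offset < vtable_end:                           # deprecated fields fail this check
    print(struct.unpack_from('<H', buf, vtable + vtable_offset)[0])  # 4
```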
luliyucoordinate/Leetcode
96afcdc54807d1d184e881a075d1dbf3371e31fb
src/0087-Scramble-String/0087.py
python
Solution.isScramble
(self, s1, s2)
return False
:type s1: str :type s2: str :rtype: bool
:type s1: str :type s2: str :rtype: bool
[ ":", "type", "s1", ":", "str", ":", "type", "s2", ":", "str", ":", "rtype", ":", "bool" ]
def isScramble(self, s1, s2): """ :type s1: str :type s2: str :rtype: bool """ if len(s1) != len(s2): return False if s1 == s2: return True l1 = list(s1) l2 = list(s2) l1.sort() l2.sort() if l1 != l2: return False for i in range(1, len(s1)): if self.isScramble(s1[:i],s2[:i]) and self.isScramble(s1[i:],s2[i:]): return True if self.isScramble(s1[:i],s2[-i:]) and self.isScramble(s1[i:],s2[:-i]): return True return False
[ "def", "isScramble", "(", "self", ",", "s1", ",", "s2", ")", ":", "if", "len", "(", "s1", ")", "!=", "len", "(", "s2", ")", ":", "return", "False", "if", "s1", "==", "s2", ":", "return", "True", "l1", "=", "list", "(", "s1", ")", "l2", "=", "list", "(", "s2", ")", "l1", ".", "sort", "(", ")", "l2", ".", "sort", "(", ")", "if", "l1", "!=", "l2", ":", "return", "False", "for", "i", "in", "range", "(", "1", ",", "len", "(", "s1", ")", ")", ":", "if", "self", ".", "isScramble", "(", "s1", "[", ":", "i", "]", ",", "s2", "[", ":", "i", "]", ")", "and", "self", ".", "isScramble", "(", "s1", "[", "i", ":", "]", ",", "s2", "[", "i", ":", "]", ")", ":", "return", "True", "if", "self", ".", "isScramble", "(", "s1", "[", ":", "i", "]", ",", "s2", "[", "-", "i", ":", "]", ")", "and", "self", ".", "isScramble", "(", "s1", "[", "i", ":", "]", ",", "s2", "[", ":", "-", "i", "]", ")", ":", "return", "True", "return", "False" ]
https://github.com/luliyucoordinate/Leetcode/blob/96afcdc54807d1d184e881a075d1dbf3371e31fb/src/0087-Scramble-String/0087.py#L2-L27
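Assuming the Solution class from the record above is in scope, a quick usage check with the classic scramble pair; note the recursion is exponential without memoization:

```python
s = Solution()
print(s.isScramble("great", "rgeat"))  # True
print(s.isScramble("abcde", "caebd"))  # False
```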
apache/arrow
af33dd1157eb8d7d9bfac25ebf61445b793b7943
python/pyarrow/jvm.py
python
_from_jvm_float_type
(jvm_type)
Convert a JVM float type to its Python equivalent. Parameters ---------- jvm_type: org.apache.arrow.vector.types.pojo.ArrowType$FloatingPoint Returns ------- typ: pyarrow.DataType
Convert a JVM float type to its Python equivalent.
[ "Convert", "a", "JVM", "float", "type", "to", "its", "Python", "equivalent", "." ]
def _from_jvm_float_type(jvm_type): """ Convert a JVM float type to its Python equivalent. Parameters ---------- jvm_type: org.apache.arrow.vector.types.pojo.ArrowType$FloatingPoint Returns ------- typ: pyarrow.DataType """ precision = jvm_type.getPrecision().toString() if precision == 'HALF': return pa.float16() elif precision == 'SINGLE': return pa.float32() elif precision == 'DOUBLE': return pa.float64()
[ "def", "_from_jvm_float_type", "(", "jvm_type", ")", ":", "precision", "=", "jvm_type", ".", "getPrecision", "(", ")", ".", "toString", "(", ")", "if", "precision", "==", "'HALF'", ":", "return", "pa", ".", "float16", "(", ")", "elif", "precision", "==", "'SINGLE'", ":", "return", "pa", ".", "float32", "(", ")", "elif", "precision", "==", "'DOUBLE'", ":", "return", "pa", ".", "float64", "(", ")" ]
https://github.com/apache/arrow/blob/af33dd1157eb8d7d9bfac25ebf61445b793b7943/python/pyarrow/jvm.py#L106-L124
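The precision-string dispatch above can be exercised without a JVM by stubbing the two method calls it makes; the stub classes below are invented for illustration, and the snippet assumes _from_jvm_float_type from the record is defined alongside them:

```python
import pyarrow as pa

class FakePrecision:
    def __init__(self, name):
        self.name = name
    def toString(self):
        return self.name

class FakeJvmFloatType:
    def __init__(self, name):
        self.precision = FakePrecision(name)
    def getPrecision(self):
        return self.precision

for p in ('HALF', 'SINGLE', 'DOUBLE'):
    print(_from_jvm_float_type(FakeJvmFloatType(p)))  # halffloat, float, double
```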
epiqc/ScaffCC
66a79944ee4cd116b27bc1a69137276885461db8
llvm/bindings/python/llvm/object.py
python
Relocation.expire
(self)
Expire this instance, making future API accesses fail.
Expire this instance, making future API accesses fail.
[ "Expire", "this", "instance", "making", "future", "API", "accesses", "fail", "." ]
def expire(self): """Expire this instance, making future API accesses fail.""" self.expired = True
[ "def", "expire", "(", "self", ")", ":", "self", ".", "expired", "=", "True" ]
https://github.com/epiqc/ScaffCC/blob/66a79944ee4cd116b27bc1a69137276885461db8/llvm/bindings/python/llvm/object.py#L413-L415
microsoft/checkedc-clang
a173fefde5d7877b7750e7ce96dd08cf18baebf2
compiler-rt/lib/sanitizer_common/scripts/cpplint.py
python
_CppLintState.IncrementErrorCount
(self, category)
Bumps the module's error statistic.
Bumps the module's error statistic.
[ "Bumps", "the", "module", "s", "error", "statistic", "." ]
def IncrementErrorCount(self, category): """Bumps the module's error statistic.""" self.error_count += 1 if self.counting in ('toplevel', 'detailed'): if self.counting != 'detailed': category = category.split('/')[0] if category not in self.errors_by_category: self.errors_by_category[category] = 0 self.errors_by_category[category] += 1
[ "def", "IncrementErrorCount", "(", "self", ",", "category", ")", ":", "self", ".", "error_count", "+=", "1", "if", "self", ".", "counting", "in", "(", "'toplevel'", ",", "'detailed'", ")", ":", "if", "self", ".", "counting", "!=", "'detailed'", ":", "category", "=", "category", ".", "split", "(", "'/'", ")", "[", "0", "]", "if", "category", "not", "in", "self", ".", "errors_by_category", ":", "self", ".", "errors_by_category", "[", "category", "]", "=", "0", "self", ".", "errors_by_category", "[", "category", "]", "+=", "1" ]
https://github.com/microsoft/checkedc-clang/blob/a173fefde5d7877b7750e7ce96dd08cf18baebf2/compiler-rt/lib/sanitizer_common/scripts/cpplint.py#L943-L951
LiXizhi/NPLRuntime
a42720e5fe9a6960e0a9ce40bbbcd809192906be
Client/trunk/externals/assimp-4.0.0/port/PyAssimp/scripts/transformations.py
python
quaternion_about_axis
(angle, axis)
return quaternion
Return quaternion for rotation about axis. >>> q = quaternion_about_axis(0.123, (1, 0, 0)) >>> numpy.allclose(q, [0.06146124, 0, 0, 0.99810947]) True
Return quaternion for rotation about axis.
[ "Return", "quaternion", "for", "rotation", "about", "axis", "." ]
def quaternion_about_axis(angle, axis): """Return quaternion for rotation about axis. >>> q = quaternion_about_axis(0.123, (1, 0, 0)) >>> numpy.allclose(q, [0.06146124, 0, 0, 0.99810947]) True """ quaternion = numpy.zeros((4, ), dtype=numpy.float64) quaternion[:3] = axis[:3] qlen = vector_norm(quaternion) if qlen > _EPS: quaternion *= math.sin(angle/2.0) / qlen quaternion[3] = math.cos(angle/2.0) return quaternion
[ "def", "quaternion_about_axis", "(", "angle", ",", "axis", ")", ":", "quaternion", "=", "numpy", ".", "zeros", "(", "(", "4", ",", ")", ",", "dtype", "=", "numpy", ".", "float64", ")", "quaternion", "[", ":", "3", "]", "=", "axis", "[", ":", "3", "]", "qlen", "=", "vector_norm", "(", "quaternion", ")", "if", "qlen", ">", "_EPS", ":", "quaternion", "*=", "math", ".", "sin", "(", "angle", "/", "2.0", ")", "/", "qlen", "quaternion", "[", "3", "]", "=", "math", ".", "cos", "(", "angle", "/", "2.0", ")", "return", "quaternion" ]
https://github.com/LiXizhi/NPLRuntime/blob/a42720e5fe9a6960e0a9ce40bbbcd809192906be/Client/trunk/externals/assimp-4.0.0/port/PyAssimp/scripts/transformations.py#L1157-L1171
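The same quaternion can be reproduced with plain numpy, using numpy.linalg.norm as an assumed stand-in for the module's vector_norm helper and a literal epsilon for _EPS:

```python
import math
import numpy

angle, axis = 0.123, (1.0, 0.0, 0.0)
q = numpy.zeros(4)
q[:3] = axis
qlen = numpy.linalg.norm(q[:3])
if qlen > 1e-8:                      # stand-in for the module's _EPS guard
    q[:3] *= math.sin(angle / 2.0) / qlen
q[3] = math.cos(angle / 2.0)
print(numpy.allclose(q, [0.06146124, 0, 0, 0.99810947]))  # True
```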
eclipse/sumo
7132a9b8b6eea734bdec38479026b4d8c4336d03
tools/traci/main.py
python
init
(port=8813, numRetries=tc.DEFAULT_NUM_RETRIES, host="localhost", label="default", proc=None, doSwitch=True)
return _connections[label].getVersion()
Establish a connection to a TraCI-Server and store it under the given label. This method is not thread-safe. It accesses the connection pool concurrently.
Establish a connection to a TraCI-Server and store it under the given label. This method is not thread-safe. It accesses the connection pool concurrently.
[ "Establish", "a", "connection", "to", "a", "TraCI", "-", "Server", "and", "store", "it", "under", "the", "given", "label", ".", "This", "method", "is", "not", "thread", "-", "safe", ".", "It", "accesses", "the", "connection", "pool", "concurrently", "." ]
def init(port=8813, numRetries=tc.DEFAULT_NUM_RETRIES, host="localhost", label="default", proc=None, doSwitch=True): """ Establish a connection to a TraCI-Server and store it under the given label. This method is not thread-safe. It accesses the connection pool concurrently. """ _connections[label] = connect(port, numRetries, host, proc) if doSwitch: switch(label) return _connections[label].getVersion()
[ "def", "init", "(", "port", "=", "8813", ",", "numRetries", "=", "tc", ".", "DEFAULT_NUM_RETRIES", ",", "host", "=", "\"localhost\"", ",", "label", "=", "\"default\"", ",", "proc", "=", "None", ",", "doSwitch", "=", "True", ")", ":", "_connections", "[", "label", "]", "=", "connect", "(", "port", ",", "numRetries", ",", "host", ",", "proc", ")", "if", "doSwitch", ":", "switch", "(", "label", ")", "return", "_connections", "[", "label", "]", ".", "getVersion", "(", ")" ]
https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/traci/main.py#L131-L140
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_x86_64/python2.7/dist-packages/rospkg/distro.py
python
DvcsConfig.get_branch
(self, branch, anonymous)
:raises: :exc:`KeyError` Invalid branch parameter
:raises: :exc:`KeyError` Invalid branch parameter
[ ":", "raises", ":", ":", "exc", ":", "KeyError", "Invalid", "branch", "parameter" ]
def get_branch(self, branch, anonymous): """ :raises: :exc:`KeyError` Invalid branch parameter """ if branch == 'release-tar': return super(DvcsConfig, self).get_branch(branch, anonymous) elif branch == 'devel': version_tag = self.dev_branch elif branch == 'distro': version_tag = self.distro_tag elif branch == 'release': version_tag = self.release_tag else: raise ValueError("invalid branch spec [%s]"%(branch)) # occurs, for example, with unreleased stacks. Only devel is valid if version_tag is None: raise ValueError("branch [%s] is not available for this config"%(branch)) if anonymous: return self.anon_repo_uri, version_tag else: return self.repo_uri, version_tag
[ "def", "get_branch", "(", "self", ",", "branch", ",", "anonymous", ")", ":", "if", "branch", "==", "'release-tar'", ":", "return", "super", "(", "DvcsConfig", ",", "self", ")", ".", "get_branch", "(", "branch", ",", "anonymous", ")", "elif", "branch", "==", "'devel'", ":", "version_tag", "=", "self", ".", "dev_branch", "elif", "branch", "==", "'distro'", ":", "version_tag", "=", "self", ".", "distro_tag", "elif", "branch", "==", "'release'", ":", "version_tag", "=", "self", ".", "release_tag", "else", ":", "raise", "ValueError", "(", "\"invalid branch spec [%s]\"", "%", "(", "branch", ")", ")", "# occurs, for example, with unreleased stacks. Only devel is valid", "if", "version_tag", "is", "None", ":", "raise", "ValueError", "(", "\"branch [%s] is not available for this config\"", "%", "(", "branch", ")", ")", "if", "anonymous", ":", "return", "self", ".", "anon_repo_uri", ",", "version_tag", "else", ":", "return", "self", ".", "repo_uri", ",", "version_tag" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/rospkg/distro.py#L416-L436
klzgrad/naiveproxy
ed2c513637c77b18721fe428d7ed395b4d284c83
src/build/win/copy_cdb_to_output.py
python
_CopyCDBToOutput
(output_dir, target_arch)
return 0
Copies the Windows debugging executable cdb.exe to the output directory, which is created if it does not exist. The output directory, and target architecture that should be copied, are passed. Supported values for the target architecture are the GYP values "ia32", "x64", "arm64" and the GN values "x86", "x64", "arm64".
Copies the Windows debugging executable cdb.exe to the output directory, which is created if it does not exist. The output directory, and target architecture that should be copied, are passed. Supported values for the target architecture are the GYP values "ia32", "x64", "arm64" and the GN values "x86", "x64", "arm64".
[ "Copies", "the", "Windows", "debugging", "executable", "cdb", ".", "exe", "to", "the", "output", "directory", "which", "is", "created", "if", "it", "does", "not", "exist", ".", "The", "output", "directory", "and", "target", "architecture", "that", "should", "be", "copied", "are", "passed", ".", "Supported", "values", "for", "the", "target", "architecture", "are", "the", "GYP", "values", "ia32", "x64", "arm64", "and", "the", "GN", "values", "x86", "x64", "arm64", "." ]
def _CopyCDBToOutput(output_dir, target_arch): """Copies the Windows debugging executable cdb.exe to the output directory, which is created if it does not exist. The output directory, and target architecture that should be copied, are passed. Supported values for the target architecture are the GYP values "ia32", "x64", "arm64" and the GN values "x86", "x64", "arm64". """ _ConditionalMkdir(output_dir) vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs() # If WINDOWSSDKDIR is not set use the default SDK path. This will be the case # when DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run. win_sdk_dir = os.path.normpath( os.environ.get('WINDOWSSDKDIR', os.path.expandvars('%ProgramFiles(x86)%' '\\Windows Kits\\10'))) if target_arch == 'ia32' or target_arch == 'x86': src_arch = 'x86' elif target_arch in ['x64', 'arm64']: src_arch = target_arch else: print('copy_cdb_to_output.py: unknown target_arch %s' % target_arch) sys.exit(1) # We need to copy multiple files, so cache the computed source directory. src_dir = os.path.join(win_sdk_dir, 'Debuggers', src_arch) # We need to copy some helper DLLs to get access to the !uniqstack # command to dump all threads' stacks. src_winext_dir = os.path.join(src_dir, 'winext') dst_winext_dir = os.path.join(output_dir, 'winext') src_winxp_dir = os.path.join(src_dir, 'winxp') dst_winxp_dir = os.path.join(output_dir, 'winxp') # Starting with the 10.0.17763 SDK the ucrt files are in a version-named # directory - this handles both cases. redist_dir = os.path.join(win_sdk_dir, 'Redist') version_dirs = glob.glob(os.path.join(redist_dir, '10.*')) if len(version_dirs) > 0: version_dirs.sort(reverse=True) redist_dir = version_dirs[0] src_crt_dir = os.path.join(redist_dir, 'ucrt', 'DLLs', src_arch) _ConditionalMkdir(dst_winext_dir) _ConditionalMkdir(dst_winxp_dir) # Note that the outputs from the "copy_cdb_to_output" target need to # be kept in sync with this list. _CopyImpl('cdb.exe', output_dir, src_dir) _CopyImpl('dbgeng.dll', output_dir, src_dir) _CopyImpl('dbghelp.dll', output_dir, src_dir) _CopyImpl('dbgmodel.dll', output_dir, src_dir) _CopyImpl('ext.dll', dst_winext_dir, src_winext_dir) _CopyImpl('uext.dll', dst_winext_dir, src_winext_dir) _CopyImpl('exts.dll', dst_winxp_dir, src_winxp_dir) _CopyImpl('ntsdexts.dll', dst_winxp_dir, src_winxp_dir) if src_arch in ['x64', 'x86']: # Copy all UCRT files from the debuggers directory, for compatibility with # the Windows 10 18362 SDK (one UCRT file) and later versions (two UCRT # files). The new file is api-ms-win-downlevel-kernel32-l2-1-0.dll and # should be added to the copy_cdb_to_output outputs when we require a newer # SDK. for file in glob.glob(os.path.join(src_dir, 'api-ms-win*.dll')): _CopyImpl(os.path.split(file)[1], output_dir, src_dir) _CopyImpl('ucrtbase.dll', output_dir, src_crt_dir) for dll_path in glob.glob(os.path.join(src_crt_dir, 'api-ms-win-*.dll')): _CopyImpl(os.path.split(dll_path)[1], output_dir, src_crt_dir) return 0
[ "def", "_CopyCDBToOutput", "(", "output_dir", ",", "target_arch", ")", ":", "_ConditionalMkdir", "(", "output_dir", ")", "vs_toolchain", ".", "SetEnvironmentAndGetRuntimeDllDirs", "(", ")", "# If WINDOWSSDKDIR is not set use the default SDK path. This will be the case", "# when DEPOT_TOOLS_WIN_TOOLCHAIN=0 and vcvarsall.bat has not been run.", "win_sdk_dir", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "environ", ".", "get", "(", "'WINDOWSSDKDIR'", ",", "os", ".", "path", ".", "expandvars", "(", "'%ProgramFiles(x86)%'", "'\\\\Windows Kits\\\\10'", ")", ")", ")", "if", "target_arch", "==", "'ia32'", "or", "target_arch", "==", "'x86'", ":", "src_arch", "=", "'x86'", "elif", "target_arch", "in", "[", "'x64'", ",", "'arm64'", "]", ":", "src_arch", "=", "target_arch", "else", ":", "print", "(", "'copy_cdb_to_output.py: unknown target_arch %s'", "%", "target_arch", ")", "sys", ".", "exit", "(", "1", ")", "# We need to copy multiple files, so cache the computed source directory.", "src_dir", "=", "os", ".", "path", ".", "join", "(", "win_sdk_dir", ",", "'Debuggers'", ",", "src_arch", ")", "# We need to copy some helper DLLs to get access to the !uniqstack", "# command to dump all threads' stacks.", "src_winext_dir", "=", "os", ".", "path", ".", "join", "(", "src_dir", ",", "'winext'", ")", "dst_winext_dir", "=", "os", ".", "path", ".", "join", "(", "output_dir", ",", "'winext'", ")", "src_winxp_dir", "=", "os", ".", "path", ".", "join", "(", "src_dir", ",", "'winxp'", ")", "dst_winxp_dir", "=", "os", ".", "path", ".", "join", "(", "output_dir", ",", "'winxp'", ")", "# Starting with the 10.0.17763 SDK the ucrt files are in a version-named", "# directory - this handles both cases.", "redist_dir", "=", "os", ".", "path", ".", "join", "(", "win_sdk_dir", ",", "'Redist'", ")", "version_dirs", "=", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "redist_dir", ",", "'10.*'", ")", ")", "if", "len", "(", "version_dirs", ")", ">", "0", ":", "version_dirs", ".", "sort", "(", "reverse", "=", "True", ")", "redist_dir", "=", "version_dirs", "[", "0", "]", "src_crt_dir", "=", "os", ".", "path", ".", "join", "(", "redist_dir", ",", "'ucrt'", ",", "'DLLs'", ",", "src_arch", ")", "_ConditionalMkdir", "(", "dst_winext_dir", ")", "_ConditionalMkdir", "(", "dst_winxp_dir", ")", "# Note that the outputs from the \"copy_cdb_to_output\" target need to", "# be kept in sync with this list.", "_CopyImpl", "(", "'cdb.exe'", ",", "output_dir", ",", "src_dir", ")", "_CopyImpl", "(", "'dbgeng.dll'", ",", "output_dir", ",", "src_dir", ")", "_CopyImpl", "(", "'dbghelp.dll'", ",", "output_dir", ",", "src_dir", ")", "_CopyImpl", "(", "'dbgmodel.dll'", ",", "output_dir", ",", "src_dir", ")", "_CopyImpl", "(", "'ext.dll'", ",", "dst_winext_dir", ",", "src_winext_dir", ")", "_CopyImpl", "(", "'uext.dll'", ",", "dst_winext_dir", ",", "src_winext_dir", ")", "_CopyImpl", "(", "'exts.dll'", ",", "dst_winxp_dir", ",", "src_winxp_dir", ")", "_CopyImpl", "(", "'ntsdexts.dll'", ",", "dst_winxp_dir", ",", "src_winxp_dir", ")", "if", "src_arch", "in", "[", "'x64'", ",", "'x86'", "]", ":", "# Copy all UCRT files from the debuggers directory, for compatibility with", "# the Windows 10 18362 SDK (one UCRT file) and later versions (two UCRT", "# files). 
The new file is api-ms-win-downlevel-kernel32-l2-1-0.dll and", "# should be added to the copy_cdb_to_output outputs when we require a newer", "# SDK.", "for", "file", "in", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "src_dir", ",", "'api-ms-win*.dll'", ")", ")", ":", "_CopyImpl", "(", "os", ".", "path", ".", "split", "(", "file", ")", "[", "1", "]", ",", "output_dir", ",", "src_dir", ")", "_CopyImpl", "(", "'ucrtbase.dll'", ",", "output_dir", ",", "src_crt_dir", ")", "for", "dll_path", "in", "glob", ".", "glob", "(", "os", ".", "path", ".", "join", "(", "src_crt_dir", ",", "'api-ms-win-*.dll'", ")", ")", ":", "_CopyImpl", "(", "os", ".", "path", ".", "split", "(", "dll_path", ")", "[", "1", "]", ",", "output_dir", ",", "src_crt_dir", ")", "return", "0" ]
https://github.com/klzgrad/naiveproxy/blob/ed2c513637c77b18721fe428d7ed395b4d284c83/src/build/win/copy_cdb_to_output.py#L54-L115
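The Redist handling above picks the newest versioned SDK subdirectory by reverse lexicographic sort; a standalone sketch (the SDK path is hypothetical, and the snippet falls through unchanged when nothing matches). A plain string sort only selects the newest build as long as the version components keep the same digit count:

```python
import glob
import os

redist_dir = r'C:\Program Files (x86)\Windows Kits\10\Redist'  # hypothetical
version_dirs = glob.glob(os.path.join(redist_dir, '10.*'))
if len(version_dirs) > 0:
    version_dirs.sort(reverse=True)  # '10.0.19041.0' sorts above '10.0.17763.0'
    redist_dir = version_dirs[0]
print(redist_dir)
```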
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/random.py
python
Random.expovariate
(self, lambd)
return -_log(1.0 - self.random())/lambd
Exponential distribution. lambd is 1.0 divided by the desired mean. It should be nonzero. (The parameter would be called "lambda", but that is a reserved word in Python.) Returned values range from 0 to positive infinity if lambd is positive, and from negative infinity to 0 if lambd is negative.
Exponential distribution.
[ "Exponential", "distribution", "." ]
def expovariate(self, lambd): """Exponential distribution. lambd is 1.0 divided by the desired mean. It should be nonzero. (The parameter would be called "lambda", but that is a reserved word in Python.) Returned values range from 0 to positive infinity if lambd is positive, and from negative infinity to 0 if lambd is negative. """ # lambd: rate lambd = 1/mean # ('lambda' is a Python reserved word) # we use 1-random() instead of random() to preclude the # possibility of taking the log of zero. return -_log(1.0 - self.random())/lambd
[ "def", "expovariate", "(", "self", ",", "lambd", ")", ":", "# lambd: rate lambd = 1/mean", "# ('lambda' is a Python reserved word)", "# we use 1-random() instead of random() to preclude the", "# possibility of taking the log of zero.", "return", "-", "_log", "(", "1.0", "-", "self", ".", "random", "(", ")", ")", "/", "lambd" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/random.py#L437-L452
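A quick empirical check of the docstring's claim that lambd is 1.0 divided by the desired mean:

```python
import random

random.seed(42)
lambd = 0.5                                    # mean should be 1 / 0.5 = 2
samples = [random.expovariate(lambd) for _ in range(100_000)]
print(sum(samples) / len(samples))             # ~2.0
```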
wujian16/Cornell-MOE
df299d1be882d2af9796d7a68b3f9505cac7a53e
moe/optimal_learning/python/base_prior.py
python
TophatPrior.sample_from_prior
(self, n_samples)
return p0[:, np.newaxis]
Returns N samples from the prior. Parameters ---------- n_samples : int The number of samples that will be drawn. Returns ------- (N, D) np.array The samples from the prior.
Returns N samples from the prior.
[ "Returns", "N", "samples", "from", "the", "prior", "." ]
def sample_from_prior(self, n_samples): """ Returns N samples from the prior. Parameters ---------- n_samples : int The number of samples that will be drawn. Returns ------- (N, D) np.array The samples from the prior. """ p0 = self.min + self.rng.rand(n_samples) * (self.max - self.min) return p0[:, np.newaxis]
[ "def", "sample_from_prior", "(", "self", ",", "n_samples", ")", ":", "p0", "=", "self", ".", "min", "+", "self", ".", "rng", ".", "rand", "(", "n_samples", ")", "*", "(", "self", ".", "max", "-", "self", ".", "min", ")", "return", "p0", "[", ":", ",", "np", ".", "newaxis", "]" ]
https://github.com/wujian16/Cornell-MOE/blob/df299d1be882d2af9796d7a68b3f9505cac7a53e/moe/optimal_learning/python/base_prior.py#L125-L141
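The uniform rescaling trick above, reproduced with a seeded numpy RandomState so the (N, 1) output shape and the support bounds are visible:

```python
import numpy as np

rng = np.random.RandomState(0)
lo, hi, n_samples = -2.0, 3.0, 5
p0 = lo + rng.rand(n_samples) * (hi - lo)   # uniform draws on [lo, hi)
print(p0[:, np.newaxis].shape)              # (5, 1)
print(p0.min() >= lo and p0.max() < hi)     # True
```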
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/python/ops/data_flow_ops.py
python
QueueBase.__init__
(self, dtypes, shapes, names, queue_ref)
Constructs a queue object from a queue reference. The two optional lists, `shapes` and `names`, must be of the same length as `dtypes` if provided. The values at a given index `i` indicate the shape and name to use for the corresponding queue component in `dtypes`. Args: dtypes: A list of types. The length of dtypes must equal the number of tensors in each element. shapes: Constraints on the shapes of tensors in an element: A list of shape tuples or None. This list is the same length as dtypes. If the shape of any tensors in the element are constrained, all must be; shapes can be None if the shapes should not be constrained. names: Optional list of names. If provided, the `enqueue()` and `dequeue()` methods will use dictionaries with these names as keys. Must be None or a list or tuple of the same length as `dtypes`. queue_ref: The queue reference, i.e. the output of the queue op. Raises: ValueError: If one of the arguments is invalid.
Constructs a queue object from a queue reference.
[ "Constructs", "a", "queue", "object", "from", "a", "queue", "reference", "." ]
def __init__(self, dtypes, shapes, names, queue_ref): """Constructs a queue object from a queue reference. The two optional lists, `shapes` and `names`, must be of the same length as `dtypes` if provided. The values at a given index `i` indicate the shape and name to use for the corresponding queue component in `dtypes`. Args: dtypes: A list of types. The length of dtypes must equal the number of tensors in each element. shapes: Constraints on the shapes of tensors in an element: A list of shape tuples or None. This list is the same length as dtypes. If the shape of any tensors in the element are constrained, all must be; shapes can be None if the shapes should not be constrained. names: Optional list of names. If provided, the `enqueue()` and `dequeue()` methods will use dictionaries with these names as keys. Must be None or a list or tuple of the same length as `dtypes`. queue_ref: The queue reference, i.e. the output of the queue op. Raises: ValueError: If one of the arguments is invalid. """ self._dtypes = dtypes if shapes is not None: if len(shapes) != len(dtypes): raise ValueError("Queue shapes must have the same length as dtypes") self._shapes = [tensor_shape.TensorShape(s) for s in shapes] else: self._shapes = [tensor_shape.unknown_shape() for _ in self._dtypes] if names is not None: if len(names) != len(dtypes): raise ValueError("Queue names must have the same length as dtypes") self._names = names else: self._names = None self._queue_ref = queue_ref self._name = self._queue_ref.op.name.split("/")[-1]
[ "def", "__init__", "(", "self", ",", "dtypes", ",", "shapes", ",", "names", ",", "queue_ref", ")", ":", "self", ".", "_dtypes", "=", "dtypes", "if", "shapes", "is", "not", "None", ":", "if", "len", "(", "shapes", ")", "!=", "len", "(", "dtypes", ")", ":", "raise", "ValueError", "(", "\"Queue shapes must have the same length as dtypes\"", ")", "self", ".", "_shapes", "=", "[", "tensor_shape", ".", "TensorShape", "(", "s", ")", "for", "s", "in", "shapes", "]", "else", ":", "self", ".", "_shapes", "=", "[", "tensor_shape", ".", "unknown_shape", "(", ")", "for", "_", "in", "self", ".", "_dtypes", "]", "if", "names", "is", "not", "None", ":", "if", "len", "(", "names", ")", "!=", "len", "(", "dtypes", ")", ":", "raise", "ValueError", "(", "\"Queue names must have the same length as dtypes\"", ")", "self", ".", "_names", "=", "names", "else", ":", "self", ".", "_names", "=", "None", "self", ".", "_queue_ref", "=", "queue_ref", "self", ".", "_name", "=", "self", ".", "_queue_ref", ".", "op", ".", "name", ".", "split", "(", "\"/\"", ")", "[", "-", "1", "]" ]
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/python/ops/data_flow_ops.py#L122-L158
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/winpython/wppm.py
python
Distribution.install
(self, package)
Install package in distribution
Install package in distribution
[ "Install", "package", "in", "distribution" ]
def install(self, package): """Install package in distribution""" assert package.is_compatible_with(self) tmp_fname = None self.uninstall_existing(package) if package.fname.endswith(('.tar.gz', '.zip')): self._print(package, "Building") try: fname = utils.source_to_wininst(package.fname, python_exe=osp.join(self.target, 'python.exe'), architecture=self.architecture, verbose=self.verbose) except RuntimeError: if not self.verbose: print("Failed!") raise tmp_fname = fname package = Package(fname) self._print_done() bname = osp.basename(package.fname) if bname.endswith('.exe'): if re.match(r'(' + ('|'.join(self.NSIS_PACKAGES)) + r')-', bname): self.install_nsis_package(package) else: self.install_bdist_wininst(package) elif bname.endswith('.msi'): self.install_bdist_msi(package) self.handle_specific_packages(package) package.save_log(self.logdir) if tmp_fname is not None: os.remove(tmp_fname)
[ "def", "install", "(", "self", ",", "package", ")", ":", "assert", "package", ".", "is_compatible_with", "(", "self", ")", "tmp_fname", "=", "None", "self", ".", "uninstall_existing", "(", "package", ")", "if", "package", ".", "fname", ".", "endswith", "(", "(", "'.tar.gz'", ",", "'.zip'", ")", ")", ":", "self", ".", "_print", "(", "package", ",", "\"Building\"", ")", "try", ":", "fname", "=", "utils", ".", "source_to_wininst", "(", "package", ".", "fname", ",", "python_exe", "=", "osp", ".", "join", "(", "self", ".", "target", ",", "'python.exe'", ")", ",", "architecture", "=", "self", ".", "architecture", ",", "verbose", "=", "self", ".", "verbose", ")", "except", "RuntimeError", ":", "if", "not", "self", ".", "verbose", ":", "print", "(", "\"Failed!\"", ")", "raise", "tmp_fname", "=", "fname", "package", "=", "Package", "(", "fname", ")", "self", ".", "_print_done", "(", ")", "bname", "=", "osp", ".", "basename", "(", "package", ".", "fname", ")", "if", "bname", ".", "endswith", "(", "'.exe'", ")", ":", "if", "re", ".", "match", "(", "r'('", "+", "(", "'|'", ".", "join", "(", "self", ".", "NSIS_PACKAGES", ")", ")", "+", "r')-'", ",", "bname", ")", ":", "self", ".", "install_nsis_package", "(", "package", ")", "else", ":", "self", ".", "install_bdist_wininst", "(", "package", ")", "elif", "bname", ".", "endswith", "(", "'.msi'", ")", ":", "self", ".", "install_bdist_msi", "(", "package", ")", "self", ".", "handle_specific_packages", "(", "package", ")", "package", ".", "save_log", "(", "self", ".", "logdir", ")", "if", "tmp_fname", "is", "not", "None", ":", "os", ".", "remove", "(", "tmp_fname", ")" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/winpython/wppm.py#L317-L346
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_misc.py
python
JoystickEvent.GetJoystick
(*args, **kwargs)
return _misc_.JoystickEvent_GetJoystick(*args, **kwargs)
GetJoystick(self) -> int
GetJoystick(self) -> int
[ "GetJoystick", "(", "self", ")", "-", ">", "int" ]
def GetJoystick(*args, **kwargs): """GetJoystick(self) -> int""" return _misc_.JoystickEvent_GetJoystick(*args, **kwargs)
[ "def", "GetJoystick", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_misc_", ".", "JoystickEvent_GetJoystick", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_misc.py#L2358-L2360
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/httplib2/upload-diffs.py
python
SubversionVCS._GetInfo
(self, key)
Parses 'svn info' for current dir. Returns value for key or None
Parses 'svn info' for current dir. Returns value for key or None
[ "Parses", "svn", "info", "for", "current", "dir", ".", "Returns", "value", "for", "key", "or", "None" ]
def _GetInfo(self, key): """Parses 'svn info' for current dir. Returns value for key or None""" for line in RunShell(["svn", "info"]).splitlines(): if line.startswith(key + ": "): return line.split(":", 1)[1].strip()
[ "def", "_GetInfo", "(", "self", ",", "key", ")", ":", "for", "line", "in", "RunShell", "(", "[", "\"svn\"", ",", "\"info\"", "]", ")", ".", "splitlines", "(", ")", ":", "if", "line", ".", "startswith", "(", "key", "+", "\": \"", ")", ":", "return", "line", ".", "split", "(", "\":\"", ",", "1", ")", "[", "1", "]", ".", "strip", "(", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/httplib2/upload-diffs.py#L1009-L1013
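The key-scan above works on any captured 'svn info'-style text; here it is replayed over an explicit string instead of a live checkout (the sample output is invented):

```python
def get_info(text, key):
    # same line scan as the record above
    for line in text.splitlines():
        if line.startswith(key + ": "):
            return line.split(":", 1)[1].strip()

sample = "Path: .\nURL: https://svn.example.org/repo/trunk\nRevision: 42\n"
print(get_info(sample, "URL"))       # https://svn.example.org/repo/trunk
print(get_info(sample, "Revision"))  # 42
```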
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py
python
BaseEventLoop.call_later
(self, delay, callback, *args, context=None)
return timer
Arrange for a callback to be called at a given time. Return a Handle: an opaque object with a cancel() method that can be used to cancel the call. The delay can be an int or float, expressed in seconds. It is always relative to the current time. Each callback will be called exactly once. If two callbacks are scheduled for exactly the same time, it undefined which will be called first. Any positional arguments after the callback will be passed to the callback when it is called.
Arrange for a callback to be called at a given time.
[ "Arrange", "for", "a", "callback", "to", "be", "called", "at", "a", "given", "time", "." ]
def call_later(self, delay, callback, *args, context=None): """Arrange for a callback to be called at a given time. Return a Handle: an opaque object with a cancel() method that can be used to cancel the call. The delay can be an int or float, expressed in seconds. It is always relative to the current time. Each callback will be called exactly once. If two callbacks are scheduled for exactly the same time, it undefined which will be called first. Any positional arguments after the callback will be passed to the callback when it is called. """ timer = self.call_at(self.time() + delay, callback, *args, context=context) if timer._source_traceback: del timer._source_traceback[-1] return timer
[ "def", "call_later", "(", "self", ",", "delay", ",", "callback", ",", "*", "args", ",", "context", "=", "None", ")", ":", "timer", "=", "self", ".", "call_at", "(", "self", ".", "time", "(", ")", "+", "delay", ",", "callback", ",", "*", "args", ",", "context", "=", "context", ")", "if", "timer", ".", "_source_traceback", ":", "del", "timer", ".", "_source_traceback", "[", "-", "1", "]", "return", "timer" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py#L643-L663
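A self-contained demonstration of the relative scheduling: the callback is an ordinary function, the positional arguments follow it, and the returned handle's cancel() could drop the call before it fires:

```python
import asyncio

async def main():
    loop = asyncio.get_running_loop()
    done = loop.create_future()
    loop.call_later(0.1, done.set_result, "fired")  # args passed positionally
    print(await done)                               # fired, ~0.1 s later

asyncio.run(main())
```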
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py3/sklearn/pipeline.py
python
Pipeline.predict_proba
(self, X)
return self.steps[-1][-1].predict_proba(Xt)
Apply transforms, and predict_proba of the final estimator Parameters ---------- X : iterable Data to predict on. Must fulfill input requirements of first step of the pipeline. Returns ------- y_proba : array-like of shape (n_samples, n_classes)
Apply transforms, and predict_proba of the final estimator
[ "Apply", "transforms", "and", "predict_proba", "of", "the", "final", "estimator" ]
def predict_proba(self, X): """Apply transforms, and predict_proba of the final estimator Parameters ---------- X : iterable Data to predict on. Must fulfill input requirements of first step of the pipeline. Returns ------- y_proba : array-like of shape (n_samples, n_classes) """ Xt = X for _, name, transform in self._iter(with_final=False): Xt = transform.transform(Xt) return self.steps[-1][-1].predict_proba(Xt)
[ "def", "predict_proba", "(", "self", ",", "X", ")", ":", "Xt", "=", "X", "for", "_", ",", "name", ",", "transform", "in", "self", ".", "_iter", "(", "with_final", "=", "False", ")", ":", "Xt", "=", "transform", ".", "transform", "(", "Xt", ")", "return", "self", ".", "steps", "[", "-", "1", "]", "[", "-", "1", "]", ".", "predict_proba", "(", "Xt", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py3/sklearn/pipeline.py#L456-L472
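End to end, the method transforms through every step but the last and then defers to the final estimator's predict_proba; a minimal pipeline showing the resulting (n_samples, n_classes) shape:

```python
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler

X, y = make_classification(n_samples=100, random_state=0)
pipe = Pipeline([('scale', StandardScaler()), ('clf', LogisticRegression())])
pipe.fit(X, y)
print(pipe.predict_proba(X[:2]).shape)  # (2, 2)
```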
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/plistlib.py
python
_BinaryPlistParser._get_size
(self, tokenL)
return tokenL
return the size of the next object.
return the size of the next object.
[ "return", "the", "size", "of", "the", "next", "object", "." ]
def _get_size(self, tokenL): """ return the size of the next object.""" if tokenL == 0xF: m = self._fp.read(1)[0] & 0x3 s = 1 << m f = '>' + _BINARY_FORMAT[s] return struct.unpack(f, self._fp.read(s))[0] return tokenL
[ "def", "_get_size", "(", "self", ",", "tokenL", ")", ":", "if", "tokenL", "==", "0xF", ":", "m", "=", "self", ".", "_fp", ".", "read", "(", "1", ")", "[", "0", "]", "&", "0x3", "s", "=", "1", "<<", "m", "f", "=", "'>'", "+", "_BINARY_FORMAT", "[", "s", "]", "return", "struct", ".", "unpack", "(", "f", ",", "self", ".", "_fp", ".", "read", "(", "s", ")", ")", "[", "0", "]", "return", "tokenL" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/plistlib.py#L574-L582
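The token decoding above, replayed over an explicit BytesIO stream: a 0xF length nibble means the real size follows, with the next byte's low two bits giving log2 of the size field's width. The standalone helper below mirrors the record's logic under that assumption:

```python
import io
import struct

_BINARY_FORMAT = {1: 'B', 2: 'H', 4: 'L', 8: 'Q'}

def get_size(fp, tokenL):
    # same logic as the record above, with the file object passed in
    if tokenL == 0xF:
        m = fp.read(1)[0] & 0x3
        s = 1 << m
        return struct.unpack('>' + _BINARY_FORMAT[s], fp.read(s))[0]
    return tokenL

print(get_size(io.BytesIO(b''), 0x3))              # small sizes are literal: 3
print(get_size(io.BytesIO(b'\x11\x01\x2c'), 0xF))  # uint16 follows: 300
```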
apache/incubator-mxnet
f03fb23f1d103fec9541b5ae59ee06b1734a51d9
python/mxnet/symbol/symbol.py
python
Symbol.zeros_like
(self, *args, **kwargs)
return op.zeros_like(self, *args, **kwargs)
Convenience fluent method for :py:func:`zeros_like`. The arguments are the same as for :py:func:`zeros_like`, with this array as data.
Convenience fluent method for :py:func:`zeros_like`.
[ "Convenience", "fluent", "method", "for", ":", "py", ":", "func", ":", "zeros_like", "." ]
def zeros_like(self, *args, **kwargs): """Convenience fluent method for :py:func:`zeros_like`. The arguments are the same as for :py:func:`zeros_like`, with this array as data. """ return op.zeros_like(self, *args, **kwargs)
[ "def", "zeros_like", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "op", ".", "zeros_like", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/symbol/symbol.py#L1973-L1979
rsummers11/CADLab
976ed959a0b5208bb4173127a7ef732ac73a9b6f
MULAN_universal_lesion_analysis/maskrcnn/data/datasets/DeepLesion.py
python
DeepLesionDataset.loadinfo
(self, path)
load annotations and meta-info from DL_info.csv
load annotations and meta-info from DL_info.csv
[ "load", "annotations", "and", "meta", "-", "info", "from", "DL_info", ".", "csv" ]
def loadinfo(self, path): """load annotations and meta-info from DL_info.csv""" info = [] with open(path) as csvfile: reader = csv.reader(csvfile) for row in reader: filename = row[0] # replace the last _ in filename with / or \ idx = filename.rindex('_') row[0] = filename[:idx] + os.sep + filename[idx + 1:] info.append(row) info = info[1:] # the information not used in this project are commented self.filenames = np.array([row[0] for row in info]) # self.patient_idx = np.array([int(row[1]) for row in info]) # self.study_idx = np.array([int(row[2]) for row in info]) # self.series_idx = np.array([int(row[3]) for row in info]) self.slice_idx = np.array([int(row[4]) for row in info]) self.d_coordinate = np.array([[float(x) for x in row[5].split(',')] for row in info]) self.d_coordinate -= 1 self.boxes = np.array([[float(x) for x in row[6].split(',')] for row in info]) self.boxes -= 1 # coordinates in info file start from 1 self.diameter = np.array([[float(x) for x in row[7].split(',')] for row in info]) self.norm_location = np.array([[float(x) for x in row[8].split(',')] for row in info]) # self.type = np.array([int(row[9]) for row in info]) self.noisy = np.array([int(row[10]) > 0 for row in info]) # self.slice_range = np.array([[int(x) for x in row[11].split(',')] for row in info]) self.spacing3D = np.array([[float(x) for x in row[12].split(',')] for row in info]) self.spacing = self.spacing3D[:, 0] self.slice_intv = self.spacing3D[:, 2] # slice intervals # self.image_size = np.array([[int(x) for x in row[13].split(',')] for row in info]) self.DICOM_window = np.array([[float(x) for x in row[14].split(',')] for row in info]) self.gender = np.array([row[15] for row in info]) self.age = np.array([float(row[16]) for row in info]) # may be NaN self.train_val_test = np.array([int(row[17]) for row in info])
[ "def", "loadinfo", "(", "self", ",", "path", ")", ":", "info", "=", "[", "]", "with", "open", "(", "path", ")", "as", "csvfile", ":", "reader", "=", "csv", ".", "reader", "(", "csvfile", ")", "for", "row", "in", "reader", ":", "filename", "=", "row", "[", "0", "]", "# replace the last _ in filename with / or \\", "idx", "=", "filename", ".", "rindex", "(", "'_'", ")", "row", "[", "0", "]", "=", "filename", "[", ":", "idx", "]", "+", "os", ".", "sep", "+", "filename", "[", "idx", "+", "1", ":", "]", "info", ".", "append", "(", "row", ")", "info", "=", "info", "[", "1", ":", "]", "# the information not used in this project are commented", "self", ".", "filenames", "=", "np", ".", "array", "(", "[", "row", "[", "0", "]", "for", "row", "in", "info", "]", ")", "# self.patient_idx = np.array([int(row[1]) for row in info])", "# self.study_idx = np.array([int(row[2]) for row in info])", "# self.series_idx = np.array([int(row[3]) for row in info])", "self", ".", "slice_idx", "=", "np", ".", "array", "(", "[", "int", "(", "row", "[", "4", "]", ")", "for", "row", "in", "info", "]", ")", "self", ".", "d_coordinate", "=", "np", ".", "array", "(", "[", "[", "float", "(", "x", ")", "for", "x", "in", "row", "[", "5", "]", ".", "split", "(", "','", ")", "]", "for", "row", "in", "info", "]", ")", "self", ".", "d_coordinate", "-=", "1", "self", ".", "boxes", "=", "np", ".", "array", "(", "[", "[", "float", "(", "x", ")", "for", "x", "in", "row", "[", "6", "]", ".", "split", "(", "','", ")", "]", "for", "row", "in", "info", "]", ")", "self", ".", "boxes", "-=", "1", "# coordinates in info file start from 1", "self", ".", "diameter", "=", "np", ".", "array", "(", "[", "[", "float", "(", "x", ")", "for", "x", "in", "row", "[", "7", "]", ".", "split", "(", "','", ")", "]", "for", "row", "in", "info", "]", ")", "self", ".", "norm_location", "=", "np", ".", "array", "(", "[", "[", "float", "(", "x", ")", "for", "x", "in", "row", "[", "8", "]", ".", "split", "(", "','", ")", "]", "for", "row", "in", "info", "]", ")", "# self.type = np.array([int(row[9]) for row in info])", "self", ".", "noisy", "=", "np", ".", "array", "(", "[", "int", "(", "row", "[", "10", "]", ")", ">", "0", "for", "row", "in", "info", "]", ")", "# self.slice_range = np.array([[int(x) for x in row[11].split(',')] for row in info])", "self", ".", "spacing3D", "=", "np", ".", "array", "(", "[", "[", "float", "(", "x", ")", "for", "x", "in", "row", "[", "12", "]", ".", "split", "(", "','", ")", "]", "for", "row", "in", "info", "]", ")", "self", ".", "spacing", "=", "self", ".", "spacing3D", "[", ":", ",", "0", "]", "self", ".", "slice_intv", "=", "self", ".", "spacing3D", "[", ":", ",", "2", "]", "# slice intervals", "# self.image_size = np.array([[int(x) for x in row[13].split(',')] for row in info])", "self", ".", "DICOM_window", "=", "np", ".", "array", "(", "[", "[", "float", "(", "x", ")", "for", "x", "in", "row", "[", "14", "]", ".", "split", "(", "','", ")", "]", "for", "row", "in", "info", "]", ")", "self", ".", "gender", "=", "np", ".", "array", "(", "[", "row", "[", "15", "]", "for", "row", "in", "info", "]", ")", "self", ".", "age", "=", "np", ".", "array", "(", "[", "float", "(", "row", "[", "16", "]", ")", "for", "row", "in", "info", "]", ")", "# may be NaN", "self", ".", "train_val_test", "=", "np", ".", "array", "(", "[", "int", "(", "row", "[", "17", "]", ")", "for", "row", "in", "info", "]", ")" ]
https://github.com/rsummers11/CADLab/blob/976ed959a0b5208bb4173127a7ef732ac73a9b6f/MULAN_universal_lesion_analysis/maskrcnn/data/datasets/DeepLesion.py#L241-L275
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/jinja2/meta.py
python
TrackingCodeGenerator.write
(self, x)
Don't write.
Don't write.
[ "Don", "t", "write", "." ]
def write(self, x): """Don't write."""
[ "def", "write", "(", "self", ",", "x", ")", ":" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/jinja2/meta.py#L25-L26
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/configure.py
python
create_android_sdk_rule
(environ_cp)
Set Android variables and write Android SDK WORKSPACE rule.
Set Android variables and write Android SDK WORKSPACE rule.
[ "Set", "Android", "variables", "and", "write", "Android", "SDK", "WORKSPACE", "rule", "." ]
def create_android_sdk_rule(environ_cp): """Set Android variables and write Android SDK WORKSPACE rule.""" if is_windows() or is_cygwin(): default_sdk_path = cygpath('%s/Android/Sdk' % environ_cp['APPDATA']) elif is_macos(): default_sdk_path = '%s/library/Android/Sdk' % environ_cp['HOME'] else: default_sdk_path = '%s/Android/Sdk' % environ_cp['HOME'] def valid_sdk_path(path): return (os.path.exists(path) and os.path.exists(os.path.join(path, 'platforms')) and os.path.exists(os.path.join(path, 'build-tools'))) android_sdk_home_path = prompt_loop_or_load_from_env( environ_cp, var_name='ANDROID_SDK_HOME', var_default=default_sdk_path, ask_for_var='Please specify the home path of the Android SDK to use.', check_success=valid_sdk_path, error_msg=('Either %s does not exist, or it does not contain the ' 'subdirectories "platforms" and "build-tools".')) platforms = os.path.join(android_sdk_home_path, 'platforms') api_levels = sorted(os.listdir(platforms)) api_levels = [x.replace('android-', '') for x in api_levels] def valid_api_level(api_level): return os.path.exists( os.path.join(android_sdk_home_path, 'platforms', 'android-' + api_level)) android_api_level = prompt_loop_or_load_from_env( environ_cp, var_name='ANDROID_API_LEVEL', var_default=api_levels[-1], ask_for_var=('Please specify the Android SDK API level to use. ' '[Available levels: %s]') % api_levels, check_success=valid_api_level, error_msg='Android-%s is not present in the SDK path.') build_tools = os.path.join(android_sdk_home_path, 'build-tools') versions = sorted(os.listdir(build_tools)) def valid_build_tools(version): return os.path.exists( os.path.join(android_sdk_home_path, 'build-tools', version)) android_build_tools_version = prompt_loop_or_load_from_env( environ_cp, var_name='ANDROID_BUILD_TOOLS_VERSION', var_default=versions[-1], ask_for_var=('Please specify an Android build tools version to use. ' '[Available versions: %s]') % versions, check_success=valid_build_tools, error_msg=('The selected SDK does not have build-tools version %s ' 'available.')) write_action_env_to_bazelrc('ANDROID_BUILD_TOOLS_VERSION', android_build_tools_version) write_action_env_to_bazelrc('ANDROID_SDK_API_LEVEL', android_api_level) write_action_env_to_bazelrc('ANDROID_SDK_HOME', android_sdk_home_path)
[ "def", "create_android_sdk_rule", "(", "environ_cp", ")", ":", "if", "is_windows", "(", ")", "or", "is_cygwin", "(", ")", ":", "default_sdk_path", "=", "cygpath", "(", "'%s/Android/Sdk'", "%", "environ_cp", "[", "'APPDATA'", "]", ")", "elif", "is_macos", "(", ")", ":", "default_sdk_path", "=", "'%s/library/Android/Sdk'", "%", "environ_cp", "[", "'HOME'", "]", "else", ":", "default_sdk_path", "=", "'%s/Android/Sdk'", "%", "environ_cp", "[", "'HOME'", "]", "def", "valid_sdk_path", "(", "path", ")", ":", "return", "(", "os", ".", "path", ".", "exists", "(", "path", ")", "and", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'platforms'", ")", ")", "and", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "path", ",", "'build-tools'", ")", ")", ")", "android_sdk_home_path", "=", "prompt_loop_or_load_from_env", "(", "environ_cp", ",", "var_name", "=", "'ANDROID_SDK_HOME'", ",", "var_default", "=", "default_sdk_path", ",", "ask_for_var", "=", "'Please specify the home path of the Android SDK to use.'", ",", "check_success", "=", "valid_sdk_path", ",", "error_msg", "=", "(", "'Either %s does not exist, or it does not contain the '", "'subdirectories \"platforms\" and \"build-tools\".'", ")", ")", "platforms", "=", "os", ".", "path", ".", "join", "(", "android_sdk_home_path", ",", "'platforms'", ")", "api_levels", "=", "sorted", "(", "os", ".", "listdir", "(", "platforms", ")", ")", "api_levels", "=", "[", "x", ".", "replace", "(", "'android-'", ",", "''", ")", "for", "x", "in", "api_levels", "]", "def", "valid_api_level", "(", "api_level", ")", ":", "return", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "android_sdk_home_path", ",", "'platforms'", ",", "'android-'", "+", "api_level", ")", ")", "android_api_level", "=", "prompt_loop_or_load_from_env", "(", "environ_cp", ",", "var_name", "=", "'ANDROID_API_LEVEL'", ",", "var_default", "=", "api_levels", "[", "-", "1", "]", ",", "ask_for_var", "=", "(", "'Please specify the Android SDK API level to use. '", "'[Available levels: %s]'", ")", "%", "api_levels", ",", "check_success", "=", "valid_api_level", ",", "error_msg", "=", "'Android-%s is not present in the SDK path.'", ")", "build_tools", "=", "os", ".", "path", ".", "join", "(", "android_sdk_home_path", ",", "'build-tools'", ")", "versions", "=", "sorted", "(", "os", ".", "listdir", "(", "build_tools", ")", ")", "def", "valid_build_tools", "(", "version", ")", ":", "return", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "android_sdk_home_path", ",", "'build-tools'", ",", "version", ")", ")", "android_build_tools_version", "=", "prompt_loop_or_load_from_env", "(", "environ_cp", ",", "var_name", "=", "'ANDROID_BUILD_TOOLS_VERSION'", ",", "var_default", "=", "versions", "[", "-", "1", "]", ",", "ask_for_var", "=", "(", "'Please specify an Android build tools version to use. '", "'[Available versions: %s]'", ")", "%", "versions", ",", "check_success", "=", "valid_build_tools", ",", "error_msg", "=", "(", "'The selected SDK does not have build-tools version %s '", "'available.'", ")", ")", "write_action_env_to_bazelrc", "(", "'ANDROID_BUILD_TOOLS_VERSION'", ",", "android_build_tools_version", ")", "write_action_env_to_bazelrc", "(", "'ANDROID_SDK_API_LEVEL'", ",", "android_api_level", ")", "write_action_env_to_bazelrc", "(", "'ANDROID_SDK_HOME'", ",", "android_sdk_home_path", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/configure.py#L713-L774
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/datetime.py
python
time.dst
(self)
return offset
Return 0 if DST is not in effect, or the DST offset (as timedelta positive eastward) if DST is in effect. This is purely informational; the DST offset has already been added to the UTC offset returned by utcoffset() if applicable, so there's no need to consult dst() unless you're interested in displaying the DST info.
Return 0 if DST is not in effect, or the DST offset (as timedelta positive eastward) if DST is in effect.
[ "Return", "0", "if", "DST", "is", "not", "in", "effect", "or", "the", "DST", "offset", "(", "as", "timedelta", "positive", "eastward", ")", "if", "DST", "is", "in", "effect", "." ]
def dst(self): """Return 0 if DST is not in effect, or the DST offset (as timedelta positive eastward) if DST is in effect. This is purely informational; the DST offset has already been added to the UTC offset returned by utcoffset() if applicable, so there's no need to consult dst() unless you're interested in displaying the DST info. """ if self._tzinfo is None: return None offset = self._tzinfo.dst(None) _check_utc_offset("dst", offset) return offset
[ "def", "dst", "(", "self", ")", ":", "if", "self", ".", "_tzinfo", "is", "None", ":", "return", "None", "offset", "=", "self", ".", "_tzinfo", ".", "dst", "(", "None", ")", "_check_utc_offset", "(", "\"dst\"", ",", "offset", ")", "return", "offset" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/datetime.py#L1437-L1450
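For a fixed-offset datetime.timezone, dst() is purely informational and returns None, while utcoffset() carries the whole offset, matching the docstring's note that any DST amount is already folded in:

```python
from datetime import time, timedelta, timezone

t = time(12, 0, tzinfo=timezone(timedelta(hours=2)))
print(t.utcoffset())  # 2:00:00
print(t.dst())        # None
```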
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/decimal.py
python
Decimal.is_subnormal
(self, context=None)
return self.adjusted() < context.Emin
Return True if self is subnormal; otherwise return False.
Return True if self is subnormal; otherwise return False.
[ "Return", "True", "if", "self", "is", "subnormal", ";", "otherwise", "return", "False", "." ]
def is_subnormal(self, context=None): """Return True if self is subnormal; otherwise return False.""" if self._is_special or not self: return False if context is None: context = getcontext() return self.adjusted() < context.Emin
[ "def", "is_subnormal", "(", "self", ",", "context", "=", "None", ")", ":", "if", "self", ".", "_is_special", "or", "not", "self", ":", "return", "False", "if", "context", "is", "None", ":", "context", "=", "getcontext", "(", ")", "return", "self", ".", "adjusted", "(", ")", "<", "context", ".", "Emin" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/decimal.py#L3051-L3057
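The check compares the adjusted exponent against the context's Emin; constructing a Decimal from a string keeps the tiny exponent intact, so the default context flags it:

```python
from decimal import Decimal, getcontext

print(getcontext().Emin)                     # -999999 in the default context
print(Decimal('1E-1000000').is_subnormal())  # True: adjusted() < Emin
print(Decimal('1').is_subnormal())           # False
print(Decimal('0').is_subnormal())           # False: zero is special-cased
```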
zhaoweicai/mscnn
534bcac5710a579d60827f192035f7eef6d8c585
scripts/cpp_lint.py
python
CheckForCopyright
(filename, lines, error)
Logs an error if a Copyright message appears at the top of the file.
Logs an error if a Copyright message appears at the top of the file.
[ "Logs", "an", "error", "if", "a", "Copyright", "message", "appears", "at", "the", "top", "of", "the", "file", "." ]
def CheckForCopyright(filename, lines, error): """Logs an error if a Copyright message appears at the top of the file.""" # We'll check up to line 10. Don't forget there's a # dummy line at the front. for line in xrange(1, min(len(lines), 11)): if _RE_COPYRIGHT.search(lines[line], re.I): error(filename, 0, 'legal/copyright', 5, 'Copyright message found. ' 'You should not include a copyright line.')
[ "def", "CheckForCopyright", "(", "filename", ",", "lines", ",", "error", ")", ":", "# We'll check up to line 10. Don't forget there's a", "# dummy line at the front.", "for", "line", "in", "xrange", "(", "1", ",", "min", "(", "len", "(", "lines", ")", ",", "11", ")", ")", ":", "if", "_RE_COPYRIGHT", ".", "search", "(", "lines", "[", "line", "]", ",", "re", ".", "I", ")", ":", "error", "(", "filename", ",", "0", ",", "'legal/copyright'", ",", "5", ",", "'Copyright message found. '", "'You should not include a copyright line.'", ")" ]
https://github.com/zhaoweicai/mscnn/blob/534bcac5710a579d60827f192035f7eef6d8c585/scripts/cpp_lint.py#L1372-L1381
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
src/external/xgboost/subtree/rabit/tracker/rabit_tracker.py
python
Tracker.get_link_map
(self, nslave)
return tree_map_, parent_map_, ring_map_
get the link map, this is a bit hacky, call for better algorithm to place similar nodes together
get the link map, this is a bit hacky, call for better algorithm to place similar nodes together
[ "get", "the", "link", "map", "this", "is", "a", "bit", "hacky", "call", "for", "better", "algorithm", "to", "place", "similar", "nodes", "together" ]
def get_link_map(self, nslave): """ get the link map, this is a bit hacky, call for better algorithm to place similar nodes together """ tree_map, parent_map = self.get_tree(nslave) ring_map = self.get_ring(tree_map, parent_map) rmap = {0 : 0} k = 0 for i in range(nslave - 1): k = ring_map[k][1] rmap[k] = i + 1 ring_map_ = {} tree_map_ = {} parent_map_ ={} for k, v in ring_map.items(): ring_map_[rmap[k]] = (rmap[v[0]], rmap[v[1]]) for k, v in tree_map.items(): tree_map_[rmap[k]] = [rmap[x] for x in v] for k, v in parent_map.items(): if k != 0: parent_map_[rmap[k]] = rmap[v] else: parent_map_[rmap[k]] = -1 return tree_map_, parent_map_, ring_map_
[ "def", "get_link_map", "(", "self", ",", "nslave", ")", ":", "tree_map", ",", "parent_map", "=", "self", ".", "get_tree", "(", "nslave", ")", "ring_map", "=", "self", ".", "get_ring", "(", "tree_map", ",", "parent_map", ")", "rmap", "=", "{", "0", ":", "0", "}", "k", "=", "0", "for", "i", "in", "range", "(", "nslave", "-", "1", ")", ":", "k", "=", "ring_map", "[", "k", "]", "[", "1", "]", "rmap", "[", "k", "]", "=", "i", "+", "1", "ring_map_", "=", "{", "}", "tree_map_", "=", "{", "}", "parent_map_", "=", "{", "}", "for", "k", ",", "v", "in", "ring_map", ".", "items", "(", ")", ":", "ring_map_", "[", "rmap", "[", "k", "]", "]", "=", "(", "rmap", "[", "v", "[", "0", "]", "]", ",", "rmap", "[", "v", "[", "1", "]", "]", ")", "for", "k", ",", "v", "in", "tree_map", ".", "items", "(", ")", ":", "tree_map_", "[", "rmap", "[", "k", "]", "]", "=", "[", "rmap", "[", "x", "]", "for", "x", "in", "v", "]", "for", "k", ",", "v", "in", "parent_map", ".", "items", "(", ")", ":", "if", "k", "!=", "0", ":", "parent_map_", "[", "rmap", "[", "k", "]", "]", "=", "rmap", "[", "v", "]", "else", ":", "parent_map_", "[", "rmap", "[", "k", "]", "]", "=", "-", "1", "return", "tree_map_", ",", "parent_map_", ",", "ring_map_" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/xgboost/subtree/rabit/tracker/rabit_tracker.py#L208-L233
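The non-obvious step in get_link_map above is the rank remapping, which renumbers nodes so that ring neighbours get consecutive ranks. A self-contained sketch with a made-up 4-node ring (node -> (prev, next)):

# Walk the ring via the "next" pointer, assigning consecutive ranks.
ring_map = {0: (2, 1), 1: (0, 3), 3: (1, 2), 2: (3, 0)}
nslave = 4

rmap = {0: 0}
k = 0
for i in range(nslave - 1):
    k = ring_map[k][1]
    rmap[k] = i + 1

print(rmap)  # {0: 0, 1: 1, 3: 2, 2: 3}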
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_controls.py
python
PrePyPickerBase
(*args, **kwargs)
return val
PrePyPickerBase() -> PyPickerBase
PrePyPickerBase() -> PyPickerBase
[ "PrePyPickerBase", "()", "-", ">", "PyPickerBase" ]
def PrePyPickerBase(*args, **kwargs): """PrePyPickerBase() -> PyPickerBase""" val = _controls_.new_PrePyPickerBase(*args, **kwargs) return val
[ "def", "PrePyPickerBase", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "val", "=", "_controls_", ".", "new_PrePyPickerBase", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "val" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_controls.py#L6905-L6908
eventql/eventql
7ca0dbb2e683b525620ea30dc40540a22d5eb227
deps/3rdparty/spidermonkey/mozjs/python/psutil/psutil/_pssunos.py
python
cpu_count_physical
()
return cext.cpu_count_phys()
Return the number of physical CPUs in the system.
Return the number of physical CPUs in the system.
[ "Return", "the", "number", "of", "physical", "CPUs", "in", "the", "system", "." ]
def cpu_count_physical(): """Return the number of physical CPUs in the system.""" return cext.cpu_count_phys()
[ "def", "cpu_count_physical", "(", ")", ":", "return", "cext", ".", "cpu_count_phys", "(", ")" ]
https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/psutil/psutil/_pssunos.py#L149-L151
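For callers, the public psutil entry point wraps this function; a minimal usage sketch, assuming psutil is installed (the physical count can be None on platforms where it cannot be determined):

import psutil

print(psutil.cpu_count())               # logical CPUs
print(psutil.cpu_count(logical=False))  # physical cores, or None if unknown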
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
src/external/coremltools_wrap/coremltools/coremltools/converters/keras/_layers.py
python
convert_flatten
(builder, layer, input_names, output_names, keras_layer)
Convert a flatten layer from keras to coreml. Parameters ---------- keras_layer: layer A keras layer object. builder: NeuralNetworkBuilder A neural network builder object.
Convert a flatten layer from keras to coreml.
[ "Convert", "a", "flatten", "layer", "from", "keras", "to", "coreml", "." ]
def convert_flatten(builder, layer, input_names, output_names, keras_layer): """Convert a flatten layer from keras to coreml. Parameters keras_layer: layer ---------- A keras layer object. builder: NeuralNetworkBuilder A neural network builder object. """ input_name, output_name = (input_names[0], output_names[0]) # blob_order == 0 if the input blob needs not be rearranged # blob_order == 1 if the input blob needs to be rearranged blob_order = 0 # using keras_layer.input.shape have a "?" (Dimension[None] at the front), # making a 3D tensor with unknown batch size 4D if len(keras_layer.input.shape) == 4: blob_order = 1 builder.add_flatten( name=layer, mode=blob_order, input_name=input_name, output_name=output_name )
[ "def", "convert_flatten", "(", "builder", ",", "layer", ",", "input_names", ",", "output_names", ",", "keras_layer", ")", ":", "input_name", ",", "output_name", "=", "(", "input_names", "[", "0", "]", ",", "output_names", "[", "0", "]", ")", "# blob_order == 0 if the input blob needs not be rearranged", "# blob_order == 1 if the input blob needs to be rearranged", "blob_order", "=", "0", "# using keras_layer.input.shape have a \"?\" (Dimension[None] at the front),", "# making a 3D tensor with unknown batch size 4D", "if", "len", "(", "keras_layer", ".", "input", ".", "shape", ")", "==", "4", ":", "blob_order", "=", "1", "builder", ".", "add_flatten", "(", "name", "=", "layer", ",", "mode", "=", "blob_order", ",", "input_name", "=", "input_name", ",", "output_name", "=", "output_name", ")" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/coremltools_wrap/coremltools/coremltools/converters/keras/_layers.py#L983-L1007
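The only branching in convert_flatten above is the rank check that selects the flatten mode; a tiny sketch of that decision (the shape tuples are illustrative Keras-style (batch, H, W, C)):

def flatten_mode(input_shape):
    # Rank-4 inputs need the rearranged flatten (blob_order == 1).
    return 1 if len(input_shape) == 4 else 0

assert flatten_mode((None, 28, 28, 3)) == 1
assert flatten_mode((None, 10, 16)) == 0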
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/site-packages/pkg_resources/__init__.py
python
get_default_cache
()
return ( os.environ.get('PYTHON_EGG_CACHE') or appdirs.user_cache_dir(appname='Python-Eggs') )
Return the ``PYTHON_EGG_CACHE`` environment variable or a platform-relevant user cache dir for an app named "Python-Eggs".
Return the ``PYTHON_EGG_CACHE`` environment variable or a platform-relevant user cache dir for an app named "Python-Eggs".
[ "Return", "the", "PYTHON_EGG_CACHE", "environment", "variable", "or", "a", "platform", "-", "relevant", "user", "cache", "dir", "for", "an", "app", "named", "Python", "-", "Eggs", "." ]
def get_default_cache(): """ Return the ``PYTHON_EGG_CACHE`` environment variable or a platform-relevant user cache dir for an app named "Python-Eggs". """ return ( os.environ.get('PYTHON_EGG_CACHE') or appdirs.user_cache_dir(appname='Python-Eggs') )
[ "def", "get_default_cache", "(", ")", ":", "return", "(", "os", ".", "environ", ".", "get", "(", "'PYTHON_EGG_CACHE'", ")", "or", "appdirs", ".", "user_cache_dir", "(", "appname", "=", "'Python-Eggs'", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pkg_resources/__init__.py#L1307-L1316
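A minimal sketch of the precedence in get_default_cache above: the environment variable wins, otherwise a per-user cache directory. The fallback path here stands in for appdirs.user_cache_dir(appname='Python-Eggs') so the sketch runs without appdirs:

import os

def default_cache(fallback='/tmp/python-eggs'):
    # fallback is a stand-in for the appdirs per-user cache directory.
    return os.environ.get('PYTHON_EGG_CACHE') or fallback

os.environ['PYTHON_EGG_CACHE'] = '/opt/eggs'
print(default_cache())   # /opt/eggs -- the environment variable wins
os.environ.pop('PYTHON_EGG_CACHE')
print(default_cache())   # /tmp/python-eggs -- the fallback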
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
deps/src/libxml2-2.9.1/python/libxml2class.py
python
createInputBuffer
(file, encoding)
return inputBuffer(_obj=ret)
Create a libxml2 input buffer from a Python file
Create a libxml2 input buffer from a Python file
[ "Create", "a", "libxml2", "input", "buffer", "from", "a", "Python", "file" ]
def createInputBuffer(file, encoding): """Create a libxml2 input buffer from a Python file """ ret = libxml2mod.xmlCreateInputBuffer(file, encoding) if ret is None:raise treeError('xmlCreateInputBuffer() failed') return inputBuffer(_obj=ret)
[ "def", "createInputBuffer", "(", "file", ",", "encoding", ")", ":", "ret", "=", "libxml2mod", ".", "xmlCreateInputBuffer", "(", "file", ",", "encoding", ")", "if", "ret", "is", "None", ":", "raise", "treeError", "(", "'xmlCreateInputBuffer() failed'", ")", "return", "inputBuffer", "(", "_obj", "=", "ret", ")" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L771-L775
krishauser/Klampt
972cc83ea5befac3f653c1ba20f80155768ad519
Python/python2_version/klampt/model/trajectory.py
python
Trajectory.discretize
(self,dt)
return self.discretize_state(dt)
Returns a trajectory, uniformly discretized at resolution dt, and with state-space the same as its configuration space. Similar to discretize_state, but if the state space is of higher dimension (e.g., Hermite trajectories) this projects to a piecewise linear trajectory.
Returns a trajectory, uniformly discretized at resolution dt, and with state-space the same as its configuration space. Similar to discretize_state, but if the state space is of higher dimension (e.g., Hermite trajectories) this projects to a piecewise linear trajectory.
[ "Returns", "a", "trajectory", "uniformly", "discretized", "at", "resolution", "dt", "and", "with", "state", "-", "space", "the", "same", "as", "its", "configuration", "space", ".", "Similar", "to", "discretize", "but", "if", "the", "state", "space", "is", "of", "higher", "dimension", "(", "e", ".", "g", ".", "Hermite", "trajectories", ")", "this", "projects", "to", "a", "piecewise", "linear", "trajectory", "." ]
def discretize(self,dt): """Returns a trajectory, uniformly discretized at resolution dt, and with state-space the same as its configuration space. Similar to discretize, but if the state space is of higher dimension (e.g., Hermite trajectories) this projects to a piecewise linear trajectory. """ return self.discretize_state(dt)
[ "def", "discretize", "(", "self", ",", "dt", ")", ":", "return", "self", ".", "discretize_state", "(", "dt", ")" ]
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/model/trajectory.py#L390-L396
emscripten-core/emscripten
0d413d3c5af8b28349682496edc14656f5700c2f
third_party/ply/example/ansic/cparse.py
python
p_relational_expression_2
(t)
relational_expression : relational_expression LT shift_expression
relational_expression : relational_expression LT shift_expression
[ "relational_expression", ":", "relational_expression", "LT", "shift_expression" ]
def p_relational_expression_2(t): 'relational_expression : relational_expression LT shift_expression' pass
[ "def", "p_relational_expression_2", "(", "t", ")", ":", "pass" ]
https://github.com/emscripten-core/emscripten/blob/0d413d3c5af8b28349682496edc14656f5700c2f/third_party/ply/example/ansic/cparse.py#L685-L687
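Context for the record above: in PLY, the grammar production is the function's docstring, and the parser calls the function with a YaccProduction t. The original action just passes; an action that built an AST node might look like this (the tuple shape is purely illustrative):

def p_relational_expression_2(t):
    'relational_expression : relational_expression LT shift_expression'
    t[0] = ('<', t[1], t[3])  # left operand, operator, right operand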
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py2/sklearn/feature_extraction/image.py
python
img_to_graph
(img, mask=None, return_as=sparse.coo_matrix, dtype=None)
return _to_graph(n_x, n_y, n_z, mask, img, return_as, dtype)
Graph of the pixel-to-pixel gradient connections Edges are weighted with the gradient values. Read more in the :ref:`User Guide <image_feature_extraction>`. Parameters ---------- img : ndarray, 2D or 3D 2D or 3D image mask : ndarray of booleans, optional An optional mask of the image, to consider only part of the pixels. return_as : np.ndarray or a sparse matrix class, optional The class to use to build the returned adjacency matrix. dtype : None or dtype, optional The data of the returned sparse matrix. By default it is the dtype of img Notes ----- For scikit-learn versions 0.14.1 and prior, return_as=np.ndarray was handled by returning a dense np.matrix instance. Going forward, np.ndarray returns an np.ndarray, as expected. For compatibility, user code relying on this method should wrap its calls in ``np.asarray`` to avoid type issues.
Graph of the pixel-to-pixel gradient connections
[ "Graph", "of", "the", "pixel", "-", "to", "-", "pixel", "gradient", "connections" ]
def img_to_graph(img, mask=None, return_as=sparse.coo_matrix, dtype=None): """Graph of the pixel-to-pixel gradient connections Edges are weighted with the gradient values. Read more in the :ref:`User Guide <image_feature_extraction>`. Parameters ---------- img : ndarray, 2D or 3D 2D or 3D image mask : ndarray of booleans, optional An optional mask of the image, to consider only part of the pixels. return_as : np.ndarray or a sparse matrix class, optional The class to use to build the returned adjacency matrix. dtype : None or dtype, optional The data of the returned sparse matrix. By default it is the dtype of img Notes ----- For scikit-learn versions 0.14.1 and prior, return_as=np.ndarray was handled by returning a dense np.matrix instance. Going forward, np.ndarray returns an np.ndarray, as expected. For compatibility, user code relying on this method should wrap its calls in ``np.asarray`` to avoid type issues. """ img = np.atleast_3d(img) n_x, n_y, n_z = img.shape return _to_graph(n_x, n_y, n_z, mask, img, return_as, dtype)
[ "def", "img_to_graph", "(", "img", ",", "mask", "=", "None", ",", "return_as", "=", "sparse", ".", "coo_matrix", ",", "dtype", "=", "None", ")", ":", "img", "=", "np", ".", "atleast_3d", "(", "img", ")", "n_x", ",", "n_y", ",", "n_z", "=", "img", ".", "shape", "return", "_to_graph", "(", "n_x", ",", "n_y", ",", "n_z", ",", "mask", ",", "img", ",", "return_as", ",", "dtype", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/feature_extraction/image.py#L133-L164
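A minimal usage sketch for img_to_graph above, assuming numpy and scikit-learn are installed:

import numpy as np
from sklearn.feature_extraction.image import img_to_graph

img = np.arange(9, dtype=float).reshape(3, 3)
graph = img_to_graph(img)    # sparse COO adjacency, one node per pixel
print(graph.shape)           # (9, 9)
dense = img_to_graph(img, return_as=np.ndarray)  # ndarray, not np.matrix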
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/sqs/queue.py
python
Queue.delete_message_batch
(self, messages)
return self.connection.delete_message_batch(self, messages)
Deletes a list of messages in a single request. :type messages: List of :class:`boto.sqs.message.Message` objects. :param messages: A list of message objects.
Deletes a list of messages in a single request.
[ "Deletes", "a", "list", "of", "messages", "in", "a", "single", "request", "." ]
def delete_message_batch(self, messages): """ Deletes a list of messages in a single request. :type messages: List of :class:`boto.sqs.message.Message` objects. :param messages: A list of message objects. """ return self.connection.delete_message_batch(self, messages)
[ "def", "delete_message_batch", "(", "self", ",", "messages", ")", ":", "return", "self", ".", "connection", ".", "delete_message_batch", "(", "self", ",", "messages", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/sqs/queue.py#L316-L323
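delete_message_batch above forwards straight to the connection; the practical constraint callers hit is that SQS batch calls accept at most 10 entries per request, so message lists are usually chunked first. A generic chunking sketch (plain Python, not boto API):

def chunks(seq, n=10):
    # SQS batch operations are capped at 10 entries per request.
    for i in range(0, len(seq), n):
        yield seq[i:i + n]

print([len(batch) for batch in chunks(list(range(25)))])  # [10, 10, 5]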
carla-simulator/carla
8854804f4d7748e14d937ec763a2912823a7e5f5
PythonAPI/carla/agents/navigation/behavior_agent.py
python
BehaviorAgent.car_following_manager
(self, vehicle, distance, debug=False)
return control
Module in charge of car-following behaviors when there's someone in front of us. :param vehicle: car to follow :param distance: distance from vehicle :param debug: boolean for debugging :return control: carla.VehicleControl
Module in charge of car-following behaviors when there's someone in front of us.
[ "Module", "in", "charge", "of", "car", "-", "following", "behaviors", "when", "there", "s", "someone", "in", "front", "of", "us", "." ]
def car_following_manager(self, vehicle, distance, debug=False): """ Module in charge of car-following behaviors when there's someone in front of us. :param vehicle: car to follow :param distance: distance from vehicle :param debug: boolean for debugging :return control: carla.VehicleControl """ vehicle_speed = get_speed(vehicle) delta_v = max(1, (self._speed - vehicle_speed) / 3.6) ttc = distance / delta_v if delta_v != 0 else distance / np.nextafter(0., 1.) # Under safety time distance, slow down. if self._behavior.safety_time > ttc > 0.0: target_speed = min([ positive(vehicle_speed - self._behavior.speed_decrease), self._behavior.max_speed, self._speed_limit - self._behavior.speed_lim_dist]) self._local_planner.set_speed(target_speed) control = self._local_planner.run_step(debug=debug) # Actual safety distance area, try to follow the speed of the vehicle in front. elif 2 * self._behavior.safety_time > ttc >= self._behavior.safety_time: target_speed = min([ max(self._min_speed, vehicle_speed), self._behavior.max_speed, self._speed_limit - self._behavior.speed_lim_dist]) self._local_planner.set_speed(target_speed) control = self._local_planner.run_step(debug=debug) # Normal behavior. else: target_speed = min([ self._behavior.max_speed, self._speed_limit - self._behavior.speed_lim_dist]) self._local_planner.set_speed(target_speed) control = self._local_planner.run_step(debug=debug) return control
[ "def", "car_following_manager", "(", "self", ",", "vehicle", ",", "distance", ",", "debug", "=", "False", ")", ":", "vehicle_speed", "=", "get_speed", "(", "vehicle", ")", "delta_v", "=", "max", "(", "1", ",", "(", "self", ".", "_speed", "-", "vehicle_speed", ")", "/", "3.6", ")", "ttc", "=", "distance", "/", "delta_v", "if", "delta_v", "!=", "0", "else", "distance", "/", "np", ".", "nextafter", "(", "0.", ",", "1.", ")", "# Under safety time distance, slow down.", "if", "self", ".", "_behavior", ".", "safety_time", ">", "ttc", ">", "0.0", ":", "target_speed", "=", "min", "(", "[", "positive", "(", "vehicle_speed", "-", "self", ".", "_behavior", ".", "speed_decrease", ")", ",", "self", ".", "_behavior", ".", "max_speed", ",", "self", ".", "_speed_limit", "-", "self", ".", "_behavior", ".", "speed_lim_dist", "]", ")", "self", ".", "_local_planner", ".", "set_speed", "(", "target_speed", ")", "control", "=", "self", ".", "_local_planner", ".", "run_step", "(", "debug", "=", "debug", ")", "# Actual safety distance area, try to follow the speed of the vehicle in front.", "elif", "2", "*", "self", ".", "_behavior", ".", "safety_time", ">", "ttc", ">=", "self", ".", "_behavior", ".", "safety_time", ":", "target_speed", "=", "min", "(", "[", "max", "(", "self", ".", "_min_speed", ",", "vehicle_speed", ")", ",", "self", ".", "_behavior", ".", "max_speed", ",", "self", ".", "_speed_limit", "-", "self", ".", "_behavior", ".", "speed_lim_dist", "]", ")", "self", ".", "_local_planner", ".", "set_speed", "(", "target_speed", ")", "control", "=", "self", ".", "_local_planner", ".", "run_step", "(", "debug", "=", "debug", ")", "# Normal behavior.", "else", ":", "target_speed", "=", "min", "(", "[", "self", ".", "_behavior", ".", "max_speed", ",", "self", ".", "_speed_limit", "-", "self", ".", "_behavior", ".", "speed_lim_dist", "]", ")", "self", ".", "_local_planner", ".", "set_speed", "(", "target_speed", ")", "control", "=", "self", ".", "_local_planner", ".", "run_step", "(", "debug", "=", "debug", ")", "return", "control" ]
https://github.com/carla-simulator/carla/blob/8854804f4d7748e14d937ec763a2912823a7e5f5/PythonAPI/carla/agents/navigation/behavior_agent.py#L197-L238
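The arithmetic at the heart of car_following_manager above is the time-to-collision estimate; a self-contained sketch (speeds in km/h, gap in metres):

def time_to_collision(ego_kmh, lead_kmh, gap_m):
    # Closing speed in m/s, floored at 1 m/s exactly as in the agent above.
    delta_v = max(1.0, (ego_kmh - lead_kmh) / 3.6)
    return gap_m / delta_v

print(time_to_collision(50.0, 30.0, 20.0))  # 3.6 seconds

Because delta_v is floored at 1, the delta_v != 0 guard in the original appears unreachable; the np.nextafter fallback would only matter if the floor were removed.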
yyzybb537/libgo
4af17b7c67643c4d54aa354dcc77963ea07847d0
third_party/boost.context/tools/build/src/build/targets.py
python
AbstractTarget.location
(self)
return self.location_
Return the location where the target was declared.
Return the location where the target was declared.
[ "Return", "the", "location", "where", "the", "target", "was", "declared", "." ]
def location (self): """ Return the location where the target was declared. """ return self.location_
[ "def", "location", "(", "self", ")", ":", "return", "self", ".", "location_" ]
https://github.com/yyzybb537/libgo/blob/4af17b7c67643c4d54aa354dcc77963ea07847d0/third_party/boost.context/tools/build/src/build/targets.py#L325-L328
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/site-packages/boto3/dynamodb/conditions.py
python
AttributeBase.gte
(self, value)
return GreaterThanEquals(self, value)
Creates a condition where the attribute is greater than or equal to the value. :param value: The value that the attribute is greater than or equal to.
Creates a condition where the attribute is greater than or equal to the value.
[ "Creates", "a", "condition", "where", "the", "attribute", "is", "greater", "than", "or", "equal", "to", "the", "value", "." ]
def gte(self, value): """Creates a condition where the attribute is greater than or equal to the value. :param value: The value that the attribute is greater than or equal to. """ return GreaterThanEquals(self, value)
[ "def", "gte", "(", "self", ",", "value", ")", ":", "return", "GreaterThanEquals", "(", "self", ",", "value", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/boto3/dynamodb/conditions.py#L103-L109
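A minimal usage sketch for the condition builders above, assuming boto3 is installed; the attribute and key names are made up:

from boto3.dynamodb.conditions import Attr, Key

# Conditions compose with & and | and are passed to query/scan calls:
cond = Key('pk').eq('user#1') & Attr('age').gte(21)
# table.query(KeyConditionExpression=Key('pk').eq('user#1'),
#             FilterExpression=Attr('age').gte(21))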
gabyx/ApproxMVBB
838f3ff7690a938f1e4199a5f41b6feefc32a603
example/kdTreeFiltering/python/Tools/Transformations/Transformations.py
python
quaternion_from_euler
(ai, aj, ak, axes='sxyz')
return q
Return quaternion from Euler angles and axis sequence. ai, aj, ak : Euler's roll, pitch and yaw angles axes : One of 24 axis sequences as string or encoded tuple >>> q = quaternion_from_euler(1, 2, 3, 'ryxz') >>> numpy.allclose(q, [0.435953, 0.310622, -0.718287, 0.444435]) True
Return quaternion from Euler angles and axis sequence.
[ "Return", "quaternion", "from", "Euler", "angles", "and", "axis", "sequence", "." ]
def quaternion_from_euler(ai, aj, ak, axes='sxyz'): """Return quaternion from Euler angles and axis sequence. ai, aj, ak : Euler's roll, pitch and yaw angles axes : One of 24 axis sequences as string or encoded tuple >>> q = quaternion_from_euler(1, 2, 3, 'ryxz') >>> numpy.allclose(q, [0.435953, 0.310622, -0.718287, 0.444435]) True """ try: firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()] except (AttributeError, KeyError): _TUPLE2AXES[axes] # validation firstaxis, parity, repetition, frame = axes i = firstaxis + 1 j = _NEXT_AXIS[i+parity-1] + 1 k = _NEXT_AXIS[i-parity] + 1 if frame: ai, ak = ak, ai if parity: aj = -aj ai /= 2.0 aj /= 2.0 ak /= 2.0 ci = math.cos(ai) si = math.sin(ai) cj = math.cos(aj) sj = math.sin(aj) ck = math.cos(ak) sk = math.sin(ak) cc = ci*ck cs = ci*sk sc = si*ck ss = si*sk q = numpy.empty((4, )) if repetition: q[0] = cj*(cc - ss) q[i] = cj*(cs + sc) q[j] = sj*(cc + ss) q[k] = sj*(cs - sc) else: q[0] = cj*cc + sj*ss q[i] = cj*sc - sj*cs q[j] = cj*ss + sj*cc q[k] = cj*cs - sj*sc if parity: q[j] *= -1.0 return q
[ "def", "quaternion_from_euler", "(", "ai", ",", "aj", ",", "ak", ",", "axes", "=", "'sxyz'", ")", ":", "try", ":", "firstaxis", ",", "parity", ",", "repetition", ",", "frame", "=", "_AXES2TUPLE", "[", "axes", ".", "lower", "(", ")", "]", "except", "(", "AttributeError", ",", "KeyError", ")", ":", "_TUPLE2AXES", "[", "axes", "]", "# validation", "firstaxis", ",", "parity", ",", "repetition", ",", "frame", "=", "axes", "i", "=", "firstaxis", "+", "1", "j", "=", "_NEXT_AXIS", "[", "i", "+", "parity", "-", "1", "]", "+", "1", "k", "=", "_NEXT_AXIS", "[", "i", "-", "parity", "]", "+", "1", "if", "frame", ":", "ai", ",", "ak", "=", "ak", ",", "ai", "if", "parity", ":", "aj", "=", "-", "aj", "ai", "/=", "2.0", "aj", "/=", "2.0", "ak", "/=", "2.0", "ci", "=", "math", ".", "cos", "(", "ai", ")", "si", "=", "math", ".", "sin", "(", "ai", ")", "cj", "=", "math", ".", "cos", "(", "aj", ")", "sj", "=", "math", ".", "sin", "(", "aj", ")", "ck", "=", "math", ".", "cos", "(", "ak", ")", "sk", "=", "math", ".", "sin", "(", "ak", ")", "cc", "=", "ci", "*", "ck", "cs", "=", "ci", "*", "sk", "sc", "=", "si", "*", "ck", "ss", "=", "si", "*", "sk", "q", "=", "numpy", ".", "empty", "(", "(", "4", ",", ")", ")", "if", "repetition", ":", "q", "[", "0", "]", "=", "cj", "*", "(", "cc", "-", "ss", ")", "q", "[", "i", "]", "=", "cj", "*", "(", "cs", "+", "sc", ")", "q", "[", "j", "]", "=", "sj", "*", "(", "cc", "+", "ss", ")", "q", "[", "k", "]", "=", "sj", "*", "(", "cs", "-", "sc", ")", "else", ":", "q", "[", "0", "]", "=", "cj", "*", "cc", "+", "sj", "*", "ss", "q", "[", "i", "]", "=", "cj", "*", "sc", "-", "sj", "*", "cs", "q", "[", "j", "]", "=", "cj", "*", "ss", "+", "sj", "*", "cc", "q", "[", "k", "]", "=", "cj", "*", "cs", "-", "sj", "*", "sc", "if", "parity", ":", "q", "[", "j", "]", "*=", "-", "1.0", "return", "q" ]
https://github.com/gabyx/ApproxMVBB/blob/838f3ff7690a938f1e4199a5f41b6feefc32a603/example/kdTreeFiltering/python/Tools/Transformations/Transformations.py#L1176-L1230
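The docstring above already carries a doctest; spelled out as a script, assuming the file is importable as transformations:

import numpy
import transformations  # the module containing quaternion_from_euler above

q = transformations.quaternion_from_euler(1, 2, 3, 'ryxz')
assert numpy.allclose(q, [0.435953, 0.310622, -0.718287, 0.444435])
# 'sxyz' (static xyz) is the default axis convention.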
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/stc.py
python
StyledTextCtrl.SearchNext
(*args, **kwargs)
return _stc.StyledTextCtrl_SearchNext(*args, **kwargs)
SearchNext(self, int flags, String text) -> int Find some text starting at the search anchor. Does not ensure the selection is visible.
SearchNext(self, int flags, String text) -> int
[ "SearchNext", "(", "self", "int", "flags", "String", "text", ")", "-", ">", "int" ]
def SearchNext(*args, **kwargs): """ SearchNext(self, int flags, String text) -> int Find some text starting at the search anchor. Does not ensure the selection is visible. """ return _stc.StyledTextCtrl_SearchNext(*args, **kwargs)
[ "def", "SearchNext", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_stc", ".", "StyledTextCtrl_SearchNext", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/stc.py#L4929-L4936
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/propgrid.py
python
PropertyGridInterface.EditorValidate
(*args, **kwargs)
return _propgrid.PropertyGridInterface_EditorValidate(*args, **kwargs)
EditorValidate(self) -> bool
EditorValidate(self) -> bool
[ "EditorValidate", "(", "self", ")", "-", ">", "bool" ]
def EditorValidate(*args, **kwargs): """EditorValidate(self) -> bool""" return _propgrid.PropertyGridInterface_EditorValidate(*args, **kwargs)
[ "def", "EditorValidate", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_propgrid", ".", "PropertyGridInterface_EditorValidate", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/propgrid.py#L1143-L1145
MirrorYuChen/ncnn_example
a42608e6e0e51ed68d3bd8ada853595980935220
ncnn-20210525-full-source/python/ncnn/model_zoo/peleenetssd.py
python
PeleeNet_SSD.__call__
(self, img)
return objects, resized
#method 2, use ncnn.Mat->numpy.array to get the result, no memory copy too out = np.array(mat_out) for i in range(len(out)): values = out[i] obj = Detect_Object() obj.label = values[0] obj.prob = values[1] obj.rect.x = values[2] * img_w obj.rect.y = values[3] * img_h obj.rect.w = values[4] * img_w - obj.rect.x obj.rect.h = values[5] * img_h - obj.rect.y objects.append(obj)
#method 2, use ncnn.Mat->numpy.array to get the result, no memory copy too out = np.array(mat_out) for i in range(len(out)): values = out[i] obj = Detect_Object() obj.label = values[0] obj.prob = values[1] obj.rect.x = values[2] * img_w obj.rect.y = values[3] * img_h obj.rect.w = values[4] * img_w - obj.rect.x obj.rect.h = values[5] * img_h - obj.rect.y objects.append(obj)
[ "#method", "2", "use", "ncnn", ".", "Mat", "-", ">", "numpy", ".", "array", "to", "get", "the", "result", "no", "memory", "copy", "too", "out", "=", "np", ".", "array", "(", "mat_out", ")", "for", "i", "in", "range", "(", "len", "(", "out", "))", ":", "values", "=", "out", "[", "i", "]", "obj", "=", "Detect_Object", "()", "obj", ".", "label", "=", "values", "[", "0", "]", "obj", ".", "prob", "=", "values", "[", "1", "]", "obj", ".", "rect", ".", "x", "=", "values", "[", "2", "]", "*", "img_w", "obj", ".", "rect", ".", "y", "=", "values", "[", "3", "]", "*", "img_h", "obj", ".", "rect", ".", "w", "=", "values", "[", "4", "]", "*", "img_w", "-", "obj", ".", "rect", ".", "x", "obj", ".", "rect", ".", "h", "=", "values", "[", "5", "]", "*", "img_h", "-", "obj", ".", "rect", ".", "y", "objects", ".", "append", "(", "obj", ")" ]
def __call__(self, img): img_h = img.shape[0] img_w = img.shape[1] mat_in = ncnn.Mat.from_pixels_resize( img, ncnn.Mat.PixelType.PIXEL_BGR, img.shape[1], img.shape[0], self.target_size, self.target_size, ) mat_in.substract_mean_normalize(self.mean_vals, self.norm_vals) ex = self.net.create_extractor() ex.set_num_threads(self.num_threads) ex.input("data", mat_in) ret, mat_out = ex.extract("detection_out") objects = [] # printf("%d %d %d\n", mat_out.w, mat_out.h, mat_out.c) # method 1, use ncnn.Mat.row to get the result, no memory copy for i in range(mat_out.h): values = mat_out.row(i) obj = Detect_Object() obj.label = values[0] obj.prob = values[1] obj.rect.x = values[2] * img_w obj.rect.y = values[3] * img_h obj.rect.w = values[4] * img_w - obj.rect.x obj.rect.h = values[5] * img_h - obj.rect.y objects.append(obj) """ #method 2, use ncnn.Mat->numpy.array to get the result, no memory copy too out = np.array(mat_out) for i in range(len(out)): values = out[i] obj = Detect_Object() obj.label = values[0] obj.prob = values[1] obj.rect.x = values[2] * img_w obj.rect.y = values[3] * img_h obj.rect.w = values[4] * img_w - obj.rect.x obj.rect.h = values[5] * img_h - obj.rect.y objects.append(obj) """ ret, seg_out = ex.extract("sigmoid") resized = ncnn.Mat() ncnn.resize_bilinear(seg_out, resized, img_w, img_h) return objects, resized
[ "def", "__call__", "(", "self", ",", "img", ")", ":", "img_h", "=", "img", ".", "shape", "[", "0", "]", "img_w", "=", "img", ".", "shape", "[", "1", "]", "mat_in", "=", "ncnn", ".", "Mat", ".", "from_pixels_resize", "(", "img", ",", "ncnn", ".", "Mat", ".", "PixelType", ".", "PIXEL_BGR", ",", "img", ".", "shape", "[", "1", "]", ",", "img", ".", "shape", "[", "0", "]", ",", "self", ".", "target_size", ",", "self", ".", "target_size", ",", ")", "mat_in", ".", "substract_mean_normalize", "(", "self", ".", "mean_vals", ",", "self", ".", "norm_vals", ")", "ex", "=", "self", ".", "net", ".", "create_extractor", "(", ")", "ex", ".", "set_num_threads", "(", "self", ".", "num_threads", ")", "ex", ".", "input", "(", "\"data\"", ",", "mat_in", ")", "ret", ",", "mat_out", "=", "ex", ".", "extract", "(", "\"detection_out\"", ")", "objects", "=", "[", "]", "# printf(\"%d %d %d\\n\", mat_out.w, mat_out.h, mat_out.c)", "# method 1, use ncnn.Mat.row to get the result, no memory copy", "for", "i", "in", "range", "(", "mat_out", ".", "h", ")", ":", "values", "=", "mat_out", ".", "row", "(", "i", ")", "obj", "=", "Detect_Object", "(", ")", "obj", ".", "label", "=", "values", "[", "0", "]", "obj", ".", "prob", "=", "values", "[", "1", "]", "obj", ".", "rect", ".", "x", "=", "values", "[", "2", "]", "*", "img_w", "obj", ".", "rect", ".", "y", "=", "values", "[", "3", "]", "*", "img_h", "obj", ".", "rect", ".", "w", "=", "values", "[", "4", "]", "*", "img_w", "-", "obj", ".", "rect", ".", "x", "obj", ".", "rect", ".", "h", "=", "values", "[", "5", "]", "*", "img_h", "-", "obj", ".", "rect", ".", "y", "objects", ".", "append", "(", "obj", ")", "ret", ",", "seg_out", "=", "ex", ".", "extract", "(", "\"sigmoid\"", ")", "resized", "=", "ncnn", ".", "Mat", "(", ")", "ncnn", ".", "resize_bilinear", "(", "seg_out", ",", "resized", ",", "img_w", ",", "img_h", ")", "return", "objects", ",", "resized" ]
https://github.com/MirrorYuChen/ncnn_example/blob/a42608e6e0e51ed68d3bd8ada853595980935220/ncnn-20210525-full-source/python/ncnn/model_zoo/peleenetssd.py#L55-L114
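The per-row decoding in __call__ above is worth isolating: each detection row is [label, prob, x1, y1, x2, y2] with corners normalized to [0, 1]. A self-contained sketch of the pixel-space conversion (the sample row is made up):

def row_to_rect(values, img_w, img_h):
    label, prob = values[0], values[1]
    x = values[2] * img_w
    y = values[3] * img_h
    w = values[4] * img_w - x   # x2 * img_w - x1 * img_w
    h = values[5] * img_h - y   # y2 * img_h - y1 * img_h
    return label, prob, (x, y, w, h)

print(row_to_rect([15.0, 0.9, 0.25, 0.25, 0.75, 0.75], 320, 240))
# (15.0, 0.9, (80.0, 60.0, 160.0, 120.0))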
NicknineTheEagle/TF2-Base
20459c5a7fbc995b6bf54fa85c2f62a101e9fb64
src/thirdparty/protobuf-2.3.0/python/google/protobuf/internal/containers.py
python
RepeatedScalarFieldContainer.__eq__
(self, other)
return other == self._values
Compares the current instance with another one.
Compares the current instance with another one.
[ "Compares", "the", "current", "instance", "with", "another", "one", "." ]
def __eq__(self, other): """Compares the current instance with another one.""" if self is other: return True # Special case for the same type which should be common and fast. if isinstance(other, self.__class__): return other._values == self._values # We are presumably comparing against some other sequence type. return other == self._values
[ "def", "__eq__", "(", "self", ",", "other", ")", ":", "if", "self", "is", "other", ":", "return", "True", "# Special case for the same type which should be common and fast.", "if", "isinstance", "(", "other", ",", "self", ".", "__class__", ")", ":", "return", "other", ".", "_values", "==", "self", ".", "_values", "# We are presumably comparing against some other sequence type.", "return", "other", "==", "self", ".", "_values" ]
https://github.com/NicknineTheEagle/TF2-Base/blob/20459c5a7fbc995b6bf54fa85c2f62a101e9fb64/src/thirdparty/protobuf-2.3.0/python/google/protobuf/internal/containers.py#L165-L173
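The three-way comparison strategy above (identity, same type, generic sequence) transplants cleanly; a minimal sketch outside protobuf:

class ScalarList:
    def __init__(self, values=()):
        self._values = list(values)

    def __eq__(self, other):
        if self is other:
            return True                       # identity fast path
        if isinstance(other, ScalarList):
            return other._values == self._values
        return other == self._values          # compare as a plain sequence

assert ScalarList([1, 2]) == ScalarList([1, 2])
assert ScalarList([1, 2]) == [1, 2]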
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/idlelib/run.py
python
main
(del_exitfunc=False)
Start the Python execution server in a subprocess In the Python subprocess, RPCServer is instantiated with handlerclass MyHandler, which inherits register/unregister methods from RPCHandler via the mix-in class SocketIO. When the RPCServer 'server' is instantiated, the TCPServer initialization creates an instance of run.MyHandler and calls its handle() method. handle() instantiates a run.Executive object, passing it a reference to the MyHandler object. That reference is saved as attribute rpchandler of the Executive instance. The Executive methods have access to the reference and can pass it on to entities that they command (e.g. debugger_r.Debugger.start_debugger()). The latter, in turn, can call MyHandler(SocketIO) register/unregister methods via the reference to register and unregister themselves.
Start the Python execution server in a subprocess
[ "Start", "the", "Python", "execution", "server", "in", "a", "subprocess" ]
def main(del_exitfunc=False): """Start the Python execution server in a subprocess In the Python subprocess, RPCServer is instantiated with handlerclass MyHandler, which inherits register/unregister methods from RPCHandler via the mix-in class SocketIO. When the RPCServer 'server' is instantiated, the TCPServer initialization creates an instance of run.MyHandler and calls its handle() method. handle() instantiates a run.Executive object, passing it a reference to the MyHandler object. That reference is saved as attribute rpchandler of the Executive instance. The Executive methods have access to the reference and can pass it on to entities that they command (e.g. debugger_r.Debugger.start_debugger()). The latter, in turn, can call MyHandler(SocketIO) register/unregister methods via the reference to register and unregister themselves. """ global exit_now global quitting global no_exitfunc no_exitfunc = del_exitfunc #time.sleep(15) # test subprocess not responding try: assert(len(sys.argv) > 1) port = int(sys.argv[-1]) except: print("IDLE Subprocess: no IP port passed in sys.argv.", file=sys.__stderr__) return capture_warnings(True) sys.argv[:] = [""] sockthread = threading.Thread(target=manage_socket, name='SockThread', args=((LOCALHOST, port),)) sockthread.daemon = True sockthread.start() while 1: try: if exit_now: try: exit() except KeyboardInterrupt: # exiting but got an extra KBI? Try again! continue try: request = rpc.request_queue.get(block=True, timeout=0.05) except queue.Empty: request = None # Issue 32207: calling handle_tk_events here adds spurious # queue.Empty traceback to event handling exceptions. if request: seq, (method, args, kwargs) = request ret = method(*args, **kwargs) rpc.response_queue.put((seq, ret)) else: handle_tk_events() except KeyboardInterrupt: if quitting: exit_now = True continue except SystemExit: capture_warnings(False) raise except: type, value, tb = sys.exc_info() try: print_exception() rpc.response_queue.put((seq, None)) except: # Link didn't work, print same exception to __stderr__ traceback.print_exception(type, value, tb, file=sys.__stderr__) exit() else: continue
[ "def", "main", "(", "del_exitfunc", "=", "False", ")", ":", "global", "exit_now", "global", "quitting", "global", "no_exitfunc", "no_exitfunc", "=", "del_exitfunc", "#time.sleep(15) # test subprocess not responding", "try", ":", "assert", "(", "len", "(", "sys", ".", "argv", ")", ">", "1", ")", "port", "=", "int", "(", "sys", ".", "argv", "[", "-", "1", "]", ")", "except", ":", "print", "(", "\"IDLE Subprocess: no IP port passed in sys.argv.\"", ",", "file", "=", "sys", ".", "__stderr__", ")", "return", "capture_warnings", "(", "True", ")", "sys", ".", "argv", "[", ":", "]", "=", "[", "\"\"", "]", "sockthread", "=", "threading", ".", "Thread", "(", "target", "=", "manage_socket", ",", "name", "=", "'SockThread'", ",", "args", "=", "(", "(", "LOCALHOST", ",", "port", ")", ",", ")", ")", "sockthread", ".", "daemon", "=", "True", "sockthread", ".", "start", "(", ")", "while", "1", ":", "try", ":", "if", "exit_now", ":", "try", ":", "exit", "(", ")", "except", "KeyboardInterrupt", ":", "# exiting but got an extra KBI? Try again!", "continue", "try", ":", "request", "=", "rpc", ".", "request_queue", ".", "get", "(", "block", "=", "True", ",", "timeout", "=", "0.05", ")", "except", "queue", ".", "Empty", ":", "request", "=", "None", "# Issue 32207: calling handle_tk_events here adds spurious", "# queue.Empty traceback to event handling exceptions.", "if", "request", ":", "seq", ",", "(", "method", ",", "args", ",", "kwargs", ")", "=", "request", "ret", "=", "method", "(", "*", "args", ",", "*", "*", "kwargs", ")", "rpc", ".", "response_queue", ".", "put", "(", "(", "seq", ",", "ret", ")", ")", "else", ":", "handle_tk_events", "(", ")", "except", "KeyboardInterrupt", ":", "if", "quitting", ":", "exit_now", "=", "True", "continue", "except", "SystemExit", ":", "capture_warnings", "(", "False", ")", "raise", "except", ":", "type", ",", "value", ",", "tb", "=", "sys", ".", "exc_info", "(", ")", "try", ":", "print_exception", "(", ")", "rpc", ".", "response_queue", ".", "put", "(", "(", "seq", ",", "None", ")", ")", "except", ":", "# Link didn't work, print same exception to __stderr__", "traceback", ".", "print_exception", "(", "type", ",", "value", ",", "tb", ",", "file", "=", "sys", ".", "__stderr__", ")", "exit", "(", ")", "else", ":", "continue" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/idlelib/run.py#L101-L176
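The core of the loop in main above is queue polling with a short timeout so Tk events can run between requests; a stripped-down sketch of that dispatch pattern:

import queue

requests = queue.Queue()

def serve_once(timeout=0.05):
    # Block briefly for a request; fall through to other housekeeping
    # (in IDLE, Tk event handling) when none arrives in time.
    try:
        seq, (fn, args, kwargs) = requests.get(block=True, timeout=timeout)
    except queue.Empty:
        return None
    return seq, fn(*args, **kwargs)

requests.put((1, (max, (3, 7), {})))
print(serve_once())  # (1, 7)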
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/tools/docs/doc_generator_visitor.py
python
DocGeneratorVisitor.duplicates
(self)
return self._duplicates
A map from preferred full names to a list of all names for this symbol. This function returns a map from preferred (master) name for a symbol to a lexicographically sorted list of all aliases for that name (incl. the master name). Symbols without duplicate names do not appear in this map. It is computed when it, `reverse_index`, or `duplicate_of` are first accessed. Returns: The map from master name to list of all duplicate names.
A map from preferred full names to a list of all names for this symbol.
[ "A", "map", "from", "preferred", "full", "names", "to", "a", "list", "of", "all", "names", "for", "this", "symbol", "." ]
def duplicates(self): """A map from preferred full names to a list of all names for this symbol. This function returns a map from preferred (master) name for a symbol to a lexicographically sorted list of all aliases for that name (incl. the master name). Symbols without duplicate names do not appear in this map. It is computed when it, `reverse_index`, or `duplicate_of` are first accessed. Returns: The map from master name to list of all duplicate names. """ self._maybe_find_duplicates() return self._duplicates
[ "def", "duplicates", "(", "self", ")", ":", "self", ".", "_maybe_find_duplicates", "(", ")", "return", "self", ".", "_duplicates" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/tools/docs/doc_generator_visitor.py#L106-L120
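A toy version of the alias map duplicates above returns: group names by the object they resolve to and keep only multi-name symbols. Picking the master name with min() is a simplification; the real visitor's choice is more involved:

from collections import defaultdict

objects = {'tf.add': 1, 'tf.math.add': 1, 'tf.sub': 2}  # name -> object id

by_obj = defaultdict(list)
for name, obj in objects.items():
    by_obj[obj].append(name)

duplicates = {min(names): sorted(names)
              for names in by_obj.values() if len(names) > 1}
print(duplicates)  # {'tf.add': ['tf.add', 'tf.math.add']}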
openvinotoolkit/openvino
dedcbeafa8b84cccdc55ca64b8da516682b381c7
cmake/developer_package/cpplint/cpplint.py
python
FlagCxx11Features
(filename, clean_lines, linenum, error)
Flag those c++11 features that we only allow in certain places. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
Flag those c++11 features that we only allow in certain places.
[ "Flag", "those", "c", "++", "11", "features", "that", "we", "only", "allow", "in", "certain", "places", "." ]
def FlagCxx11Features(filename, clean_lines, linenum, error): """Flag those c++11 features that we only allow in certain places. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found. """ line = clean_lines.elided[linenum] include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line) # Flag unapproved C++ TR1 headers. if include and include.group(1).startswith('tr1/'): error(filename, linenum, 'build/c++tr1', 5, ('C++ TR1 headers such as <%s> are unapproved.') % include.group(1)) # Flag unapproved C++11 headers. if include and include.group(1) in ('cfenv', 'condition_variable', 'fenv.h', 'future', 'mutex', 'thread', 'chrono', 'ratio', 'regex', 'system_error', ): error(filename, linenum, 'build/c++11', 5, ('<%s> is an unapproved C++11 header.') % include.group(1)) # The only place where we need to worry about C++11 keywords and library # features in preprocessor directives is in macro definitions. if Match(r'\s*#', line) and not Match(r'\s*#\s*define\b', line): return # These are classes and free functions. The classes are always # mentioned as std::*, but we only catch the free functions if # they're not found by ADL. They're alphabetical by header. for top_name in ( # type_traits 'alignment_of', 'aligned_union', ): if Search(r'\bstd::%s\b' % top_name, line): error(filename, linenum, 'build/c++11', 5, ('std::%s is an unapproved C++11 class or function. Send c-style ' 'an example of where it would make your code more readable, and ' 'they may let you use it.') % top_name)
[ "def", "FlagCxx11Features", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "include", "=", "Match", "(", "r'\\s*#\\s*include\\s+[<\"]([^<\"]+)[\">]'", ",", "line", ")", "# Flag unapproved C++ TR1 headers.", "if", "include", "and", "include", ".", "group", "(", "1", ")", ".", "startswith", "(", "'tr1/'", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'build/c++tr1'", ",", "5", ",", "(", "'C++ TR1 headers such as <%s> are unapproved.'", ")", "%", "include", ".", "group", "(", "1", ")", ")", "# Flag unapproved C++11 headers.", "if", "include", "and", "include", ".", "group", "(", "1", ")", "in", "(", "'cfenv'", ",", "'condition_variable'", ",", "'fenv.h'", ",", "'future'", ",", "'mutex'", ",", "'thread'", ",", "'chrono'", ",", "'ratio'", ",", "'regex'", ",", "'system_error'", ",", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'build/c++11'", ",", "5", ",", "(", "'<%s> is an unapproved C++11 header.'", ")", "%", "include", ".", "group", "(", "1", ")", ")", "# The only place where we need to worry about C++11 keywords and library", "# features in preprocessor directives is in macro definitions.", "if", "Match", "(", "r'\\s*#'", ",", "line", ")", "and", "not", "Match", "(", "r'\\s*#\\s*define\\b'", ",", "line", ")", ":", "return", "# These are classes and free functions. The classes are always", "# mentioned as std::*, but we only catch the free functions if", "# they're not found by ADL. They're alphabetical by header.", "for", "top_name", "in", "(", "# type_traits", "'alignment_of'", ",", "'aligned_union'", ",", ")", ":", "if", "Search", "(", "r'\\bstd::%s\\b'", "%", "top_name", ",", "line", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'build/c++11'", ",", "5", ",", "(", "'std::%s is an unapproved C++11 class or function. Send c-style '", "'an example of where it would make your code more readable, and '", "'they may let you use it.'", ")", "%", "top_name", ")" ]
https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/cmake/developer_package/cpplint/cpplint.py#L6110-L6159
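The include check in FlagCxx11Features above reduces to one regex; a sketch using plain re in place of cpplint's Match/Search helpers (the sample lines are made up):

import re

_INCLUDE = re.compile(r'\s*#\s*include\s+[<"]([^<"]+)[">]')

for line in ('#include <tr1/memory>', '#include <mutex>', 'int x = 0;'):
    m = _INCLUDE.match(line)
    if m and m.group(1).startswith('tr1/'):
        print('unapproved TR1 header:', m.group(1))
    elif m and m.group(1) in ('cfenv', 'chrono', 'mutex', 'thread'):
        print('unapproved C++11 header:', m.group(1))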
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/python/training/saver.py
python
Saver.set_last_checkpoints_with_time
(self, last_checkpoints_with_time)
Sets the list of old checkpoint filenames and timestamps. Args: last_checkpoints_with_time: A list of tuples of checkpoint filenames and timestamps. Raises: AssertionError: If last_checkpoints_with_time is not a list.
Sets the list of old checkpoint filenames and timestamps.
[ "Sets", "the", "list", "of", "old", "checkpoint", "filenames", "and", "timestamps", "." ]
def set_last_checkpoints_with_time(self, last_checkpoints_with_time): """Sets the list of old checkpoint filenames and timestamps. Args: last_checkpoints_with_time: A list of tuples of checkpoint filenames and timestamps. Raises: AssertionError: If last_checkpoints_with_time is not a list. """ assert isinstance(last_checkpoints_with_time, list) self._last_checkpoints = last_checkpoints_with_time
[ "def", "set_last_checkpoints_with_time", "(", "self", ",", "last_checkpoints_with_time", ")", ":", "assert", "isinstance", "(", "last_checkpoints_with_time", ",", "list", ")", "self", ".", "_last_checkpoints", "=", "last_checkpoints_with_time" ]
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/python/training/saver.py#L1000-L1011