nwo | sha | path | language | identifier | parameters | argument_list | return_statement | docstring | docstring_summary | docstring_tokens | function | function_tokens | url |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
hcdth011/ROS-Hydro-SLAM | 629448eecd2c9a3511158115fa53ea9e4ae41359 | rpg_vikit/vikit_py/src/vikit_py/transformations.py | python | euler_matrix | (ai, aj, ak, axes='sxyz') | return M | Return homogeneous rotation matrix from Euler angles and axis sequence.
ai, aj, ak : Euler's roll, pitch and yaw angles
axes : One of 24 axis sequences as string or encoded tuple
>>> R = euler_matrix(1, 2, 3, 'syxz')
>>> numpy.allclose(numpy.sum(R[0]), -1.34786452)
True
>>> R = euler_matrix(1, 2, 3, (0, 1, 0, 1))
>>> numpy.allclose(numpy.sum(R[0]), -0.383436184)
True
>>> ai, aj, ak = (4.0*math.pi) * (numpy.random.random(3) - 0.5)
>>> for axes in _AXES2TUPLE.keys():
... R = euler_matrix(ai, aj, ak, axes)
>>> for axes in _TUPLE2AXES.keys():
... R = euler_matrix(ai, aj, ak, axes) | Return homogeneous rotation matrix from Euler angles and axis sequence. | [
"Return",
"homogeneous",
"rotation",
"matrix",
"from",
"Euler",
"angles",
"and",
"axis",
"sequence",
"."
] | def euler_matrix(ai, aj, ak, axes='sxyz'):
"""Return homogeneous rotation matrix from Euler angles and axis sequence.
ai, aj, ak : Euler's roll, pitch and yaw angles
axes : One of 24 axis sequences as string or encoded tuple
>>> R = euler_matrix(1, 2, 3, 'syxz')
>>> numpy.allclose(numpy.sum(R[0]), -1.34786452)
True
>>> R = euler_matrix(1, 2, 3, (0, 1, 0, 1))
>>> numpy.allclose(numpy.sum(R[0]), -0.383436184)
True
>>> ai, aj, ak = (4.0*math.pi) * (numpy.random.random(3) - 0.5)
>>> for axes in _AXES2TUPLE.keys():
... R = euler_matrix(ai, aj, ak, axes)
>>> for axes in _TUPLE2AXES.keys():
... R = euler_matrix(ai, aj, ak, axes)
"""
try:
firstaxis, parity, repetition, frame = _AXES2TUPLE[axes]
except (AttributeError, KeyError):
_ = _TUPLE2AXES[axes]
firstaxis, parity, repetition, frame = axes
i = firstaxis
j = _NEXT_AXIS[i+parity]
k = _NEXT_AXIS[i-parity+1]
if frame:
ai, ak = ak, ai
if parity:
ai, aj, ak = -ai, -aj, -ak
si, sj, sk = math.sin(ai), math.sin(aj), math.sin(ak)
ci, cj, ck = math.cos(ai), math.cos(aj), math.cos(ak)
cc, cs = ci*ck, ci*sk
sc, ss = si*ck, si*sk
M = numpy.identity(4)
if repetition:
M[i, i] = cj
M[i, j] = sj*si
M[i, k] = sj*ci
M[j, i] = sj*sk
M[j, j] = -cj*ss+cc
M[j, k] = -cj*cs-sc
M[k, i] = -sj*ck
M[k, j] = cj*sc+cs
M[k, k] = cj*cc-ss
else:
M[i, i] = cj*ck
M[i, j] = sj*sc-cs
M[i, k] = sj*cc+ss
M[j, i] = cj*sk
M[j, j] = sj*ss+cc
M[j, k] = sj*cs-sc
M[k, i] = -sj
M[k, j] = cj*si
M[k, k] = cj*ci
return M | [
"def",
"euler_matrix",
"(",
"ai",
",",
"aj",
",",
"ak",
",",
"axes",
"=",
"'sxyz'",
")",
":",
"try",
":",
"firstaxis",
",",
"parity",
",",
"repetition",
",",
"frame",
"=",
"_AXES2TUPLE",
"[",
"axes",
"]",
"except",
"(",
"AttributeError",
",",
"KeyError",
")",
":",
"_",
"=",
"_TUPLE2AXES",
"[",
"axes",
"]",
"firstaxis",
",",
"parity",
",",
"repetition",
",",
"frame",
"=",
"axes",
"i",
"=",
"firstaxis",
"j",
"=",
"_NEXT_AXIS",
"[",
"i",
"+",
"parity",
"]",
"k",
"=",
"_NEXT_AXIS",
"[",
"i",
"-",
"parity",
"+",
"1",
"]",
"if",
"frame",
":",
"ai",
",",
"ak",
"=",
"ak",
",",
"ai",
"if",
"parity",
":",
"ai",
",",
"aj",
",",
"ak",
"=",
"-",
"ai",
",",
"-",
"aj",
",",
"-",
"ak",
"si",
",",
"sj",
",",
"sk",
"=",
"math",
".",
"sin",
"(",
"ai",
")",
",",
"math",
".",
"sin",
"(",
"aj",
")",
",",
"math",
".",
"sin",
"(",
"ak",
")",
"ci",
",",
"cj",
",",
"ck",
"=",
"math",
".",
"cos",
"(",
"ai",
")",
",",
"math",
".",
"cos",
"(",
"aj",
")",
",",
"math",
".",
"cos",
"(",
"ak",
")",
"cc",
",",
"cs",
"=",
"ci",
"*",
"ck",
",",
"ci",
"*",
"sk",
"sc",
",",
"ss",
"=",
"si",
"*",
"ck",
",",
"si",
"*",
"sk",
"M",
"=",
"numpy",
".",
"identity",
"(",
"4",
")",
"if",
"repetition",
":",
"M",
"[",
"i",
",",
"i",
"]",
"=",
"cj",
"M",
"[",
"i",
",",
"j",
"]",
"=",
"sj",
"*",
"si",
"M",
"[",
"i",
",",
"k",
"]",
"=",
"sj",
"*",
"ci",
"M",
"[",
"j",
",",
"i",
"]",
"=",
"sj",
"*",
"sk",
"M",
"[",
"j",
",",
"j",
"]",
"=",
"-",
"cj",
"*",
"ss",
"+",
"cc",
"M",
"[",
"j",
",",
"k",
"]",
"=",
"-",
"cj",
"*",
"cs",
"-",
"sc",
"M",
"[",
"k",
",",
"i",
"]",
"=",
"-",
"sj",
"*",
"ck",
"M",
"[",
"k",
",",
"j",
"]",
"=",
"cj",
"*",
"sc",
"+",
"cs",
"M",
"[",
"k",
",",
"k",
"]",
"=",
"cj",
"*",
"cc",
"-",
"ss",
"else",
":",
"M",
"[",
"i",
",",
"i",
"]",
"=",
"cj",
"*",
"ck",
"M",
"[",
"i",
",",
"j",
"]",
"=",
"sj",
"*",
"sc",
"-",
"cs",
"M",
"[",
"i",
",",
"k",
"]",
"=",
"sj",
"*",
"cc",
"+",
"ss",
"M",
"[",
"j",
",",
"i",
"]",
"=",
"cj",
"*",
"sk",
"M",
"[",
"j",
",",
"j",
"]",
"=",
"sj",
"*",
"ss",
"+",
"cc",
"M",
"[",
"j",
",",
"k",
"]",
"=",
"sj",
"*",
"cs",
"-",
"sc",
"M",
"[",
"k",
",",
"i",
"]",
"=",
"-",
"sj",
"M",
"[",
"k",
",",
"j",
"]",
"=",
"cj",
"*",
"si",
"M",
"[",
"k",
",",
"k",
"]",
"=",
"cj",
"*",
"ci",
"return",
"M"
] | https://github.com/hcdth011/ROS-Hydro-SLAM/blob/629448eecd2c9a3511158115fa53ea9e4ae41359/rpg_vikit/vikit_py/src/vikit_py/transformations.py#L972-L1032 |
|
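A minimal usage sketch for the `euler_matrix` record above. The expected sums are taken verbatim from the function's own doctest, and the import assumes the `vikit_py` package from the row's path is importable:

```python
import numpy
from vikit_py.transformations import euler_matrix

# Static-frame y-x-z axis sequence, as in the docstring's first example.
R = euler_matrix(1, 2, 3, 'syxz')
assert numpy.allclose(numpy.sum(R[0]), -1.34786452)

# The axis sequence may also be given as an encoded
# (firstaxis, parity, repetition, frame) tuple.
R = euler_matrix(1, 2, 3, (0, 1, 0, 1))
assert numpy.allclose(numpy.sum(R[0]), -0.383436184)
```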
asLody/whale | 6a661b27cc4cf83b7b5a3b02451597ee1ac7f264 | whale/cpplint.py | python | CheckStyle | (filename, clean_lines, linenum, file_extension, nesting_state,
error) | Checks rules from the 'C++ style rules' section of cppguide.html.
Most of these rules are hard to test (naming, comment style), but we
do what we can. In particular we check for 2-space indents, line lengths,
tab usage, spaces inside code, etc.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
file_extension: The extension (without the dot) of the filename.
nesting_state: A NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: The function to call with any errors found. | Checks rules from the 'C++ style rules' section of cppguide.html. | [
"Checks",
"rules",
"from",
"the",
"C",
"++",
"style",
"rules",
"section",
"of",
"cppguide",
".",
"html",
"."
] | def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state,
error):
"""Checks rules from the 'C++ style rules' section of cppguide.html.
Most of these rules are hard to test (naming, comment style), but we
do what we can. In particular we check for 2-space indents, line lengths,
tab usage, spaces inside code, etc.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
file_extension: The extension (without the dot) of the filename.
nesting_state: A NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: The function to call with any errors found.
"""
# Don't use "elided" lines here, otherwise we can't check commented lines.
# Don't want to use "raw" either, because we don't want to check inside C++11
# raw strings.
raw_lines = clean_lines.lines_without_raw_strings
line = raw_lines[linenum]
prev = raw_lines[linenum - 1] if linenum > 0 else ''
if line.find('\t') != -1:
error(filename, linenum, 'whitespace/tab', 1,
'Tab found; better to use spaces')
# One or three blank spaces at the beginning of the line is weird; it's
# hard to reconcile that with 2-space indents.
# NOTE: here are the conditions rob pike used for his tests. Mine aren't
# as sophisticated, but it may be worth becoming so: RLENGTH==initial_spaces
# if(RLENGTH > 20) complain = 0;
# if(match($0, " +(error|private|public|protected):")) complain = 0;
# if(match(prev, "&& *$")) complain = 0;
# if(match(prev, "\\|\\| *$")) complain = 0;
# if(match(prev, "[\",=><] *$")) complain = 0;
# if(match($0, " <<")) complain = 0;
# if(match(prev, " +for \\(")) complain = 0;
# if(prevodd && match(prevprev, " +for \\(")) complain = 0;
scope_or_label_pattern = r'\s*\w+\s*:\s*\\?$'
classinfo = nesting_state.InnermostClass()
initial_spaces = 0
cleansed_line = clean_lines.elided[linenum]
while initial_spaces < len(line) and line[initial_spaces] == ' ':
initial_spaces += 1
# There are certain situations we allow one space, notably for
# section labels, and also lines containing multi-line raw strings.
# We also don't check for lines that look like continuation lines
# (of lines ending in double quotes, commas, equals, or angle brackets)
# because the rules for how to indent those are non-trivial.
if (not Search(r'[",=><] *$', prev) and
(initial_spaces == 1 or initial_spaces == 3) and
not Match(scope_or_label_pattern, cleansed_line) and
not (clean_lines.raw_lines[linenum] != line and
Match(r'^\s*""', line))):
error(filename, linenum, 'whitespace/indent', 3,
'Weird number of spaces at line-start. '
'Are you using a 2-space indent?')
if line and line[-1].isspace():
error(filename, linenum, 'whitespace/end_of_line', 4,
'Line ends in whitespace. Consider deleting these extra spaces.')
# Check if the line is a header guard.
is_header_guard = False
if IsHeaderExtension(file_extension):
cppvar = GetHeaderGuardCPPVariable(filename)
if (line.startswith('#ifndef %s' % cppvar) or
line.startswith('#define %s' % cppvar) or
line.startswith('#endif // %s' % cppvar)):
is_header_guard = True
# #include lines and header guards can be long, since there's no clean way to
# split them.
#
# URLs can be long too. It's possible to split these, but it makes them
# harder to cut&paste.
#
# The "$Id:...$" comment may also get very long without it being the
# developer's fault.
if (not line.startswith('#include') and not is_header_guard and
not Match(r'^\s*//.*http(s?)://\S*$', line) and
not Match(r'^\s*//\s*[^\s]*$', line) and
not Match(r'^// \$Id:.*#[0-9]+ \$$', line)):
line_width = GetLineWidth(line)
if line_width > _line_length:
error(filename, linenum, 'whitespace/line_length', 2,
'Lines should be <= %i characters long' % _line_length)
if (cleansed_line.count(';') > 1 and
# for loops are allowed two ;'s (and may run over two lines).
cleansed_line.find('for') == -1 and
(GetPreviousNonBlankLine(clean_lines, linenum)[0].find('for') == -1 or
GetPreviousNonBlankLine(clean_lines, linenum)[0].find(';') != -1) and
# It's ok to have many commands in a switch case that fits in 1 line
not ((cleansed_line.find('case ') != -1 or
cleansed_line.find('default:') != -1) and
cleansed_line.find('break;') != -1)):
error(filename, linenum, 'whitespace/newline', 0,
'More than one command on the same line')
# Some more style checks
CheckBraces(filename, clean_lines, linenum, error)
CheckTrailingSemicolon(filename, clean_lines, linenum, error)
CheckEmptyBlockBody(filename, clean_lines, linenum, error)
CheckSpacing(filename, clean_lines, linenum, nesting_state, error)
CheckOperatorSpacing(filename, clean_lines, linenum, error)
CheckParenthesisSpacing(filename, clean_lines, linenum, error)
CheckCommaSpacing(filename, clean_lines, linenum, error)
CheckBracesSpacing(filename, clean_lines, linenum, nesting_state, error)
CheckSpacingForFunctionCall(filename, clean_lines, linenum, error)
CheckCheck(filename, clean_lines, linenum, error)
CheckAltTokens(filename, clean_lines, linenum, error)
classinfo = nesting_state.InnermostClass()
if classinfo:
CheckSectionSpacing(filename, clean_lines, classinfo, linenum, error) | [
"def",
"CheckStyle",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"file_extension",
",",
"nesting_state",
",",
"error",
")",
":",
"# Don't use \"elided\" lines here, otherwise we can't check commented lines.",
"# Don't want to use \"raw\" either, because we don't want to check inside C++11",
"# raw strings,",
"raw_lines",
"=",
"clean_lines",
".",
"lines_without_raw_strings",
"line",
"=",
"raw_lines",
"[",
"linenum",
"]",
"prev",
"=",
"raw_lines",
"[",
"linenum",
"-",
"1",
"]",
"if",
"linenum",
">",
"0",
"else",
"''",
"if",
"line",
".",
"find",
"(",
"'\\t'",
")",
"!=",
"-",
"1",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/tab'",
",",
"1",
",",
"'Tab found; better to use spaces'",
")",
"# One or three blank spaces at the beginning of the line is weird; it's",
"# hard to reconcile that with 2-space indents.",
"# NOTE: here are the conditions rob pike used for his tests. Mine aren't",
"# as sophisticated, but it may be worth becoming so: RLENGTH==initial_spaces",
"# if(RLENGTH > 20) complain = 0;",
"# if(match($0, \" +(error|private|public|protected):\")) complain = 0;",
"# if(match(prev, \"&& *$\")) complain = 0;",
"# if(match(prev, \"\\\\|\\\\| *$\")) complain = 0;",
"# if(match(prev, \"[\\\",=><] *$\")) complain = 0;",
"# if(match($0, \" <<\")) complain = 0;",
"# if(match(prev, \" +for \\\\(\")) complain = 0;",
"# if(prevodd && match(prevprev, \" +for \\\\(\")) complain = 0;",
"scope_or_label_pattern",
"=",
"r'\\s*\\w+\\s*:\\s*\\\\?$'",
"classinfo",
"=",
"nesting_state",
".",
"InnermostClass",
"(",
")",
"initial_spaces",
"=",
"0",
"cleansed_line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"while",
"initial_spaces",
"<",
"len",
"(",
"line",
")",
"and",
"line",
"[",
"initial_spaces",
"]",
"==",
"' '",
":",
"initial_spaces",
"+=",
"1",
"# There are certain situations we allow one space, notably for",
"# section labels, and also lines containing multi-line raw strings.",
"# We also don't check for lines that look like continuation lines",
"# (of lines ending in double quotes, commas, equals, or angle brackets)",
"# because the rules for how to indent those are non-trivial.",
"if",
"(",
"not",
"Search",
"(",
"r'[\",=><] *$'",
",",
"prev",
")",
"and",
"(",
"initial_spaces",
"==",
"1",
"or",
"initial_spaces",
"==",
"3",
")",
"and",
"not",
"Match",
"(",
"scope_or_label_pattern",
",",
"cleansed_line",
")",
"and",
"not",
"(",
"clean_lines",
".",
"raw_lines",
"[",
"linenum",
"]",
"!=",
"line",
"and",
"Match",
"(",
"r'^\\s*\"\"'",
",",
"line",
")",
")",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/indent'",
",",
"3",
",",
"'Weird number of spaces at line-start. '",
"'Are you using a 2-space indent?'",
")",
"if",
"line",
"and",
"line",
"[",
"-",
"1",
"]",
".",
"isspace",
"(",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/end_of_line'",
",",
"4",
",",
"'Line ends in whitespace. Consider deleting these extra spaces.'",
")",
"# Check if the line is a header guard.",
"is_header_guard",
"=",
"False",
"if",
"IsHeaderExtension",
"(",
"file_extension",
")",
":",
"cppvar",
"=",
"GetHeaderGuardCPPVariable",
"(",
"filename",
")",
"if",
"(",
"line",
".",
"startswith",
"(",
"'#ifndef %s'",
"%",
"cppvar",
")",
"or",
"line",
".",
"startswith",
"(",
"'#define %s'",
"%",
"cppvar",
")",
"or",
"line",
".",
"startswith",
"(",
"'#endif // %s'",
"%",
"cppvar",
")",
")",
":",
"is_header_guard",
"=",
"True",
"# #include lines and header guards can be long, since there's no clean way to",
"# split them.",
"#",
"# URLs can be long too. It's possible to split these, but it makes them",
"# harder to cut&paste.",
"#",
"# The \"$Id:...$\" comment may also get very long without it being the",
"# developers fault.",
"if",
"(",
"not",
"line",
".",
"startswith",
"(",
"'#include'",
")",
"and",
"not",
"is_header_guard",
"and",
"not",
"Match",
"(",
"r'^\\s*//.*http(s?)://\\S*$'",
",",
"line",
")",
"and",
"not",
"Match",
"(",
"r'^\\s*//\\s*[^\\s]*$'",
",",
"line",
")",
"and",
"not",
"Match",
"(",
"r'^// \\$Id:.*#[0-9]+ \\$$'",
",",
"line",
")",
")",
":",
"line_width",
"=",
"GetLineWidth",
"(",
"line",
")",
"if",
"line_width",
">",
"_line_length",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/line_length'",
",",
"2",
",",
"'Lines should be <= %i characters long'",
"%",
"_line_length",
")",
"if",
"(",
"cleansed_line",
".",
"count",
"(",
"';'",
")",
">",
"1",
"and",
"# for loops are allowed two ;'s (and may run over two lines).",
"cleansed_line",
".",
"find",
"(",
"'for'",
")",
"==",
"-",
"1",
"and",
"(",
"GetPreviousNonBlankLine",
"(",
"clean_lines",
",",
"linenum",
")",
"[",
"0",
"]",
".",
"find",
"(",
"'for'",
")",
"==",
"-",
"1",
"or",
"GetPreviousNonBlankLine",
"(",
"clean_lines",
",",
"linenum",
")",
"[",
"0",
"]",
".",
"find",
"(",
"';'",
")",
"!=",
"-",
"1",
")",
"and",
"# It's ok to have many commands in a switch case that fits in 1 line",
"not",
"(",
"(",
"cleansed_line",
".",
"find",
"(",
"'case '",
")",
"!=",
"-",
"1",
"or",
"cleansed_line",
".",
"find",
"(",
"'default:'",
")",
"!=",
"-",
"1",
")",
"and",
"cleansed_line",
".",
"find",
"(",
"'break;'",
")",
"!=",
"-",
"1",
")",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/newline'",
",",
"0",
",",
"'More than one command on the same line'",
")",
"# Some more style checks",
"CheckBraces",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
"CheckTrailingSemicolon",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
"CheckEmptyBlockBody",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
"CheckSpacing",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"nesting_state",
",",
"error",
")",
"CheckOperatorSpacing",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
"CheckParenthesisSpacing",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
"CheckCommaSpacing",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
"CheckBracesSpacing",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"nesting_state",
",",
"error",
")",
"CheckSpacingForFunctionCall",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
"CheckCheck",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
"CheckAltTokens",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
"classinfo",
"=",
"nesting_state",
".",
"InnermostClass",
"(",
")",
"if",
"classinfo",
":",
"CheckSectionSpacing",
"(",
"filename",
",",
"clean_lines",
",",
"classinfo",
",",
"linenum",
",",
"error",
")"
] | https://github.com/asLody/whale/blob/6a661b27cc4cf83b7b5a3b02451597ee1ac7f264/whale/cpplint.py#L4311-L4427 |
||
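A hedged sketch of driving `CheckStyle` with an error-collecting callback. It assumes the vendored `cpplint.py` above is importable as `cpplint`; `CleansedLines` and `NestingState` are cpplint internals, so their construction may differ in other versions:

```python
import cpplint

findings = []

def collect(filename, linenum, category, confidence, message):
    # Same signature as the `error` parameter documented above.
    findings.append((filename, linenum, category, confidence, message))

lines = ['int main() {', '\treturn 0;', '}']  # tab indent on purpose
clean = cpplint.CleansedLines(lines)
state = cpplint.NestingState()
for num in range(clean.NumLines()):
    cpplint.CheckStyle('demo.cc', clean, num, 'cc', state, collect)

print(findings)  # expect at least one whitespace/tab complaint
```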
papyrussolution/OpenPapyrus | bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91 | Src/OSF/protobuf-3.19.1/python/google/protobuf/descriptor_pool.py | python | DescriptorPool.FindExtensionByNumber | (self, message_descriptor, number) | Gets the extension of the specified message with the specified number.
Extensions have to be registered to this pool by calling :func:`Add` or
:func:`AddExtensionDescriptor`.
Args:
message_descriptor (Descriptor): descriptor of the extended message.
number (int): Number of the extension field.
Returns:
FieldDescriptor: The descriptor for the extension.
Raises:
KeyError: when no extension with the given number is known for the
specified message. | Gets the extension of the specified message with the specified number. | [
"Gets",
"the",
"extension",
"of",
"the",
"specified",
"message",
"with",
"the",
"specified",
"number",
"."
] | def FindExtensionByNumber(self, message_descriptor, number):
"""Gets the extension of the specified message with the specified number.
Extensions have to be registered to this pool by calling :func:`Add` or
:func:`AddExtensionDescriptor`.
Args:
message_descriptor (Descriptor): descriptor of the extended message.
number (int): Number of the extension field.
Returns:
FieldDescriptor: The descriptor for the extension.
Raises:
KeyError: when no extension with the given number is known for the
specified message.
"""
try:
return self._extensions_by_number[message_descriptor][number]
except KeyError:
self._TryLoadExtensionFromDB(message_descriptor, number)
return self._extensions_by_number[message_descriptor][number] | [
"def",
"FindExtensionByNumber",
"(",
"self",
",",
"message_descriptor",
",",
"number",
")",
":",
"try",
":",
"return",
"self",
".",
"_extensions_by_number",
"[",
"message_descriptor",
"]",
"[",
"number",
"]",
"except",
"KeyError",
":",
"self",
".",
"_TryLoadExtensionFromDB",
"(",
"message_descriptor",
",",
"number",
")",
"return",
"self",
".",
"_extensions_by_number",
"[",
"message_descriptor",
"]",
"[",
"number",
"]"
] | https://github.com/papyrussolution/OpenPapyrus/blob/bbfb5ec2ea2109b8e2f125edd838e12eaf7b8b91/Src/OSF/protobuf-3.19.1/python/google/protobuf/descriptor_pool.py#L601-L622 |
||
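A sketch of the lookup described above. The message name and extension number are hypothetical placeholders; in practice a compiled `_pb2` module must already have registered them with the default pool:

```python
from google.protobuf import descriptor_pool

pool = descriptor_pool.Default()
# 'my.package.MyMessage' and field number 1000 are placeholders.
message_desc = pool.FindMessageTypeByName('my.package.MyMessage')
try:
    ext = pool.FindExtensionByNumber(message_desc, 1000)
    print(ext.full_name)
except KeyError:
    print('no extension numbered 1000 is known for this message')
```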
gem5/gem5 | 141cc37c2d4b93959d4c249b8f7e6a8b2ef75338 | src/arch/isa_parser/isa_parser.py | python | ISAParser.p_decode_stmt_format | (self, t) | decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE | decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE | [
"decode_stmt",
":",
"FORMAT",
"push_format_id",
"LBRACE",
"decode_stmt_list",
"RBRACE"
] | def p_decode_stmt_format(self, t):
'decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE'
# The format will be pushed on the stack when 'push_format_id'
# is processed (see below). Once the parser has recognized
# the full production (through the right brace), we're done
# with the format, so now we can pop it.
self.formatStack.pop()
t[0] = t[4] | [
"def",
"p_decode_stmt_format",
"(",
"self",
",",
"t",
")",
":",
"# The format will be pushed on the stack when 'push_format_id'",
"# is processed (see below). Once the parser has recognized",
"# the full production (though the right brace), we're done",
"# with the format, so now we can pop it.",
"self",
".",
"formatStack",
".",
"pop",
"(",
")",
"t",
"[",
"0",
"]",
"=",
"t",
"[",
"4",
"]"
] | https://github.com/gem5/gem5/blob/141cc37c2d4b93959d4c249b8f7e6a8b2ef75338/src/arch/isa_parser/isa_parser.py#L1238-L1245 |
||
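The one-line docstring above is not ordinary documentation: PLY, the parser generator the gem5 ISA parser is built on, reads each `p_*` method's docstring as a grammar production, and `t[n]` indexes the matched symbols (so `t[4]` above is the nested `decode_stmt_list`). A generic PLY rule illustrating the convention:

```python
def p_expression_plus(t):
    'expression : expression PLUS term'
    # t[0] receives the rule's result; t[1] and t[3] are the matched operands.
    t[0] = t[1] + t[3]
```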
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/applications/workbench/workbench/app/mainwindow.py | python | MainWindow.post_mantid_init | (self) | Run any setup that requires mantid
to have been initialized | Run any setup that requires mantid
to have been initialized | [
"Run",
"any",
"setup",
"that",
"requires",
"mantid",
"to",
"have",
"been",
"initialized"
] | def post_mantid_init(self):
"""Run any setup that requires mantid
to have been initialized
"""
self.redirect_python_warnings()
self.populate_menus()
self.algorithm_selector.refresh()
# turn on algorithm factory notifications
from mantid.api import AlgorithmFactory
algorithm_factory = AlgorithmFactory.Instance()
algorithm_factory.enableNotifications() | [
"def",
"post_mantid_init",
"(",
"self",
")",
":",
"self",
".",
"redirect_python_warnings",
"(",
")",
"self",
".",
"populate_menus",
"(",
")",
"self",
".",
"algorithm_selector",
".",
"refresh",
"(",
")",
"# turn on algorithm factory notifications",
"from",
"mantid",
".",
"api",
"import",
"AlgorithmFactory",
"algorithm_factory",
"=",
"AlgorithmFactory",
".",
"Instance",
"(",
")",
"algorithm_factory",
".",
"enableNotifications",
"(",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/applications/workbench/workbench/app/mainwindow.py#L230-L241 |
||
cvxpy/cvxpy | 5165b4fb750dfd237de8659383ef24b4b2e33aaf | cvxpy/transforms/indicator.py | python | indicator.is_constant | (self) | return all([arg.is_constant() for arg in all_args]) | The Indicator is constant if all constraints have constant args. | The Indicator is constant if all constraints have constant args. | [
"The",
"Indicator",
"is",
"constant",
"if",
"all",
"constraints",
"have",
"constant",
"args",
"."
] | def is_constant(self) -> bool:
"""The Indicator is constant if all constraints have constant args.
"""
all_args = sum([c.args for c in self.args], [])
return all([arg.is_constant() for arg in all_args]) | [
"def",
"is_constant",
"(",
"self",
")",
"->",
"bool",
":",
"all_args",
"=",
"sum",
"(",
"[",
"c",
".",
"args",
"for",
"c",
"in",
"self",
".",
"args",
"]",
",",
"[",
"]",
")",
"return",
"all",
"(",
"[",
"arg",
".",
"is_constant",
"(",
")",
"for",
"arg",
"in",
"all_args",
"]",
")"
] | https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/transforms/indicator.py#L44-L48 |
|
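A short sketch of the property above: `indicator` takes a list of constraints, and the resulting expression is non-constant whenever any constraint argument involves a variable:

```python
import cvxpy as cp
from cvxpy.transforms.indicator import indicator

x = cp.Variable()
expr = indicator([x >= 0])
print(expr.is_constant())  # False: the constraint's argument x is a variable
```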
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/results_tab_widget/results_tab_widget.py | python | ResultsTabWidget.__init__ | (self, fit_context, context, parent) | Initialize the widget.
:param fit_context: A reference to the FitContext object used to store fit results
:param parent: A parent widget for the view | Initialize the widget.
:param fit_context: A reference to the FitContext object used to store fit results
:param parent: A parent widget for the view | [
"Initialize",
"the",
"widget",
".",
":",
"param",
"fit_context",
":",
"A",
"reference",
"to",
"the",
"a",
"FitContext",
"object",
"used",
"to",
"store",
"fit",
"results",
":",
"param",
"parent",
":",
"A",
"parent",
"widget",
"for",
"the",
"view"
] | def __init__(self, fit_context, context, parent):
"""
Initialize the widget.
:param fit_context: A reference to the FitContext object used to store fit results
:param parent: A parent widget for the view
"""
self.results_tab_view = ResultsTabView(parent=parent)
self.results_tab_presenter = ResultsTabPresenter(
self.results_tab_view, ResultsTabModel(fit_context, context.results_context))
context.update_view_from_model_notifier.add_subscriber(self.results_tab_presenter.update_view_from_model_observer)
fit_context.fit_removed_notifier.add_subscriber(self.results_tab_presenter.new_fit_performed_observer) | [
"def",
"__init__",
"(",
"self",
",",
"fit_context",
",",
"context",
",",
"parent",
")",
":",
"self",
".",
"results_tab_view",
"=",
"ResultsTabView",
"(",
"parent",
"=",
"parent",
")",
"self",
".",
"results_tab_presenter",
"=",
"ResultsTabPresenter",
"(",
"self",
".",
"results_tab_view",
",",
"ResultsTabModel",
"(",
"fit_context",
",",
"context",
".",
"results_context",
")",
")",
"context",
".",
"update_view_from_model_notifier",
".",
"add_subscriber",
"(",
"self",
".",
"results_tab_presenter",
".",
"update_view_from_model_observer",
")",
"fit_context",
".",
"fit_removed_notifier",
".",
"add_subscriber",
"(",
"self",
".",
"results_tab_presenter",
".",
"new_fit_performed_observer",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/results_tab_widget/results_tab_widget.py#L15-L26 |
||
cinder/Cinder | e83f5bb9c01a63eec20168d02953a0879e5100f7 | docs/libs/markdown/extensions/abbr.py | python | AbbrPreprocessor._generate_pattern | (self, text) | return r'(?P<abbr>\b%s\b)' % (r''.join(chars)) | Given a string, returns a regex pattern to match that string.
'HTML' -> r'(?P<abbr>[H][T][M][L])'
Note: we force each char as a literal match (in brackets) as we don't
know what they will be beforehand. | Given a string, returns a regex pattern to match that string. | [
"Given",
"a",
"string",
"returns",
"an",
"regex",
"pattern",
"to",
"match",
"that",
"string",
"."
] | def _generate_pattern(self, text):
'''
Given a string, returns a regex pattern to match that string.
'HTML' -> r'(?P<abbr>[H][T][M][L])'
Note: we force each char as a literal match (in brackets) as we don't
know what they will be beforehand.
'''
chars = list(text)
for i in range(len(chars)):
chars[i] = r'[%s]' % chars[i]
return r'(?P<abbr>\b%s\b)' % (r''.join(chars)) | [
"def",
"_generate_pattern",
"(",
"self",
",",
"text",
")",
":",
"chars",
"=",
"list",
"(",
"text",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"chars",
")",
")",
":",
"chars",
"[",
"i",
"]",
"=",
"r'[%s]'",
"%",
"chars",
"[",
"i",
"]",
"return",
"r'(?P<abbr>\\b%s\\b)'",
"%",
"(",
"r''",
".",
"join",
"(",
"chars",
")",
")"
] | https://github.com/cinder/Cinder/blob/e83f5bb9c01a63eec20168d02953a0879e5100f7/docs/libs/markdown/extensions/abbr.py#L60-L73 |
|
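The pattern built by `_generate_pattern` can be exercised directly with the standard `re` module; this reproduces the docstring's `'HTML'` example, with the `\b` word boundaries added by the return statement:

```python
import re

# _generate_pattern('HTML') yields each character as a literal class:
pattern = r'(?P<abbr>\b[H][T][M][L]\b)'
match = re.search(pattern, 'The HTML spec')
print(match.group('abbr'))  # 'HTML'
```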
snap-stanford/snap-python | d53c51b0a26aa7e3e7400b014cdf728948fde80a | setup/snap.py | python | TCs.GetCsFromBf | (*args) | return _snap.TCs_GetCsFromBf(*args) | GetCsFromBf(char * Bf, int const & BfL) -> TCs
Parameters:
Bf: char *
BfL: int const & | GetCsFromBf(char * Bf, int const & BfL) -> TCs | [
"GetCsFromBf",
"(",
"char",
"*",
"Bf",
"int",
"const",
"&",
"BfL",
")",
"-",
">",
"TCs"
] | def GetCsFromBf(*args):
"""
GetCsFromBf(char * Bf, int const & BfL) -> TCs
Parameters:
Bf: char *
BfL: int const &
"""
return _snap.TCs_GetCsFromBf(*args) | [
"def",
"GetCsFromBf",
"(",
"*",
"args",
")",
":",
"return",
"_snap",
".",
"TCs_GetCsFromBf",
"(",
"*",
"args",
")"
] | https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L1704-L1713 |
|
weolar/miniblink49 | 1c4678db0594a4abde23d3ebbcc7cd13c3170777 | third_party/jinja2/lexer.py | python | Lexer.tokenize | (self, source, name=None, filename=None, state=None) | return TokenStream(self.wrap(stream, name, filename), name, filename) | Calls tokeniter + tokenize and wraps it in a token stream. | Calls tokeniter + tokenize and wraps it in a token stream. | [
"Calls",
"tokeniter",
"+",
"tokenize",
"and",
"wraps",
"it",
"in",
"a",
"token",
"stream",
"."
] | def tokenize(self, source, name=None, filename=None, state=None):
"""Calls tokeniter + tokenize and wraps it in a token stream.
"""
stream = self.tokeniter(source, name, filename, state)
return TokenStream(self.wrap(stream, name, filename), name, filename) | [
"def",
"tokenize",
"(",
"self",
",",
"source",
",",
"name",
"=",
"None",
",",
"filename",
"=",
"None",
",",
"state",
"=",
"None",
")",
":",
"stream",
"=",
"self",
".",
"tokeniter",
"(",
"source",
",",
"name",
",",
"filename",
",",
"state",
")",
"return",
"TokenStream",
"(",
"self",
".",
"wrap",
"(",
"stream",
",",
"name",
",",
"filename",
")",
",",
"name",
",",
"filename",
")"
] | https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/jinja2/lexer.py#L542-L546 |
|
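`Lexer.tokenize` is normally reached through `Environment.lex`, which invokes it on the environment's configured lexer; a minimal sketch:

```python
from jinja2 import Environment

env = Environment()
# Each item is a (lineno, token_type, value) tuple from the token stream.
for lineno, token_type, value in env.lex('Hello {{ name }}!'):
    print(lineno, token_type, value)
```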
mindspore-ai/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | mindspore/python/mindspore/nn/probability/distribution/log_normal.py | python | LogNormal._cdf | (self, value, loc=None, scale=None) | return self.select(self.greater(value, 0.), cdf, zeros) | r"""
Compute the cdf via the below formula,
where g is the exp bijector,
and P is the cdf of the underlying normal dist
.. math::
Y = g(X)
P(Y <= a) = P(X <= g^{-1}(a)) | r"""
Compute the cdf via the below formula,
where g is the exp bijector,
and P is the cdf of the underlying normal dist
.. math::
Y = g(X)
P(Y <= a) = P(X <= g^{-1}(a)) | [
"r",
"Compute",
"the",
"cdf",
"via",
"the",
"below",
"formula",
"where",
"g",
"is",
"the",
"exp",
"bijector",
"and",
"P",
"is",
"the",
"cdf",
"of",
"the",
"underlying",
"normal",
"dist",
"..",
"math",
"::",
"Y",
"=",
"g",
"(",
"X",
")",
"P",
"(",
"Y",
"<",
"=",
"a",
")",
"=",
"P",
"(",
"X",
"<",
"=",
"g^",
"{",
"-",
"1",
"}",
"(",
"a",
"))"
] | def _cdf(self, value, loc=None, scale=None):
r"""
Compute the cdf via the below formula,
where g is the exp bijector,
and P is the cdf of the underlying normal dist
.. math::
Y = g(X)
P(Y <= a) = P(X <= g^{-1}(a))
"""
mean, sd = self._check_param_type(loc, scale)
inverse_value = self.bijector("inverse", value)
cdf = self.distribution("cdf", inverse_value, mean, sd)
# to increase numerical stability, set cdf = 0 when value <= 0
zeros = self.fill(self.dtypeop(cdf), self.shape(cdf), 0.0)
return self.select(self.greater(value, 0.), cdf, zeros) | [
"def",
"_cdf",
"(",
"self",
",",
"value",
",",
"loc",
"=",
"None",
",",
"scale",
"=",
"None",
")",
":",
"mean",
",",
"sd",
"=",
"self",
".",
"_check_param_type",
"(",
"loc",
",",
"scale",
")",
"inverse_value",
"=",
"self",
".",
"bijector",
"(",
"\"inverse\"",
",",
"value",
")",
"cdf",
"=",
"self",
".",
"distribution",
"(",
"\"cdf\"",
",",
"inverse_value",
",",
"mean",
",",
"sd",
")",
"# to increase numerical stability, set cdf = 0 when value <= 0",
"zeros",
"=",
"self",
".",
"fill",
"(",
"self",
".",
"dtypeop",
"(",
"cdf",
")",
",",
"self",
".",
"shape",
"(",
"cdf",
")",
",",
"0.0",
")",
"return",
"self",
".",
"select",
"(",
"self",
".",
"greater",
"(",
"value",
",",
"0.",
")",
",",
"cdf",
",",
"zeros",
")"
] | https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/nn/probability/distribution/log_normal.py#L204-L220 |
|
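A standard-library check of the change-of-variables identity in the docstring: for Y = exp(X) with X ~ Normal(0, 1), P(Y <= a) = Phi(ln a), where Phi is the standard normal CDF:

```python
import math

def normal_cdf(x, mu=0.0, sd=1.0):
    # Normal CDF expressed through the error function.
    return 0.5 * (1.0 + math.erf((x - mu) / (sd * math.sqrt(2.0))))

a = 2.0
# g is exp, so g^{-1}(a) = ln(a); the log-normal CDF is the normal CDF there.
print(normal_cdf(math.log(a)))  # ~0.756 = P(Y <= 2) for a standard log-normal
```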
intel/llvm | e6d0547e9d99b5a56430c4749f6c7e328bf221ab | clang/tools/scan-build-py/lib/libscanbuild/__init__.py | python | command_entry_point | (function) | return wrapper | Decorator for command entry methods.
The decorator initializes/shuts down logging and guards against programming
errors (catching exceptions).
The decorated method can have arbitrary parameters, the return value will
be the exit code of the process. | Decorator for command entry methods. | [
"Decorator",
"for",
"command",
"entry",
"methods",
"."
] | def command_entry_point(function):
""" Decorator for command entry methods.
The decorator initializes/shuts down logging and guards against programming
errors (catching exceptions).
The decorated method can have arbitrary parameters, the return value will
be the exit code of the process. """
@functools.wraps(function)
def wrapper(*args, **kwargs):
""" Do housekeeping tasks and execute the wrapped method. """
try:
logging.basicConfig(format='%(name)s: %(message)s',
level=logging.WARNING,
stream=sys.stdout)
# This hack to get the executable name as %(name).
logging.getLogger().name = os.path.basename(sys.argv[0])
return function(*args, **kwargs)
except KeyboardInterrupt:
logging.warning('Keyboard interrupt')
return 130 # Signal received exit code for bash.
except Exception:
logging.exception('Internal error.')
if logging.getLogger().isEnabledFor(logging.DEBUG):
logging.error("Please report this bug and attach the output "
"to the bug report")
else:
logging.error("Please run this command again and turn on "
"verbose mode (add '-vvvv' as argument).")
return 64 # Some non used exit code for internal errors.
finally:
logging.shutdown()
return wrapper | [
"def",
"command_entry_point",
"(",
"function",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"function",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"\"\"\" Do housekeeping tasks and execute the wrapped method. \"\"\"",
"try",
":",
"logging",
".",
"basicConfig",
"(",
"format",
"=",
"'%(name)s: %(message)s'",
",",
"level",
"=",
"logging",
".",
"WARNING",
",",
"stream",
"=",
"sys",
".",
"stdout",
")",
"# This hack to get the executable name as %(name).",
"logging",
".",
"getLogger",
"(",
")",
".",
"name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"sys",
".",
"argv",
"[",
"0",
"]",
")",
"return",
"function",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"KeyboardInterrupt",
":",
"logging",
".",
"warning",
"(",
"'Keyboard interrupt'",
")",
"return",
"130",
"# Signal received exit code for bash.",
"except",
"Exception",
":",
"logging",
".",
"exception",
"(",
"'Internal error.'",
")",
"if",
"logging",
".",
"getLogger",
"(",
")",
".",
"isEnabledFor",
"(",
"logging",
".",
"DEBUG",
")",
":",
"logging",
".",
"error",
"(",
"\"Please report this bug and attach the output \"",
"\"to the bug report\"",
")",
"else",
":",
"logging",
".",
"error",
"(",
"\"Please run this command again and turn on \"",
"\"verbose mode (add '-vvvv' as argument).\"",
")",
"return",
"64",
"# Some non used exit code for internal errors.",
"finally",
":",
"logging",
".",
"shutdown",
"(",
")",
"return",
"wrapper"
] | https://github.com/intel/llvm/blob/e6d0547e9d99b5a56430c4749f6c7e328bf221ab/clang/tools/scan-build-py/lib/libscanbuild/__init__.py#L106-L141 |
|
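A sketch of wrapping a tool's entry point with the decorator above (assuming `libscanbuild` is importable); the decorated function's return value becomes the process exit code, with 130 for Ctrl-C and 64 for internal errors supplied by the wrapper:

```python
import sys
from libscanbuild import command_entry_point

@command_entry_point
def main():
    # Uncaught exceptions here are logged by the wrapper, not propagated.
    print('hello from a scan-build style tool')
    return 0

if __name__ == '__main__':
    sys.exit(main())
```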
eventql/eventql | 7ca0dbb2e683b525620ea30dc40540a22d5eb227 | deps/3rdparty/spidermonkey/mozjs/media/webrtc/trunk/tools/gyp/pylib/gyp/xcodeproj_file.py | python | SourceTreeAndPathFromPath | (input_path) | return (source_tree, output_path) | Given input_path, returns a tuple with sourceTree and path values.
Examples:
input_path (source_tree, output_path)
'$(VAR)/path' ('VAR', 'path')
'$(VAR)' ('VAR', None)
'path' (None, 'path') | Given input_path, returns a tuple with sourceTree and path values. | [
"Given",
"input_path",
"returns",
"a",
"tuple",
"with",
"sourceTree",
"and",
"path",
"values",
"."
] | def SourceTreeAndPathFromPath(input_path):
"""Given input_path, returns a tuple with sourceTree and path values.
Examples:
input_path (source_tree, output_path)
'$(VAR)/path' ('VAR', 'path')
'$(VAR)' ('VAR', None)
'path' (None, 'path')
"""
source_group_match = _path_leading_variable.match(input_path)
if source_group_match:
source_tree = source_group_match.group(1)
output_path = source_group_match.group(3) # This may be None.
else:
source_tree = None
output_path = input_path
return (source_tree, output_path) | [
"def",
"SourceTreeAndPathFromPath",
"(",
"input_path",
")",
":",
"source_group_match",
"=",
"_path_leading_variable",
".",
"match",
"(",
"input_path",
")",
"if",
"source_group_match",
":",
"source_tree",
"=",
"source_group_match",
".",
"group",
"(",
"1",
")",
"output_path",
"=",
"source_group_match",
".",
"group",
"(",
"3",
")",
"# This may be None.",
"else",
":",
"source_tree",
"=",
"None",
"output_path",
"=",
"input_path",
"return",
"(",
"source_tree",
",",
"output_path",
")"
] | https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/media/webrtc/trunk/tools/gyp/pylib/gyp/xcodeproj_file.py#L178-L196 |
|
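The docstring's examples, exercised directly (assuming gyp's `pylib` directory is on the Python path):

```python
from gyp.xcodeproj_file import SourceTreeAndPathFromPath

assert SourceTreeAndPathFromPath('$(VAR)/path') == ('VAR', 'path')
assert SourceTreeAndPathFromPath('$(VAR)') == ('VAR', None)
assert SourceTreeAndPathFromPath('path') == (None, 'path')
```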
mickem/nscp | 79f89fdbb6da63f91bc9dedb7aea202fe938f237 | scripts/python/lib/google/protobuf/internal/containers.py | python | RepeatedScalarFieldContainer.insert | (self, key, value) | Inserts the item at the specified position. Similar to list.insert(). | Inserts the item at the specified position. Similar to list.insert(). | [
"Inserts",
"the",
"item",
"at",
"the",
"specified",
"position",
".",
"Similar",
"to",
"list",
".",
"insert",
"()",
"."
] | def insert(self, key, value):
"""Inserts the item at the specified position. Similar to list.insert()."""
self._type_checker.CheckValue(value)
self._values.insert(key, value)
if not self._message_listener.dirty:
self._message_listener.Modified() | [
"def",
"insert",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"self",
".",
"_type_checker",
".",
"CheckValue",
"(",
"value",
")",
"self",
".",
"_values",
".",
"insert",
"(",
"key",
",",
"value",
")",
"if",
"not",
"self",
".",
"_message_listener",
".",
"dirty",
":",
"self",
".",
"_message_listener",
".",
"Modified",
"(",
")"
] | https://github.com/mickem/nscp/blob/79f89fdbb6da63f91bc9dedb7aea202fe938f237/scripts/python/lib/google/protobuf/internal/containers.py#L111-L116 |
||
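In application code this `insert` is reached through a repeated scalar field of a generated message. A hedged sketch, where `my_pb2.MyMessage` with `repeated int32 values = 1` is a hypothetical compiled proto:

```python
from my_pb2 import MyMessage  # hypothetical generated module

msg = MyMessage()
msg.values.extend([1, 3])
msg.values.insert(1, 2)   # value is type-checked, then the message is marked modified
print(list(msg.values))   # [1, 2, 3]
```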
timi-liuliang/echo | 40a5a24d430eee4118314459ab7e03afcb3b8719 | thirdparty/protobuf/python/google/protobuf/internal/cpp_message.py | python | RepeatedCompositeProperty | (cdescriptor, message_type) | return property(Getter, Setter, doc=doc) | Returns a Python property for the given repeated composite field. | Returns a Python property for the given repeated composite field. | [
"Returns",
"a",
"Python",
"property",
"for",
"the",
"given",
"repeated",
"composite",
"field",
"."
] | def RepeatedCompositeProperty(cdescriptor, message_type):
"""Returns a Python property for the given repeated composite field."""
def Getter(self):
container = self._composite_fields.get(cdescriptor.name, None)
if container is None:
container = RepeatedCompositeContainer(
self, cdescriptor, message_type._concrete_class)
self._composite_fields[cdescriptor.name] = container
return container
def Setter(self, new_value):
raise AttributeError('Assignment not allowed to repeated field '
'"%s" in protocol message object.' % cdescriptor.name)
doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name
return property(Getter, Setter, doc=doc) | [
"def",
"RepeatedCompositeProperty",
"(",
"cdescriptor",
",",
"message_type",
")",
":",
"def",
"Getter",
"(",
"self",
")",
":",
"container",
"=",
"self",
".",
"_composite_fields",
".",
"get",
"(",
"cdescriptor",
".",
"name",
",",
"None",
")",
"if",
"container",
"is",
"None",
":",
"container",
"=",
"RepeatedCompositeContainer",
"(",
"self",
",",
"cdescriptor",
",",
"message_type",
".",
"_concrete_class",
")",
"self",
".",
"_composite_fields",
"[",
"cdescriptor",
".",
"name",
"]",
"=",
"container",
"return",
"container",
"def",
"Setter",
"(",
"self",
",",
"new_value",
")",
":",
"raise",
"AttributeError",
"(",
"'Assignment not allowed to repeated field '",
"'\"%s\" in protocol message object.'",
"%",
"cdescriptor",
".",
"name",
")",
"doc",
"=",
"'Magic attribute generated for \"%s\" proto field.'",
"%",
"cdescriptor",
".",
"name",
"return",
"property",
"(",
"Getter",
",",
"Setter",
",",
"doc",
"=",
"doc",
")"
] | https://github.com/timi-liuliang/echo/blob/40a5a24d430eee4118314459ab7e03afcb3b8719/thirdparty/protobuf/python/google/protobuf/internal/cpp_message.py#L274-L290 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/wizard.py | python | PyWizardPage.Create | (*args, **kwargs) | return _wizard.PyWizardPage_Create(*args, **kwargs) | Create(self, Wizard parent, Bitmap bitmap=wxNullBitmap) -> bool | Create(self, Wizard parent, Bitmap bitmap=wxNullBitmap) -> bool | [
"Create",
"(",
"self",
"Wizard",
"parent",
"Bitmap",
"bitmap",
"=",
"wxNullBitmap",
")",
"-",
">",
"bool"
] | def Create(*args, **kwargs):
"""Create(self, Wizard parent, Bitmap bitmap=wxNullBitmap) -> bool"""
return _wizard.PyWizardPage_Create(*args, **kwargs) | [
"def",
"Create",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_wizard",
".",
"PyWizardPage_Create",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/wizard.py#L143-L145 |
|
tfwu/FaceDetection-ConvNet-3D | f9251c48eb40c5aec8fba7455115c355466555be | example/multi-task/data.py | python | mnist_iterator | (batch_size, input_shape) | return (train_dataiter, val_dataiter) | return train and val iterators for mnist | return train and val iterators for mnist | [
"return",
"train",
"and",
"val",
"iterators",
"for",
"mnist"
] | def mnist_iterator(batch_size, input_shape):
"""return train and val iterators for mnist"""
# download data
get_data.GetMNIST_ubyte()
flat = False if len(input_shape) == 3 else True
train_dataiter = mx.io.MNISTIter(
image="data/train-images-idx3-ubyte",
label="data/train-labels-idx1-ubyte",
input_shape=input_shape,
batch_size=batch_size,
shuffle=True,
flat=flat)
val_dataiter = mx.io.MNISTIter(
image="data/t10k-images-idx3-ubyte",
label="data/t10k-labels-idx1-ubyte",
input_shape=input_shape,
batch_size=batch_size,
flat=flat)
return (train_dataiter, val_dataiter) | [
"def",
"mnist_iterator",
"(",
"batch_size",
",",
"input_shape",
")",
":",
"# download data",
"get_data",
".",
"GetMNIST_ubyte",
"(",
")",
"flat",
"=",
"False",
"if",
"len",
"(",
"input_shape",
")",
"==",
"3",
"else",
"True",
"train_dataiter",
"=",
"mx",
".",
"io",
".",
"MNISTIter",
"(",
"image",
"=",
"\"data/train-images-idx3-ubyte\"",
",",
"label",
"=",
"\"data/train-labels-idx1-ubyte\"",
",",
"input_shape",
"=",
"input_shape",
",",
"batch_size",
"=",
"batch_size",
",",
"shuffle",
"=",
"True",
",",
"flat",
"=",
"flat",
")",
"val_dataiter",
"=",
"mx",
".",
"io",
".",
"MNISTIter",
"(",
"image",
"=",
"\"data/t10k-images-idx3-ubyte\"",
",",
"label",
"=",
"\"data/t10k-labels-idx1-ubyte\"",
",",
"input_shape",
"=",
"input_shape",
",",
"batch_size",
"=",
"batch_size",
",",
"flat",
"=",
"flat",
")",
"return",
"(",
"train_dataiter",
",",
"val_dataiter",
")"
] | https://github.com/tfwu/FaceDetection-ConvNet-3D/blob/f9251c48eb40c5aec8fba7455115c355466555be/example/multi-task/data.py#L11-L32 |
|
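A usage sketch for the helper above, with flat 784-dimensional vectors as an MLP would consume (the first call downloads the MNIST files into `data/`):

```python
train_iter, val_iter = mnist_iterator(batch_size=100, input_shape=(784,))

for batch in train_iter:
    data = batch.data[0]     # NDArray of shape (100, 784)
    label = batch.label[0]   # NDArray of shape (100,)
    break
```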
openvinotoolkit/openvino | dedcbeafa8b84cccdc55ca64b8da516682b381c7 | docs/nbdoc/utils.py | python | verify_notebook_name | (notebook_name: str) | return notebook_name[:3].isdigit() and notebook_name[-4:] == ".rst" | Verification based on notebook name
:param notebook_name: Notebook name by default keeps convention:
[3 digit]-name-with-dashes-with-output.rst,
example: 001-hello-world-with-output.rst
:type notebook_name: str
:returns: True if the notebook name meets the requirements
:rtype: bool | Verification based on notebook name | [
"Verification",
"based",
"on",
"notebook",
"name"
] | def verify_notebook_name(notebook_name: str) -> bool:
"""Verification based on notebook name
:param notebook_name: Notebook name by default keeps convention:
[3 digit]-name-with-dashes-with-output.rst,
example: 001-hello-world-with-output.rst
:type notebook_name: str
:returns: True if the notebook name meets the requirements
:rtype: bool
"""
return notebook_name[:3].isdigit() and notebook_name[-4:] == ".rst" | [
"def",
"verify_notebook_name",
"(",
"notebook_name",
":",
"str",
")",
"->",
"bool",
":",
"return",
"notebook_name",
"[",
":",
"3",
"]",
".",
"isdigit",
"(",
")",
"and",
"notebook_name",
"[",
"-",
"4",
":",
"]",
"==",
"\".rst\""
] | https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/docs/nbdoc/utils.py#L67-L78 |
|
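The check is purely lexical, so it is easy to exercise against the convention quoted in the docstring:

```python
assert verify_notebook_name('001-hello-world-with-output.rst')
assert not verify_notebook_name('hello-world-with-output.rst')  # no 3-digit prefix
assert not verify_notebook_name('001-hello-world.txt')          # not an .rst file
```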
mozilla/DeepSpeech | aa1d28530d531d0d92289bf5f11a49fe516fdc86 | native_client/ctcdecode/__init__.py | python | UTF8Alphabet.Encode | (self, input) | return [el for el in res] | Encode a sequence of character/output classes into a sequence of labels.
Characters are assumed to always take a single Unicode codepoint.
Characters must be in the alphabet, this method will assert that. Use
`CanEncode` and `CanEncodeSingle` to test. | Encode a sequence of character/output classes into a sequence of labels.
Characters are assumed to always take a single Unicode codepoint.
Characters must be in the alphabet, this method will assert that. Use
`CanEncode` and `CanEncodeSingle` to test. | [
"Encode",
"a",
"sequence",
"of",
"character",
"/",
"output",
"classes",
"into",
"a",
"sequence",
"of",
"labels",
".",
"Characters",
"are",
"assumed",
"to",
"always",
"take",
"a",
"single",
"Unicode",
"codepoint",
".",
"Characters",
"must",
"be",
"in",
"the",
"alphabet",
"this",
"method",
"will",
"assert",
"that",
".",
"Use",
"CanEncode",
"and",
"CanEncodeSingle",
"to",
"test",
"."
] | def Encode(self, input):
'''
Encode a sequence of character/output classes into a sequence of labels.
Characters are assumed to always take a single Unicode codepoint.
Characters must be in the alphabet, this method will assert that. Use
`CanEncode` and `CanEncodeSingle` to test.
'''
# Convert SWIG's UnsignedIntVec to a Python list
res = super(UTF8Alphabet, self).Encode(input.encode('utf-8'))
return [el for el in res] | [
"def",
"Encode",
"(",
"self",
",",
"input",
")",
":",
"# Convert SWIG's UnsignedIntVec to a Python list",
"res",
"=",
"super",
"(",
"UTF8Alphabet",
",",
"self",
")",
".",
"Encode",
"(",
"input",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"return",
"[",
"el",
"for",
"el",
"in",
"res",
"]"
] | https://github.com/mozilla/DeepSpeech/blob/aa1d28530d531d0d92289bf5f11a49fe516fdc86/native_client/ctcdecode/__init__.py#L120-L129 |
|
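A hedged sketch of encoding with the UTF-8 alphabet. The import assumes the decoder package built from this tree (published as `ds_ctcdecoder` for this DeepSpeech generation); the exact package name may vary by release:

```python
from ds_ctcdecoder import UTF8Alphabet

alphabet = UTF8Alphabet()
labels = alphabet.Encode('hi')  # one label per UTF-8 byte of the input
print(labels)
```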
snap-stanford/snap-python | d53c51b0a26aa7e3e7400b014cdf728948fde80a | setup/snap.py | python | TStr.CmpI | (self, *args) | return _snap.TStr_CmpI(self, *args) | CmpI(TStr self, TStr Str) -> int
Parameters:
Str: TStr const & | CmpI(TStr self, TStr Str) -> int | [
"CmpI",
"(",
"TStr",
"self",
"TStr",
"Str",
")",
"-",
">",
"int"
] | def CmpI(self, *args):
"""
CmpI(TStr self, TStr Str) -> int
Parameters:
Str: TStr const &
"""
return _snap.TStr_CmpI(self, *args) | [
"def",
"CmpI",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_snap",
".",
"TStr_CmpI",
"(",
"self",
",",
"*",
"args",
")"
] | https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L9733-L9741 |
|
openvinotoolkit/openvino | dedcbeafa8b84cccdc55ca64b8da516682b381c7 | tools/mo/openvino/tools/mo/ops/assert_op.py | python | Assert.assert_control_flow_infer | (node: Node, is_executable: bool, mark_executability: callable) | | Infers control flow through assert operation node. It marks output data nodes' executability according to
executability of current node and assert data value
:param node: Node instance to infer control flow through
:param is_executable: if current node is executable
:param mark_executability: function to mark executability of node | Infers control flow through assert operation node. It marks output data nodes' executability according to
executability of current node and assert data value
:param node: Node instance to infer control flow through
:param is_executable: if current node is executable
:param mark_executability: function to mark executability of node | [
"Infers",
"control",
"flow",
"through",
"assert",
"operation",
"node",
".",
"It",
"marks",
"output",
"data",
"nodes",
"executability",
"according",
"to",
"executability",
"of",
"current",
"node",
"and",
"assert",
"data",
"value",
":",
"param",
"node",
":",
"Node",
"instance",
"to",
"infer",
"control",
"flow",
"through",
":",
"param",
"is_executable",
":",
"if",
"current",
"node",
"is",
"executable",
":",
"param",
"mark_executability",
":",
"function",
"to",
"mark",
"executability",
"of",
"node"
] | def assert_control_flow_infer(node: Node, is_executable: bool, mark_executability: callable):
"""
Infers control flow through assert operation node. It marks output data nodes' executability according to
executability of current node and assert data value
:param node: Node instance to infer control flow through
:param is_executable: if current node is executable
:param mark_executability: function to mark executability of node
"""
graph = node.graph
assert_value = node.out_node().value
for n in [v for _, v in graph.out_edges(node.id)]:
mark_executability(n, assert_value and is_executable) | [
"def",
"assert_control_flow_infer",
"(",
"node",
":",
"Node",
",",
"is_executable",
":",
"bool",
",",
"mark_executability",
":",
"callable",
")",
":",
"graph",
"=",
"node",
".",
"graph",
"assert_value",
"=",
"node",
".",
"out_node",
"(",
")",
".",
"value",
"for",
"n",
"in",
"[",
"v",
"for",
"_",
",",
"v",
"in",
"graph",
".",
"out_edges",
"(",
"node",
".",
"id",
")",
"]",
":",
"mark_executability",
"(",
"n",
",",
"assert_value",
"and",
"is_executable",
")"
] | https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/tools/mo/openvino/tools/mo/ops/assert_op.py#L26-L37 |
||
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/ops/math_ops.py | python | matmul | (a,
b,
transpose_a=False,
transpose_b=False,
adjoint_a=False,
adjoint_b=False,
a_is_sparse=False,
b_is_sparse=False,
output_type=None,
name=None) | Multiplies matrix `a` by matrix `b`, producing `a` * `b`.
The inputs must, following any transpositions, be tensors of rank >= 2
where the inner 2 dimensions specify valid matrix multiplication dimensions,
and any further outer dimensions specify matching batch size.
Both matrices must be of the same type. The supported types are:
`bfloat16`, `float16`, `float32`, `float64`, `int32`, `int64`,
`complex64`, `complex128`.
Either matrix can be transposed or adjointed (conjugated and transposed) on
the fly by setting one of the corresponding flag to `True`. These are `False`
by default.
If one or both of the matrices contain a lot of zeros, a more efficient
multiplication algorithm can be used by setting the corresponding
`a_is_sparse` or `b_is_sparse` flag to `True`. These are `False` by default.
This optimization is only available for plain matrices (rank-2 tensors) with
datatypes `bfloat16` or `float32`.
A simple 2-D tensor matrix multiplication:
>>> a = tf.constant([1, 2, 3, 4, 5, 6], shape=[2, 3])
>>> a # 2-D tensor
<tf.Tensor: shape=(2, 3), dtype=int32, numpy=
array([[1, 2, 3],
[4, 5, 6]], dtype=int32)>
>>> b = tf.constant([7, 8, 9, 10, 11, 12], shape=[3, 2])
>>> b # 2-D tensor
<tf.Tensor: shape=(3, 2), dtype=int32, numpy=
array([[ 7, 8],
[ 9, 10],
[11, 12]], dtype=int32)>
>>> c = tf.matmul(a, b)
>>> c # `a` * `b`
<tf.Tensor: shape=(2, 2), dtype=int32, numpy=
array([[ 58, 64],
[139, 154]], dtype=int32)>
A batch matrix multiplication with batch shape [2]:
>>> a = tf.constant(np.arange(1, 13, dtype=np.int32), shape=[2, 2, 3])
>>> a # 3-D tensor
<tf.Tensor: shape=(2, 2, 3), dtype=int32, numpy=
array([[[ 1, 2, 3],
[ 4, 5, 6]],
[[ 7, 8, 9],
[10, 11, 12]]], dtype=int32)>
>>> b = tf.constant(np.arange(13, 25, dtype=np.int32), shape=[2, 3, 2])
>>> b # 3-D tensor
<tf.Tensor: shape=(2, 3, 2), dtype=int32, numpy=
array([[[13, 14],
[15, 16],
[17, 18]],
[[19, 20],
[21, 22],
[23, 24]]], dtype=int32)>
>>> c = tf.matmul(a, b)
>>> c # `a` * `b`
<tf.Tensor: shape=(2, 2, 2), dtype=int32, numpy=
array([[[ 94, 100],
[229, 244]],
[[508, 532],
[697, 730]]], dtype=int32)>
Since python >= 3.5 the @ operator is supported
(see [PEP 465](https://www.python.org/dev/peps/pep-0465/)). In TensorFlow,
it simply calls the `tf.matmul()` function, so the following lines are
equivalent:
>>> d = a @ b @ [[10], [11]]
>>> d = tf.matmul(tf.matmul(a, b), [[10], [11]])
Args:
a: `tf.Tensor` of type `float16`, `float32`, `float64`, `int32`,
`complex64`, `complex128` and rank > 1.
b: `tf.Tensor` with same type and rank as `a`.
transpose_a: If `True`, `a` is transposed before multiplication.
transpose_b: If `True`, `b` is transposed before multiplication.
adjoint_a: If `True`, `a` is conjugated and transposed before
multiplication.
adjoint_b: If `True`, `b` is conjugated and transposed before
multiplication.
a_is_sparse: If `True`, `a` is treated as a sparse matrix. Notice, this
**does not support `tf.sparse.SparseTensor`**, it just makes optimizations
that assume most values in `a` are zero.
See `tf.sparse.sparse_dense_matmul`
for some support for `tf.sparse.SparseTensor` multiplication.
b_is_sparse: If `True`, `b` is treated as a sparse matrix. Notice, this
**does not support `tf.sparse.SparseTensor`**, it just makes optimizations
that assume most values in `a` are zero.
See `tf.sparse.sparse_dense_matmul`
for some support for `tf.sparse.SparseTensor` multiplication.
output_type: The output datatype if needed. Defaults to None in which case
the output_type is the same as input type. Currently only works when input
tensors are type (u)int8 and output_type can be int32.
name: Name for the operation (optional).
Returns:
A `tf.Tensor` of the same type as `a` and `b` where each inner-most matrix
is the product of the corresponding matrices in `a` and `b`, e.g. if all
transpose or adjoint attributes are `False`:
`output[..., i, j] = sum_k (a[..., i, k] * b[..., k, j])`,
for all indices `i`, `j`.
Note: This is matrix product, not element-wise product.
Raises:
ValueError: If `transpose_a` and `adjoint_a`, or `transpose_b` and
`adjoint_b` are both set to `True`.
TypeError: If output_type is specified but the types of `a`, `b` and
`output_type` are not (u)int8, (u)int8 and int32. | Multiplies matrix `a` by matrix `b`, producing `a` * `b`. | [
"Multiplies",
"matrix",
"a",
"by",
"matrix",
"b",
"producing",
"a",
"*",
"b",
"."
] | def matmul(a,
b,
transpose_a=False,
transpose_b=False,
adjoint_a=False,
adjoint_b=False,
a_is_sparse=False,
b_is_sparse=False,
output_type=None,
name=None):
"""Multiplies matrix `a` by matrix `b`, producing `a` * `b`.
The inputs must, following any transpositions, be tensors of rank >= 2
where the inner 2 dimensions specify valid matrix multiplication dimensions,
and any further outer dimensions specify matching batch size.
Both matrices must be of the same type. The supported types are:
`bfloat16`, `float16`, `float32`, `float64`, `int32`, `int64`,
`complex64`, `complex128`.
Either matrix can be transposed or adjointed (conjugated and transposed) on
the fly by setting one of the corresponding flag to `True`. These are `False`
by default.
If one or both of the matrices contain a lot of zeros, a more efficient
multiplication algorithm can be used by setting the corresponding
`a_is_sparse` or `b_is_sparse` flag to `True`. These are `False` by default.
This optimization is only available for plain matrices (rank-2 tensors) with
datatypes `bfloat16` or `float32`.
A simple 2-D tensor matrix multiplication:
>>> a = tf.constant([1, 2, 3, 4, 5, 6], shape=[2, 3])
>>> a # 2-D tensor
<tf.Tensor: shape=(2, 3), dtype=int32, numpy=
array([[1, 2, 3],
[4, 5, 6]], dtype=int32)>
>>> b = tf.constant([7, 8, 9, 10, 11, 12], shape=[3, 2])
>>> b # 2-D tensor
<tf.Tensor: shape=(3, 2), dtype=int32, numpy=
array([[ 7, 8],
[ 9, 10],
[11, 12]], dtype=int32)>
>>> c = tf.matmul(a, b)
>>> c # `a` * `b`
<tf.Tensor: shape=(2, 2), dtype=int32, numpy=
array([[ 58, 64],
[139, 154]], dtype=int32)>
A batch matrix multiplication with batch shape [2]:
>>> a = tf.constant(np.arange(1, 13, dtype=np.int32), shape=[2, 2, 3])
>>> a # 3-D tensor
<tf.Tensor: shape=(2, 2, 3), dtype=int32, numpy=
array([[[ 1, 2, 3],
[ 4, 5, 6]],
[[ 7, 8, 9],
[10, 11, 12]]], dtype=int32)>
>>> b = tf.constant(np.arange(13, 25, dtype=np.int32), shape=[2, 3, 2])
>>> b # 3-D tensor
<tf.Tensor: shape=(2, 3, 2), dtype=int32, numpy=
array([[[13, 14],
[15, 16],
[17, 18]],
[[19, 20],
[21, 22],
[23, 24]]], dtype=int32)>
>>> c = tf.matmul(a, b)
>>> c # `a` * `b`
<tf.Tensor: shape=(2, 2, 2), dtype=int32, numpy=
array([[[ 94, 100],
[229, 244]],
[[508, 532],
[697, 730]]], dtype=int32)>
Since Python >= 3.5 the @ operator is supported
(see [PEP 465](https://www.python.org/dev/peps/pep-0465/)). In TensorFlow,
it simply calls the `tf.matmul()` function, so the following lines are
equivalent:
>>> d = a @ b @ [[10], [11]]
>>> d = tf.matmul(tf.matmul(a, b), [[10], [11]])
Args:
a: `tf.Tensor` of type `float16`, `float32`, `float64`, `int32`,
`complex64`, `complex128` and rank > 1.
b: `tf.Tensor` with same type and rank as `a`.
transpose_a: If `True`, `a` is transposed before multiplication.
transpose_b: If `True`, `b` is transposed before multiplication.
adjoint_a: If `True`, `a` is conjugated and transposed before
multiplication.
adjoint_b: If `True`, `b` is conjugated and transposed before
multiplication.
a_is_sparse: If `True`, `a` is treated as a sparse matrix. Note that this
**does not support `tf.sparse.SparseTensor`**; it just makes optimizations
that assume most values in `a` are zero.
See `tf.sparse.sparse_dense_matmul`
for some support for `tf.sparse.SparseTensor` multiplication.
b_is_sparse: If `True`, `b` is treated as a sparse matrix. Note that this
**does not support `tf.sparse.SparseTensor`**; it just makes optimizations
that assume most values in `b` are zero.
See `tf.sparse.sparse_dense_matmul`
for some support for `tf.sparse.SparseTensor` multiplication.
output_type: The output datatype, if needed. Defaults to None, in which case
the output type is the same as the input type. Currently this only works when
the input tensors are type (u)int8 and output_type is int32.
name: Name for the operation (optional).
Returns:
A `tf.Tensor` of the same type as `a` and `b` where each inner-most matrix
is the product of the corresponding matrices in `a` and `b`, e.g. if all
transpose or adjoint attributes are `False`:
`output[..., i, j] = sum_k (a[..., i, k] * b[..., k, j])`,
for all indices `i`, `j`.
Note: This is matrix product, not element-wise product.
Raises:
ValueError: If `transpose_a` and `adjoint_a`, or `transpose_b` and
`adjoint_b` are both set to `True`.
TypeError: If `output_type` is specified but the types of `a`, `b` and
`output_type` are not (u)int8, (u)int8 and int32.
"""
with ops.name_scope(name, "MatMul", [a, b]) as name:
if transpose_a and adjoint_a:
raise ValueError(
f"Only one of `transpose_a` and `adjoint_a` can be True. "
f"Received `transpose_a`={transpose_a}, "
f"`adjoint_a`={adjoint_a}.")
if transpose_b and adjoint_b:
raise ValueError(
f"Only one of `transpose_b` and `adjoint_b` can be True. "
f"Received `transpose_b`={transpose_b}, "
f"`adjoint_b`={adjoint_b}.")
if context.executing_eagerly():
if not isinstance(a, (ops.EagerTensor, _resource_variable_type)):
a = ops.convert_to_tensor(a, name="a")
if not isinstance(b, (ops.EagerTensor, _resource_variable_type)):
b = ops.convert_to_tensor(b, dtype_hint=a.dtype.base_dtype, name="b")
else:
a = ops.convert_to_tensor(a, name="a")
b = ops.convert_to_tensor(b, dtype_hint=a.dtype.base_dtype, name="b")
# TODO(apassos) remove _shape_tuple here when it is not needed.
a_shape = a._shape_tuple() # pylint: disable=protected-access
b_shape = b._shape_tuple() # pylint: disable=protected-access
output_may_have_non_empty_batch_shape = (
(a_shape is None or len(a_shape) > 2) or
(b_shape is None or len(b_shape) > 2))
# TODO(b/178749687): remove this boolean and all related branches once the
# bridges are ready.
# batch_matmul_v3 is for when input type is different from output type.
use_batch_matmul_v3 = False
if output_type and (output_type != a.dtype or output_type != b.dtype):
use_batch_matmul_v3 = True
if (not a_is_sparse and
not b_is_sparse) and output_may_have_non_empty_batch_shape:
# BatchMatmul does not support transpose, so we conjugate the matrix and
# use adjoint instead. Conj() is a noop for real matrices.
if transpose_a:
a = conj(a)
adjoint_a = True
if transpose_b:
b = conj(b)
adjoint_b = True
if use_batch_matmul_v3:
return gen_math_ops.batch_mat_mul_v3(
a, b, adj_x=adjoint_a, adj_y=adjoint_b, Tout=output_type, name=name)
else:
return gen_math_ops.batch_mat_mul_v2(
a, b, adj_x=adjoint_a, adj_y=adjoint_b, name=name)
# Neither matmul nor sparse_matmul support adjoint, so we conjugate
# the matrix and use transpose instead. Conj() is a noop for real
# matrices.
if adjoint_a:
a = conj(a)
transpose_a = True
if adjoint_b:
b = conj(b)
transpose_b = True
use_sparse_matmul = False
if a_is_sparse or b_is_sparse:
sparse_matmul_types = [dtypes.bfloat16, dtypes.float32]
use_sparse_matmul = (
a.dtype in sparse_matmul_types and b.dtype in sparse_matmul_types)
if (((a.dtype == dtypes.bfloat16 and
b.dtype not in (dtypes.int8, dtypes.uint8)) or
(b.dtype == dtypes.bfloat16 and
a.dtype not in (dtypes.int8, dtypes.uint8))) and a.dtype != b.dtype):
# matmul currently doesn't handle mixed-precision inputs other than
# fp16 * int8 which is supported in BatchMatMulV3.
use_sparse_matmul = True
if use_sparse_matmul:
ret = sparse_matmul(
a,
b,
transpose_a=transpose_a,
transpose_b=transpose_b,
a_is_sparse=a_is_sparse,
b_is_sparse=b_is_sparse,
name=name)
# sparse_matmul always returns float32, even with
# bfloat16 inputs. This prevents us from configuring bfloat16 training.
# casting to bfloat16 also matches non-sparse matmul behavior better.
if a.dtype == dtypes.bfloat16 and b.dtype == dtypes.bfloat16:
ret = cast(ret, dtypes.bfloat16)
return ret
else:
if use_batch_matmul_v3:
adjoint_a = adjoint_a or transpose_a
adjoint_b = adjoint_b or transpose_b
return gen_math_ops.batch_mat_mul_v3(
a, b, adj_x=adjoint_a, adj_y=adjoint_b, Tout=output_type, name=name)
else:
return gen_math_ops.mat_mul(
a, b, transpose_a=transpose_a, transpose_b=transpose_b, name=name) | [
"def",
"matmul",
"(",
"a",
",",
"b",
",",
"transpose_a",
"=",
"False",
",",
"transpose_b",
"=",
"False",
",",
"adjoint_a",
"=",
"False",
",",
"adjoint_b",
"=",
"False",
",",
"a_is_sparse",
"=",
"False",
",",
"b_is_sparse",
"=",
"False",
",",
"output_type",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"\"MatMul\"",
",",
"[",
"a",
",",
"b",
"]",
")",
"as",
"name",
":",
"if",
"transpose_a",
"and",
"adjoint_a",
":",
"raise",
"ValueError",
"(",
"f\"Only one of `transpose_a` and `adjoint_a` can be True. \"",
"f\"Received `transpose_a`={transpose_a}, \"",
"f\"`adjoint_a`={adjoint_a}.\"",
")",
"if",
"transpose_b",
"and",
"adjoint_b",
":",
"raise",
"ValueError",
"(",
"f\"Only one of `transpose_b` and `adjoint_b` can be True. \"",
"f\"Received `transpose_b`={transpose_b}, \"",
"f\"`adjoint_b`={adjoint_b}.\"",
")",
"if",
"context",
".",
"executing_eagerly",
"(",
")",
":",
"if",
"not",
"isinstance",
"(",
"a",
",",
"(",
"ops",
".",
"EagerTensor",
",",
"_resource_variable_type",
")",
")",
":",
"a",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"a",
",",
"name",
"=",
"\"a\"",
")",
"if",
"not",
"isinstance",
"(",
"b",
",",
"(",
"ops",
".",
"EagerTensor",
",",
"_resource_variable_type",
")",
")",
":",
"b",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"b",
",",
"dtype_hint",
"=",
"a",
".",
"dtype",
".",
"base_dtype",
",",
"name",
"=",
"\"b\"",
")",
"else",
":",
"a",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"a",
",",
"name",
"=",
"\"a\"",
")",
"b",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"b",
",",
"dtype_hint",
"=",
"a",
".",
"dtype",
".",
"base_dtype",
",",
"name",
"=",
"\"b\"",
")",
"# TODO(apassos) remove _shape_tuple here when it is not needed.",
"a_shape",
"=",
"a",
".",
"_shape_tuple",
"(",
")",
"# pylint: disable=protected-access",
"b_shape",
"=",
"b",
".",
"_shape_tuple",
"(",
")",
"# pylint: disable=protected-access",
"output_may_have_non_empty_batch_shape",
"=",
"(",
"(",
"a_shape",
"is",
"None",
"or",
"len",
"(",
"a_shape",
")",
">",
"2",
")",
"or",
"(",
"b_shape",
"is",
"None",
"or",
"len",
"(",
"b_shape",
")",
">",
"2",
")",
")",
"# TODO(b/178749687): remove this boolean and all related branches once the",
"# bridges are ready.",
"# batch_matmul_v3 is for when input type is different from output type.",
"use_batch_matmul_v3",
"=",
"False",
"if",
"output_type",
"and",
"(",
"output_type",
"!=",
"a",
".",
"dtype",
"or",
"output_type",
"!=",
"b",
".",
"dtype",
")",
":",
"use_batch_matmul_v3",
"=",
"True",
"if",
"(",
"not",
"a_is_sparse",
"and",
"not",
"b_is_sparse",
")",
"and",
"output_may_have_non_empty_batch_shape",
":",
"# BatchMatmul does not support transpose, so we conjugate the matrix and",
"# use adjoint instead. Conj() is a noop for real matrices.",
"if",
"transpose_a",
":",
"a",
"=",
"conj",
"(",
"a",
")",
"adjoint_a",
"=",
"True",
"if",
"transpose_b",
":",
"b",
"=",
"conj",
"(",
"b",
")",
"adjoint_b",
"=",
"True",
"if",
"use_batch_matmul_v3",
":",
"return",
"gen_math_ops",
".",
"batch_mat_mul_v3",
"(",
"a",
",",
"b",
",",
"adj_x",
"=",
"adjoint_a",
",",
"adj_y",
"=",
"adjoint_b",
",",
"Tout",
"=",
"output_type",
",",
"name",
"=",
"name",
")",
"else",
":",
"return",
"gen_math_ops",
".",
"batch_mat_mul_v2",
"(",
"a",
",",
"b",
",",
"adj_x",
"=",
"adjoint_a",
",",
"adj_y",
"=",
"adjoint_b",
",",
"name",
"=",
"name",
")",
"# Neither matmul nor sparse_matmul support adjoint, so we conjugate",
"# the matrix and use transpose instead. Conj() is a noop for real",
"# matrices.",
"if",
"adjoint_a",
":",
"a",
"=",
"conj",
"(",
"a",
")",
"transpose_a",
"=",
"True",
"if",
"adjoint_b",
":",
"b",
"=",
"conj",
"(",
"b",
")",
"transpose_b",
"=",
"True",
"use_sparse_matmul",
"=",
"False",
"if",
"a_is_sparse",
"or",
"b_is_sparse",
":",
"sparse_matmul_types",
"=",
"[",
"dtypes",
".",
"bfloat16",
",",
"dtypes",
".",
"float32",
"]",
"use_sparse_matmul",
"=",
"(",
"a",
".",
"dtype",
"in",
"sparse_matmul_types",
"and",
"b",
".",
"dtype",
"in",
"sparse_matmul_types",
")",
"if",
"(",
"(",
"(",
"a",
".",
"dtype",
"==",
"dtypes",
".",
"bfloat16",
"and",
"b",
".",
"dtype",
"not",
"in",
"(",
"dtypes",
".",
"int8",
",",
"dtypes",
".",
"uint8",
")",
")",
"or",
"(",
"b",
".",
"dtype",
"==",
"dtypes",
".",
"bfloat16",
"and",
"a",
".",
"dtype",
"not",
"in",
"(",
"dtypes",
".",
"int8",
",",
"dtypes",
".",
"uint8",
")",
")",
")",
"and",
"a",
".",
"dtype",
"!=",
"b",
".",
"dtype",
")",
":",
"# matmul currently doesn't handle mixed-precision inputs other than",
"# fp16 * int8 which is supported in BatchMatMulV3.",
"use_sparse_matmul",
"=",
"True",
"if",
"use_sparse_matmul",
":",
"ret",
"=",
"sparse_matmul",
"(",
"a",
",",
"b",
",",
"transpose_a",
"=",
"transpose_a",
",",
"transpose_b",
"=",
"transpose_b",
",",
"a_is_sparse",
"=",
"a_is_sparse",
",",
"b_is_sparse",
"=",
"b_is_sparse",
",",
"name",
"=",
"name",
")",
"# sparse_matmul always returns float32, even with",
"# bfloat16 inputs. This prevents us from configuring bfloat16 training.",
"# casting to bfloat16 also matches non-sparse matmul behavior better.",
"if",
"a",
".",
"dtype",
"==",
"dtypes",
".",
"bfloat16",
"and",
"b",
".",
"dtype",
"==",
"dtypes",
".",
"bfloat16",
":",
"ret",
"=",
"cast",
"(",
"ret",
",",
"dtypes",
".",
"bfloat16",
")",
"return",
"ret",
"else",
":",
"if",
"use_batch_matmul_v3",
":",
"adjoint_a",
"=",
"adjoint_a",
"or",
"transpose_a",
"adjoint_b",
"=",
"adjoint_b",
"or",
"transpose_b",
"return",
"gen_math_ops",
".",
"batch_mat_mul_v3",
"(",
"a",
",",
"b",
",",
"adj_x",
"=",
"adjoint_a",
",",
"adj_y",
"=",
"adjoint_b",
",",
"Tout",
"=",
"output_type",
",",
"name",
"=",
"name",
")",
"else",
":",
"return",
"gen_math_ops",
".",
"mat_mul",
"(",
"a",
",",
"b",
",",
"transpose_a",
"=",
"transpose_a",
",",
"transpose_b",
"=",
"transpose_b",
",",
"name",
"=",
"name",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/math_ops.py#L3490-L3714 |
||
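An editorial aside on the row above: a minimal runnable sketch of the transpose/adjoint contract that `tf.matmul` documents (illustrative only, not part of the dataset record).
import tensorflow as tf
a = tf.constant([[1., 2.], [3., 4.]])
b = tf.constant([[5., 6.], [7., 8.]])
# transpose_b flips b on the fly; equivalent to tf.matmul(a, tf.transpose(b)).
c = tf.matmul(a, b, transpose_b=True)
# Passing both transpose_a and adjoint_a raises ValueError, as the docstring promises.
try:
    tf.matmul(a, b, transpose_a=True, adjoint_a=True)
except ValueError as err:
    print(err)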
microsoft/onnxruntime | f92e47e95b13a240e37caf7b36577983544f98fc | onnxruntime/python/onnxruntime_inference_collection.py | python | OrtValue.as_sparse_tensor | (self) | return SparseTensor(self._ortvalue.as_sparse_tensor()) | The function will return SparseTensor contained in this OrtValue | The function will return SparseTensor contained in this OrtValue | [
"The",
"function",
"will",
"return",
"SparseTensor",
"contained",
"in",
"this",
"OrtValue"
] | def as_sparse_tensor(self):
'''
The function will return SparseTensor contained in this OrtValue
'''
return SparseTensor(self._ortvalue.as_sparse_tensor()) | [
"def",
"as_sparse_tensor",
"(",
"self",
")",
":",
"return",
"SparseTensor",
"(",
"self",
".",
"_ortvalue",
".",
"as_sparse_tensor",
"(",
")",
")"
] | https://github.com/microsoft/onnxruntime/blob/f92e47e95b13a240e37caf7b36577983544f98fc/onnxruntime/python/onnxruntime_inference_collection.py#L577-L581 |
|
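A hedged usage sketch for the accessor above; `OrtValue.ortvalue_from_numpy` is assumed from the same onnxruntime module, and `as_sparse_tensor` raises unless the value actually wraps a sparse tensor.
import numpy
import onnxruntime
dense = numpy.array([[1.0, 0.0], [0.0, 2.0]], dtype=numpy.float32)
ort_value = onnxruntime.OrtValue.ortvalue_from_numpy(dense)
sparse = ort_value.as_sparse_tensor()  # expected to raise here: this value is dense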
KhronosGroup/glslang | 90d4bd05cd77ef5782a6779a0fe3d084440dc80d | build_info.py | python | describe | (directory) | Returns a string describing the current Git HEAD version as descriptively
as possible.
Runs 'git describe', or alternately 'git rev-parse HEAD', in directory. If
successful, returns the output; otherwise returns 'unknown hash, <date>'. | Returns a string describing the current Git HEAD version as descriptively
as possible. | [
"Returns",
"a",
"string",
"describing",
"the",
"current",
"Git",
"HEAD",
"version",
"as",
"descriptively",
"as",
"possible",
"."
] | def describe(directory):
"""Returns a string describing the current Git HEAD version as descriptively
as possible.
Runs 'git describe', or alternately 'git rev-parse HEAD', in directory. If
successful, returns the output; otherwise returns 'unknown hash, <date>'."""
try:
# decode() is needed here for Python3 compatibility. In Python2,
# str and bytes are the same type, but not in Python3.
# Popen.communicate() returns a bytes instance, which needs to be
# decoded into text data first in Python3. And this decode() won't
# hurt Python2.
return command_output(['git', 'describe'], directory).rstrip().decode()
except:
try:
return command_output(
['git', 'rev-parse', 'HEAD'], directory).rstrip().decode()
except:
# This is the fallback case where git gives us no information,
# e.g. because the source tree might not be in a git tree.
# In this case, usually use a timestamp. However, to ensure
# reproducible builds, allow the builder to override the wall
# clock time with environment variable SOURCE_DATE_EPOCH
# containing a (presumably) fixed timestamp.
timestamp = int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))
formatted = datetime.datetime.utcfromtimestamp(timestamp).isoformat()
return 'unknown hash, {}'.format(formatted) | [
"def",
"describe",
"(",
"directory",
")",
":",
"try",
":",
"# decode() is needed here for Python3 compatibility. In Python2,",
"# str and bytes are the same type, but not in Python3.",
"# Popen.communicate() returns a bytes instance, which needs to be",
"# decoded into text data first in Python3. And this decode() won't",
"# hurt Python2.",
"return",
"command_output",
"(",
"[",
"'git'",
",",
"'describe'",
"]",
",",
"directory",
")",
".",
"rstrip",
"(",
")",
".",
"decode",
"(",
")",
"except",
":",
"try",
":",
"return",
"command_output",
"(",
"[",
"'git'",
",",
"'rev-parse'",
",",
"'HEAD'",
"]",
",",
"directory",
")",
".",
"rstrip",
"(",
")",
".",
"decode",
"(",
")",
"except",
":",
"# This is the fallback case where git gives us no information,",
"# e.g. because the source tree might not be in a git tree.",
"# In this case, usually use a timestamp. However, to ensure",
"# reproducible builds, allow the builder to override the wall",
"# clock time with environment variable SOURCE_DATE_EPOCH",
"# containing a (presumably) fixed timestamp.",
"timestamp",
"=",
"int",
"(",
"os",
".",
"environ",
".",
"get",
"(",
"'SOURCE_DATE_EPOCH'",
",",
"time",
".",
"time",
"(",
")",
")",
")",
"formatted",
"=",
"datetime",
".",
"datetime",
".",
"utcfromtimestamp",
"(",
"timestamp",
")",
".",
"isoformat",
"(",
")",
"return",
"'unknown hash, {}'",
".",
"format",
"(",
"formatted",
")"
] | https://github.com/KhronosGroup/glslang/blob/90d4bd05cd77ef5782a6779a0fe3d084440dc80d/build_info.py#L117-L143 |
||
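The SOURCE_DATE_EPOCH fallback in `describe` is self-contained enough to run on its own; a minimal sketch of that final branch.
import datetime
import os
import time
timestamp = int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))
formatted = datetime.datetime.utcfromtimestamp(timestamp).isoformat()
print('unknown hash, {}'.format(formatted))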
FreeCAD/FreeCAD | ba42231b9c6889b89e064d6d563448ed81e376ec | src/Mod/Idf/Idf.py | python | Process_comp_outline | (doc,comp_outline,comp_height) | return out_shape | Process_comp_outline(doc,comp_outline,comp_height)->part shape
Create solid component shape based on its outline | Process_comp_outline(doc,comp_outline,comp_height)->part shape
Create solid component shape based on its outline | [
"Process_comp_outline",
"(",
"doc",
"comp_outline",
"comp_height",
")",
"-",
">",
"part",
"shape",
"Create",
"solid",
"component",
"shape",
"base",
"on",
"its",
"outline"
] | def Process_comp_outline(doc,comp_outline,comp_height):
"""Process_comp_outline(doc,comp_outline,comp_height)->part shape
Create solid component shape based on its outline"""
vertex_index=-1; #presume no vertex
out_shape=[]
if comp_outline==[]: #force 0.2mm circle shape for components without place outline definition
comp_outline.append([0.0,0.0,0.0])
comp_outline.append([0.1,0.0,360.0])
for point in comp_outline:
vertex=Base.Vector(point[0],point[1],0)
vertex_index+=1
if vertex_index>0:
if point[2]!=0 and point[2]!=360:
out_shape.append(Part.Arc(prev_vertex,mid_point(prev_vertex,vertex,point[2]),vertex))
FreeCAD.Console.PrintMessage("mid point "+str(mid_point)+"\n")
elif point[2]==360:
per_point=Per_point(prev_vertex,vertex)
out_shape.append(Part.Arc(per_point,mid_point(per_point,vertex,point[2]/2),vertex))
out_shape.append(Part.Arc(per_point,mid_point(per_point,vertex,-point[2]/2),vertex))
else:
out_shape.append(Part.LineSegment(prev_vertex,vertex))
prev_vertex=vertex
out_shape=Part.Shape(out_shape)
out_shape=Part.Wire(out_shape.Edges)
out_shape=Part.Face(out_shape)
out_shape=out_shape.extrude(Base.Vector(0,0,comp_height))
#Part.show(out_shape)
return out_shape | [
"def",
"Process_comp_outline",
"(",
"doc",
",",
"comp_outline",
",",
"comp_height",
")",
":",
"vertex_index",
"=",
"-",
"1",
"#presume no vertex",
"out_shape",
"=",
"[",
"]",
"if",
"comp_outline",
"==",
"[",
"]",
":",
"#force 0.2mm circle shape for components without place outline definition",
"comp_outline",
".",
"append",
"(",
"[",
"0.0",
",",
"0.0",
",",
"0.0",
"]",
")",
"comp_outline",
".",
"append",
"(",
"[",
"0.1",
",",
"0.0",
",",
"360.0",
"]",
")",
"for",
"point",
"in",
"comp_outline",
":",
"vertex",
"=",
"Base",
".",
"Vector",
"(",
"point",
"[",
"0",
"]",
",",
"point",
"[",
"1",
"]",
",",
"0",
")",
"vertex_index",
"+=",
"1",
"if",
"vertex_index",
">",
"0",
":",
"if",
"point",
"[",
"2",
"]",
"!=",
"0",
"and",
"point",
"[",
"2",
"]",
"!=",
"360",
":",
"out_shape",
".",
"append",
"(",
"Part",
".",
"Arc",
"(",
"prev_vertex",
",",
"mid_point",
"(",
"prev_vertex",
",",
"vertex",
",",
"point",
"[",
"2",
"]",
")",
",",
"vertex",
")",
")",
"FreeCAD",
".",
"Console",
".",
"PrintMessage",
"(",
"\"mid point \"",
"+",
"str",
"(",
"mid_point",
")",
"+",
"\"\\n\"",
")",
"elif",
"point",
"[",
"2",
"]",
"==",
"360",
":",
"per_point",
"=",
"Per_point",
"(",
"prev_vertex",
",",
"vertex",
")",
"out_shape",
".",
"append",
"(",
"Part",
".",
"Arc",
"(",
"per_point",
",",
"mid_point",
"(",
"per_point",
",",
"vertex",
",",
"point",
"[",
"2",
"]",
"/",
"2",
")",
",",
"vertex",
")",
")",
"out_shape",
".",
"append",
"(",
"Part",
".",
"Arc",
"(",
"per_point",
",",
"mid_point",
"(",
"per_point",
",",
"vertex",
",",
"-",
"point",
"[",
"2",
"]",
"/",
"2",
")",
",",
"vertex",
")",
")",
"else",
":",
"out_shape",
".",
"append",
"(",
"Part",
".",
"LineSegment",
"(",
"prev_vertex",
",",
"vertex",
")",
")",
"prev_vertex",
"=",
"vertex",
"out_shape",
"=",
"Part",
".",
"Shape",
"(",
"out_shape",
")",
"out_shape",
"=",
"Part",
".",
"Wire",
"(",
"out_shape",
".",
"Edges",
")",
"out_shape",
"=",
"Part",
".",
"Face",
"(",
"out_shape",
")",
"out_shape",
"=",
"out_shape",
".",
"extrude",
"(",
"Base",
".",
"Vector",
"(",
"0",
",",
"0",
",",
"comp_height",
")",
")",
"#Part.show(out_shape)",
"return",
"out_shape"
] | https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Idf/Idf.py#L301-L328 |
|
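A hedged sketch of the full-circle branch above, which splits a 360-degree outline point into two half arcs; here the same rim is built directly with Part.Circle (the `Per_point`/`mid_point` helpers live elsewhere in Idf.py and are not reproduced).
import Part
from FreeCAD import Base
center = Base.Vector(0, 0, 0)
circle = Part.Circle(center, Base.Vector(0, 0, 1), 0.1)  # normal +Z, radius 0.1
wire = Part.Wire(circle.toShape().Edges)
solid = Part.Face(wire).extrude(Base.Vector(0, 0, 1.0))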
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/tools/Editra/src/extern/aui/auibar.py | python | AuiToolBar.EnableTool | (self, tool_id, state) | Enables or disables the tool.
:param integer `tool_id`: identifier for the tool to enable or disable.
:param bool `state`: if ``True``, enables the tool, otherwise disables it. | Enables or disables the tool. | [
"Enables",
"or",
"disables",
"the",
"tool",
"."
] | def EnableTool(self, tool_id, state):
"""
Enables or disables the tool.
:param integer `tool_id`: identifier for the tool to enable or disable.
:param bool `state`: if ``True``, enables the tool, otherwise disables it.
"""
tool = self.FindTool(tool_id)
if tool:
if state:
tool.state &= ~AUI_BUTTON_STATE_DISABLED
else:
tool.state |= AUI_BUTTON_STATE_DISABLED | [
"def",
"EnableTool",
"(",
"self",
",",
"tool_id",
",",
"state",
")",
":",
"tool",
"=",
"self",
".",
"FindTool",
"(",
"tool_id",
")",
"if",
"tool",
":",
"if",
"state",
":",
"tool",
".",
"state",
"&=",
"~",
"AUI_BUTTON_STATE_DISABLED",
"else",
":",
"tool",
".",
"state",
"|=",
"AUI_BUTTON_STATE_DISABLED"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/extern/aui/auibar.py#L2680-L2695 |
||
quantOS-org/DataCore | e2ef9bd2c22ee9e2845675b6435a14fa607f3551 | mdlink/deps/windows/protobuf-2.5.0/python/google/protobuf/message_factory.py | python | GetMessages | (file_protos) | return result | Builds a dictionary of all the messages available in a set of files.
Args:
file_protos: A sequence of file protos to build messages out of.
Returns:
A dictionary containing all the message types in the files mapping the
fully qualified name to a Message subclass for the descriptor. | Builds a dictionary of all the messages available in a set of files. | [
"Builds",
"a",
"dictionary",
"of",
"all",
"the",
"messages",
"available",
"in",
"a",
"set",
"of",
"files",
"."
] | def GetMessages(file_protos):
"""Builds a dictionary of all the messages available in a set of files.
Args:
file_protos: A sequence of file protos to build messages out of.
Returns:
A dictionary containing all the message types in the files mapping the
fully qualified name to a Message subclass for the descriptor.
"""
result = {}
for file_proto in file_protos:
_DB.Add(file_proto)
for file_proto in file_protos:
for desc in _GetAllDescriptors(file_proto.message_type, file_proto.package):
result[desc.full_name] = _FACTORY.GetPrototype(desc)
return result | [
"def",
"GetMessages",
"(",
"file_protos",
")",
":",
"result",
"=",
"{",
"}",
"for",
"file_proto",
"in",
"file_protos",
":",
"_DB",
".",
"Add",
"(",
"file_proto",
")",
"for",
"file_proto",
"in",
"file_protos",
":",
"for",
"desc",
"in",
"_GetAllDescriptors",
"(",
"file_proto",
".",
"message_type",
",",
"file_proto",
".",
"package",
")",
":",
"result",
"[",
"desc",
".",
"full_name",
"]",
"=",
"_FACTORY",
".",
"GetPrototype",
"(",
"desc",
")",
"return",
"result"
] | https://github.com/quantOS-org/DataCore/blob/e2ef9bd2c22ee9e2845675b6435a14fa607f3551/mdlink/deps/windows/protobuf-2.5.0/python/google/protobuf/message_factory.py#L78-L95 |
|
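A hedged sketch of driving `GetMessages` above; the descriptor content is deliberately minimal and the field layout is assumed from descriptor.proto.
from google.protobuf import descriptor_pb2
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example.proto'
file_proto.package = 'example'
file_proto.message_type.add().name = 'Greeting'
classes = GetMessages([file_proto])  # maps 'example.Greeting' to a Message subclass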
krishauser/Klampt | 972cc83ea5befac3f653c1ba20f80155768ad519 | Python/python2_version/klampt/plan/cspace.py | python | CSpace.setBounds | (self,bound) | Convenience function: sets the sampling bound and the
space properties in one line. | Convenience function: sets the sampling bound and the
space properties in one line. | [
"Convenience",
"function",
":",
"sets",
"the",
"sampling",
"bound",
"and",
"the",
"space",
"properties",
"in",
"one",
"line",
"."
] | def setBounds(self,bound):
"""Convenience function: sets the sampling bound and the
space properties in one line."""
self.bound = bound
self.properties["minimum"] = [b[0] for b in bound]
self.properties["maximum"] = [b[1] for b in bound]
volume = 1
for b in self.bound:
if b[0] != b[1]: volume *= b[1]-b[0]
self.properties['volume'] = volume | [
"def",
"setBounds",
"(",
"self",
",",
"bound",
")",
":",
"self",
".",
"bound",
"=",
"bound",
"self",
".",
"properties",
"[",
"\"minimum\"",
"]",
"=",
"[",
"b",
"[",
"0",
"]",
"for",
"b",
"in",
"bound",
"]",
"self",
".",
"properties",
"[",
"\"maximum\"",
"]",
"=",
"[",
"b",
"[",
"1",
"]",
"for",
"b",
"in",
"bound",
"]",
"volume",
"=",
"1",
"for",
"b",
"in",
"self",
".",
"bound",
":",
"if",
"b",
"[",
"0",
"]",
"!=",
"b",
"[",
"1",
"]",
":",
"volume",
"*=",
"b",
"[",
"1",
"]",
"-",
"b",
"[",
"0",
"]",
"self",
".",
"properties",
"[",
"'volume'",
"]",
"=",
"volume"
] | https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/plan/cspace.py#L80-L89 |
||
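The volume bookkeeping in `setBounds` skips degenerate axes; the same computation in isolation:
bound = [(0.0, 1.0), (0.0, 2.0), (5.0, 5.0)]
volume = 1
for lo, hi in bound:
    if lo != hi:
        volume *= hi - lo
print(volume)  # 2.0 -- the zero-width third axis is ignored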
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | chrome/common/extensions/docs/examples/apps/hello-python/oauth2/__init__.py | python | Server._get_version | (self, request) | return version | Verify the correct version request for this server. | Verify the correct version request for this server. | [
"Verify",
"the",
"correct",
"version",
"request",
"for",
"this",
"server",
"."
] | def _get_version(self, request):
"""Verify the correct version request for this server."""
try:
version = request.get_parameter('oauth_version')
except:
version = VERSION
if version and version != self.version:
raise Error('OAuth version %s not supported.' % str(version))
return version | [
"def",
"_get_version",
"(",
"self",
",",
"request",
")",
":",
"try",
":",
"version",
"=",
"request",
".",
"get_parameter",
"(",
"'oauth_version'",
")",
"except",
":",
"version",
"=",
"VERSION",
"if",
"version",
"and",
"version",
"!=",
"self",
".",
"version",
":",
"raise",
"Error",
"(",
"'OAuth version %s not supported.'",
"%",
"str",
"(",
"version",
")",
")",
"return",
"version"
] | https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/chrome/common/extensions/docs/examples/apps/hello-python/oauth2/__init__.py#L610-L620 |
|
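The version guard above, restated as a self-contained sketch (a plain dict stands in for the oauth2 request object; names here are illustrative):
VERSION = '1.0'
def check_version(params, supported=VERSION):
    version = params.get('oauth_version', VERSION)
    if version and version != supported:
        raise ValueError('OAuth version %s not supported.' % str(version))
    return version
assert check_version({'oauth_version': '1.0'}) == '1.0'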
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/sysconfig.py | python | parse_config_h | (fp, vars=None) | return vars | Parse a config.h-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary. | Parse a config.h-style file. | [
"Parse",
"a",
"config",
".",
"h",
"-",
"style",
"file",
"."
] | def parse_config_h(fp, vars=None):
"""Parse a config.h-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
if vars is None:
vars = {}
import re
define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
while True:
line = fp.readline()
if not line:
break
m = define_rx.match(line)
if m:
n, v = m.group(1, 2)
try:
v = int(v)
except ValueError:
pass
vars[n] = v
else:
m = undef_rx.match(line)
if m:
vars[m.group(1)] = 0
return vars | [
"def",
"parse_config_h",
"(",
"fp",
",",
"vars",
"=",
"None",
")",
":",
"if",
"vars",
"is",
"None",
":",
"vars",
"=",
"{",
"}",
"import",
"re",
"define_rx",
"=",
"re",
".",
"compile",
"(",
"\"#define ([A-Z][A-Za-z0-9_]+) (.*)\\n\"",
")",
"undef_rx",
"=",
"re",
".",
"compile",
"(",
"\"/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\\n\"",
")",
"while",
"True",
":",
"line",
"=",
"fp",
".",
"readline",
"(",
")",
"if",
"not",
"line",
":",
"break",
"m",
"=",
"define_rx",
".",
"match",
"(",
"line",
")",
"if",
"m",
":",
"n",
",",
"v",
"=",
"m",
".",
"group",
"(",
"1",
",",
"2",
")",
"try",
":",
"v",
"=",
"int",
"(",
"v",
")",
"except",
"ValueError",
":",
"pass",
"vars",
"[",
"n",
"]",
"=",
"v",
"else",
":",
"m",
"=",
"undef_rx",
".",
"match",
"(",
"line",
")",
"if",
"m",
":",
"vars",
"[",
"m",
".",
"group",
"(",
"1",
")",
"]",
"=",
"0",
"return",
"vars"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/sysconfig.py#L442-L471 |
|
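`parse_config_h` accepts any file-like object, so it is easy to exercise directly (assuming the function above is in scope):
import io
config = io.StringIO('#define HAVE_FOO 1\n/* #undef HAVE_BAR */\n')
print(parse_config_h(config))  # {'HAVE_FOO': 1, 'HAVE_BAR': 0}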
hughperkins/tf-coriander | 970d3df6c11400ad68405f22b0c42a52374e94ca | tensorflow/contrib/distributions/python/ops/shape.py | python | _DistributionShape._assert_non_negative_int32_scalar | (self, x) | return x | Helper which ensures that input is a non-negative, int32, scalar. | Helper which ensures that input is a non-negative, int32, scalar. | [
"Helper",
"which",
"ensures",
"that",
"input",
"is",
"a",
"non",
"-",
"negative",
"int32",
"scalar",
"."
] | def _assert_non_negative_int32_scalar(self, x):
"""Helper which ensures that input is a non-negative, int32, scalar."""
x = ops.convert_to_tensor(x, name="x")
if x.dtype.base_dtype != dtypes.int32.base_dtype:
raise TypeError("%s.dtype=%s is not %s" % (x.name, x.dtype, dtypes.int32))
x_value_static = tensor_util.constant_value(x)
if x.get_shape().ndims is not None and x_value_static is not None:
if x.get_shape().ndims != 0:
raise ValueError("%s.ndims=%d is not 0 (scalar)" %
(x.name, x.get_shape().ndims))
if x_value_static < 0:
raise ValueError("%s.value=%d cannot be negative" %
(x.name, x_value_static))
return x
if self.validate_args:
x = control_flow_ops.with_dependencies([
check_ops.assert_rank(x, 0),
check_ops.assert_non_negative(x)], x)
return x | [
"def",
"_assert_non_negative_int32_scalar",
"(",
"self",
",",
"x",
")",
":",
"x",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"x",
",",
"name",
"=",
"\"x\"",
")",
"if",
"x",
".",
"dtype",
".",
"base_dtype",
"!=",
"dtypes",
".",
"int32",
".",
"base_dtype",
":",
"raise",
"TypeError",
"(",
"\"%s.dtype=%s is not %s\"",
"%",
"(",
"x",
".",
"name",
",",
"x",
".",
"dtype",
",",
"dtypes",
".",
"int32",
")",
")",
"x_value_static",
"=",
"tensor_util",
".",
"constant_value",
"(",
"x",
")",
"if",
"x",
".",
"get_shape",
"(",
")",
".",
"ndims",
"is",
"not",
"None",
"and",
"x_value_static",
"is",
"not",
"None",
":",
"if",
"x",
".",
"get_shape",
"(",
")",
".",
"ndims",
"!=",
"0",
":",
"raise",
"ValueError",
"(",
"\"%s.ndims=%d is not 0 (scalar)\"",
"%",
"(",
"x",
".",
"name",
",",
"x",
".",
"get_shape",
"(",
")",
".",
"ndims",
")",
")",
"if",
"x_value_static",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"\"%s.value=%d cannot be negative\"",
"%",
"(",
"x",
".",
"name",
",",
"x_value_static",
")",
")",
"return",
"x",
"if",
"self",
".",
"validate_args",
":",
"x",
"=",
"control_flow_ops",
".",
"with_dependencies",
"(",
"[",
"check_ops",
".",
"assert_rank",
"(",
"x",
",",
"0",
")",
",",
"check_ops",
".",
"assert_non_negative",
"(",
"x",
")",
"]",
",",
"x",
")",
"return",
"x"
] | https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/contrib/distributions/python/ops/shape.py#L444-L462 |
|
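The validation contract above, restated as a small NumPy sketch (illustrative only; the original operates on symbolic tensors and graph-time constants):
import numpy as np
def assert_non_negative_int32_scalar(x):
    x = np.asarray(x)
    if x.dtype != np.int32:
        raise TypeError('dtype=%s is not int32' % x.dtype)
    if x.ndim != 0:
        raise ValueError('ndims=%d is not 0 (scalar)' % x.ndim)
    if x < 0:
        raise ValueError('value=%d cannot be negative' % x)
    return x
assert_non_negative_int32_scalar(np.int32(3))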
tensorflow/deepmath | b5b721f54de1d5d6a02d78f5da5995237f9995f9 | deepmath/deephol/predictions.py | python | Predictions._batch_goal_embedding | (self, goals: List[Text]) | Computes embeddings from a list of goals. | Computes embeddings from a list of goals. | [
"Computes",
"embeddings",
"from",
"a",
"list",
"of",
"goals",
"."
] | def _batch_goal_embedding(self, goals: List[Text]) -> BATCH_GOAL_EMB_TYPE:
"""Computes embeddings from a list of goals."""
pass | [
"def",
"_batch_goal_embedding",
"(",
"self",
",",
"goals",
":",
"List",
"[",
"Text",
"]",
")",
"->",
"BATCH_GOAL_EMB_TYPE",
":",
"pass"
] | https://github.com/tensorflow/deepmath/blob/b5b721f54de1d5d6a02d78f5da5995237f9995f9/deepmath/deephol/predictions.py#L156-L158 |
||
syoyo/tinygltf | e7f1ff5c59d3ca2489923beb239bdf93d863498f | deps/cpplint.py | python | FileInfo.BaseName | (self) | return self.Split()[1] | File base name - text after the final slash, before the final period. | File base name - text after the final slash, before the final period. | [
"File",
"base",
"name",
"-",
"text",
"after",
"the",
"final",
"slash",
"before",
"the",
"final",
"period",
"."
] | def BaseName(self):
"""File base name - text after the final slash, before the final period."""
return self.Split()[1] | [
"def",
"BaseName",
"(",
"self",
")",
":",
"return",
"self",
".",
"Split",
"(",
")",
"[",
"1",
"]"
] | https://github.com/syoyo/tinygltf/blob/e7f1ff5c59d3ca2489923beb239bdf93d863498f/deps/cpplint.py#L1047-L1049 |
|
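For a plain path string, the same result as `BaseName` (which leans on cpplint's `Split()` returning directory/base/extension) falls out of os.path:
import os
path = 'src/chrome/browser/ui/browser.cc'
base = os.path.splitext(os.path.basename(path))[0]
assert base == 'browser'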
eventql/eventql | 7ca0dbb2e683b525620ea30dc40540a22d5eb227 | deps/3rdparty/spidermonkey/mozjs/python/pystache/pystache/parser.py | python | _Parser._make_interpolation_node | (self, tag_type, tag_key, leading_whitespace) | Create and return a non-section node for the parse tree. | Create and return a non-section node for the parse tree. | [
"Create",
"and",
"return",
"a",
"non",
"-",
"section",
"node",
"for",
"the",
"parse",
"tree",
"."
] | def _make_interpolation_node(self, tag_type, tag_key, leading_whitespace):
"""
Create and return a non-section node for the parse tree.
"""
# TODO: switch to using a dictionary instead of a bunch of ifs and elifs.
if tag_type == '!':
return _CommentNode()
if tag_type == '=':
delimiters = tag_key.split()
self._change_delimiters(delimiters)
return _ChangeNode(delimiters)
if tag_type == '':
return _EscapeNode(tag_key)
if tag_type == '&':
return _LiteralNode(tag_key)
if tag_type == '>':
return _PartialNode(tag_key, leading_whitespace)
raise Exception("Invalid symbol for interpolation tag: %s" % repr(tag_type)) | [
"def",
"_make_interpolation_node",
"(",
"self",
",",
"tag_type",
",",
"tag_key",
",",
"leading_whitespace",
")",
":",
"# TODO: switch to using a dictionary instead of a bunch of ifs and elifs.",
"if",
"tag_type",
"==",
"'!'",
":",
"return",
"_CommentNode",
"(",
")",
"if",
"tag_type",
"==",
"'='",
":",
"delimiters",
"=",
"tag_key",
".",
"split",
"(",
")",
"self",
".",
"_change_delimiters",
"(",
"delimiters",
")",
"return",
"_ChangeNode",
"(",
"delimiters",
")",
"if",
"tag_type",
"==",
"''",
":",
"return",
"_EscapeNode",
"(",
"tag_key",
")",
"if",
"tag_type",
"==",
"'&'",
":",
"return",
"_LiteralNode",
"(",
"tag_key",
")",
"if",
"tag_type",
"==",
"'>'",
":",
"return",
"_PartialNode",
"(",
"tag_key",
",",
"leading_whitespace",
")",
"raise",
"Exception",
"(",
"\"Invalid symbol for interpolation tag: %s\"",
"%",
"repr",
"(",
"tag_type",
")",
")"
] | https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/pystache/pystache/parser.py#L340-L363 |
||
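The `# TODO` in the function above asks for a dispatch table; a hedged sketch of that shape, with the stateful '=' delimiter case omitted (handler payloads are illustrative, not pystache node types):
_HANDLERS = {
    '!': lambda key, ws: ('comment',),
    '': lambda key, ws: ('escape', key),
    '&': lambda key, ws: ('literal', key),
    '>': lambda key, ws: ('partial', key, ws),
}
def make_node(tag_type, tag_key, leading_whitespace):
    try:
        return _HANDLERS[tag_type](tag_key, leading_whitespace)
    except KeyError:
        raise Exception('Invalid symbol for interpolation tag: %r' % tag_type)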
openvinotoolkit/openvino | dedcbeafa8b84cccdc55ca64b8da516682b381c7 | tools/mo/openvino/tools/mo/middle/ApplyPermutations.py | python | ApplyPermutation.shape_of_sub_graph_reinference | (graph: Graph) | After layout permutation (shape change in data nodes) shape sub-graphs contain values in the old layout
To change that we execute full partial inference on the shape-of sub-graphs | After layout permutation (shape change in data nodes) shape sub-graphs contain values in the old layout
To change that we execute full partial inference on the shape-of sub-graphs | [
"After",
"layout",
"permutation",
"(",
"shape",
"change",
"in",
"data",
"nodes",
")",
"shape",
"sub",
"-",
"graphs",
"contain",
"values",
"in",
"the",
"old",
"layout",
"To",
"change",
"that",
"we",
"execute",
"full",
"partial",
"inference",
"on",
"the",
"shape",
"-",
"of",
"sub",
"-",
"graphs"
] | def shape_of_sub_graph_reinference(graph: Graph):
"""
After layout permutation (shape change in data nodes) shape sub-graphs contain values in the old layout
To change that we execute full partial inference on the shape-of sub-graphs
"""
shape_ops = graph.get_op_nodes(op='ShapeOf')
for shape in shape_ops:
shape.infer(shape)
def reinfer_once(in_port: Port):
node = in_port.node
if not node.soft_get('reinferred', False):
node.infer(node)
node['reinferred'] = True
LayoutChangeForConstantShapePaths().find_shape_subgraph_endpoints(
out_ports=[shape.out_port(0) for shape in shape_ops], action=reinfer_once) | [
"def",
"shape_of_sub_graph_reinference",
"(",
"graph",
":",
"Graph",
")",
":",
"shape_ops",
"=",
"graph",
".",
"get_op_nodes",
"(",
"op",
"=",
"'ShapeOf'",
")",
"for",
"shape",
"in",
"shape_ops",
":",
"shape",
".",
"infer",
"(",
"shape",
")",
"def",
"reinfer_once",
"(",
"in_port",
":",
"Port",
")",
":",
"node",
"=",
"in_port",
".",
"node",
"if",
"not",
"node",
".",
"soft_get",
"(",
"'reinferred'",
",",
"False",
")",
":",
"node",
".",
"infer",
"(",
"node",
")",
"node",
"[",
"'reinferred'",
"]",
"=",
"True",
"LayoutChangeForConstantShapePaths",
"(",
")",
".",
"find_shape_subgraph_endpoints",
"(",
"out_ports",
"=",
"[",
"shape",
".",
"out_port",
"(",
"0",
")",
"for",
"shape",
"in",
"shape_ops",
"]",
",",
"action",
"=",
"reinfer_once",
")"
] | https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/tools/mo/openvino/tools/mo/middle/ApplyPermutations.py#L92-L108 |
||
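The `reinfer_once` closure above is a run-once guard keyed on node state; the same pattern in isolation (using a set instead of a node attribute):
def make_once(action):
    done = set()
    def run(key):
        if key not in done:
            done.add(key)
            action(key)
    return run
once = make_once(print)
once('node-a'); once('node-a')  # prints only once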
ZintrulCre/LeetCode_Archiver | de23e16ead29336b5ee7aa1898a392a5d6463d27 | LeetCode/python/1073.py | python | Solution.addNegabinary | (self, arr1, arr2) | return arr1 | :type arr1: List[int]
:type arr2: List[int]
:rtype: List[int] | :type arr1: List[int]
:type arr2: List[int]
:rtype: List[int] | [
":",
"type",
"arr1",
":",
"List",
"[",
"int",
"]",
":",
"type",
"arr2",
":",
"List",
"[",
"int",
"]",
":",
"rtype",
":",
"List",
"[",
"int",
"]"
] | def addNegabinary(self, arr1, arr2):
"""
:type arr1: List[int]
:type arr2: List[int]
:rtype: List[int]
"""
m, n = len(arr1) - 1, len(arr2) - 1
if m < n:
return self.addNegabinary(arr2, arr1)
temp, res = 0, 0
while m >= 0:
temp = arr1[m] + res
if n >= 0:
temp += arr2[n]
if temp == 2:
arr1[m] = 0
res = -1
else:
arr1[m] = temp
res = 0
if n >= 0:
n -= 1
m -= 1
if res == -1:
arr1 = [-1] + arr1
m = len(arr1) - 1
while m >= 0:
if arr1[m] == -1:
arr1[m] = 1
if m - 1 >= 0:
arr1[m - 1] += 1
else:
arr1 = [1] + arr1
elif arr1[m] == 2:
arr1[m] = 0
if m - 1 >= 0:
arr1[m - 1] -= 1
else:
arr1 = [1, 1] + arr1
m -= 1
while len(arr1) > 1 and arr1[0] == 0:
arr1 = arr1[1:]
return arr1 | [
"def",
"addNegabinary",
"(",
"self",
",",
"arr1",
",",
"arr2",
")",
":",
"m",
",",
"n",
"=",
"len",
"(",
"arr1",
")",
"-",
"1",
",",
"len",
"(",
"arr2",
")",
"-",
"1",
"if",
"m",
"<",
"n",
":",
"return",
"self",
".",
"addNegabinary",
"(",
"arr2",
",",
"arr1",
")",
"temp",
",",
"res",
"=",
"0",
",",
"0",
"while",
"m",
">=",
"0",
":",
"temp",
"=",
"arr1",
"[",
"m",
"]",
"+",
"res",
"if",
"n",
">=",
"0",
":",
"temp",
"+=",
"arr2",
"[",
"n",
"]",
"if",
"temp",
"==",
"2",
":",
"arr1",
"[",
"m",
"]",
"=",
"0",
"res",
"=",
"-",
"1",
"else",
":",
"arr1",
"[",
"m",
"]",
"=",
"temp",
"res",
"=",
"0",
"if",
"n",
">=",
"0",
":",
"n",
"-=",
"1",
"m",
"-=",
"1",
"if",
"res",
"==",
"-",
"1",
":",
"arr1",
"=",
"[",
"-",
"1",
"]",
"+",
"arr1",
"m",
"=",
"len",
"(",
"arr1",
")",
"-",
"1",
"while",
"m",
">=",
"0",
":",
"if",
"arr1",
"[",
"m",
"]",
"==",
"-",
"1",
":",
"arr1",
"[",
"m",
"]",
"=",
"1",
"if",
"m",
"-",
"1",
">=",
"0",
":",
"arr1",
"[",
"m",
"-",
"1",
"]",
"+=",
"1",
"else",
":",
"arr1",
"=",
"[",
"1",
"]",
"+",
"arr1",
"elif",
"arr1",
"[",
"m",
"]",
"==",
"2",
":",
"arr1",
"[",
"m",
"]",
"=",
"0",
"if",
"m",
"-",
"1",
">=",
"0",
":",
"arr1",
"[",
"m",
"-",
"1",
"]",
"-=",
"1",
"else",
":",
"arr1",
"=",
"[",
"1",
",",
"1",
"]",
"+",
"arr1",
"m",
"-=",
"1",
"while",
"len",
"(",
"arr1",
")",
">",
"1",
"and",
"arr1",
"[",
"0",
"]",
"==",
"0",
":",
"arr1",
"=",
"arr1",
"[",
"1",
":",
"]",
"return",
"arr1"
] | https://github.com/ZintrulCre/LeetCode_Archiver/blob/de23e16ead29336b5ee7aa1898a392a5d6463d27/LeetCode/python/1073.py#L2-L44 |
|
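A worked check of the base -2 encoding the solution above manipulates: digits weigh (-2)^k, so [1, 1, 1, 1, 1] is 16 - 8 + 4 - 2 + 1 = 11. A tiny decoder is handy for testing addNegabinary:
def from_negabinary(digits):
    value = 0
    for d in digits:
        value = value * -2 + d
    return value
assert from_negabinary([1, 1, 1, 1, 1]) == 11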
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/targets/arrayobj.py | python | _array_copy | (context, builder, sig, args) | return impl_ret_new_ref(context, builder, sig.return_type, ret._getvalue()) | Array copy. | Array copy. | [
"Array",
"copy",
"."
] | def _array_copy(context, builder, sig, args):
"""
Array copy.
"""
arytype = sig.args[0]
ary = make_array(arytype)(context, builder, value=args[0])
shapes = cgutils.unpack_tuple(builder, ary.shape)
rettype = sig.return_type
ret = _empty_nd_impl(context, builder, rettype, shapes)
src_data = ary.data
dest_data = ret.data
assert rettype.layout in "CF"
if arytype.layout == rettype.layout:
# Fast path: memcpy
cgutils.raw_memcpy(builder, dest_data, src_data, ary.nitems,
ary.itemsize, align=1)
else:
src_strides = cgutils.unpack_tuple(builder, ary.strides)
dest_strides = cgutils.unpack_tuple(builder, ret.strides)
intp_t = context.get_value_type(types.intp)
with cgutils.loop_nest(builder, shapes, intp_t) as indices:
src_ptr = cgutils.get_item_pointer2(context, builder, src_data,
shapes, src_strides,
arytype.layout, indices)
dest_ptr = cgutils.get_item_pointer2(context, builder, dest_data,
shapes, dest_strides,
rettype.layout, indices)
builder.store(builder.load(src_ptr), dest_ptr)
return impl_ret_new_ref(context, builder, sig.return_type, ret._getvalue()) | [
"def",
"_array_copy",
"(",
"context",
",",
"builder",
",",
"sig",
",",
"args",
")",
":",
"arytype",
"=",
"sig",
".",
"args",
"[",
"0",
"]",
"ary",
"=",
"make_array",
"(",
"arytype",
")",
"(",
"context",
",",
"builder",
",",
"value",
"=",
"args",
"[",
"0",
"]",
")",
"shapes",
"=",
"cgutils",
".",
"unpack_tuple",
"(",
"builder",
",",
"ary",
".",
"shape",
")",
"rettype",
"=",
"sig",
".",
"return_type",
"ret",
"=",
"_empty_nd_impl",
"(",
"context",
",",
"builder",
",",
"rettype",
",",
"shapes",
")",
"src_data",
"=",
"ary",
".",
"data",
"dest_data",
"=",
"ret",
".",
"data",
"assert",
"rettype",
".",
"layout",
"in",
"\"CF\"",
"if",
"arytype",
".",
"layout",
"==",
"rettype",
".",
"layout",
":",
"# Fast path: memcpy",
"cgutils",
".",
"raw_memcpy",
"(",
"builder",
",",
"dest_data",
",",
"src_data",
",",
"ary",
".",
"nitems",
",",
"ary",
".",
"itemsize",
",",
"align",
"=",
"1",
")",
"else",
":",
"src_strides",
"=",
"cgutils",
".",
"unpack_tuple",
"(",
"builder",
",",
"ary",
".",
"strides",
")",
"dest_strides",
"=",
"cgutils",
".",
"unpack_tuple",
"(",
"builder",
",",
"ret",
".",
"strides",
")",
"intp_t",
"=",
"context",
".",
"get_value_type",
"(",
"types",
".",
"intp",
")",
"with",
"cgutils",
".",
"loop_nest",
"(",
"builder",
",",
"shapes",
",",
"intp_t",
")",
"as",
"indices",
":",
"src_ptr",
"=",
"cgutils",
".",
"get_item_pointer2",
"(",
"context",
",",
"builder",
",",
"src_data",
",",
"shapes",
",",
"src_strides",
",",
"arytype",
".",
"layout",
",",
"indices",
")",
"dest_ptr",
"=",
"cgutils",
".",
"get_item_pointer2",
"(",
"context",
",",
"builder",
",",
"dest_data",
",",
"shapes",
",",
"dest_strides",
",",
"rettype",
".",
"layout",
",",
"indices",
")",
"builder",
".",
"store",
"(",
"builder",
".",
"load",
"(",
"src_ptr",
")",
",",
"dest_ptr",
")",
"return",
"impl_ret_new_ref",
"(",
"context",
",",
"builder",
",",
"sig",
".",
"return_type",
",",
"ret",
".",
"_getvalue",
"(",
")",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/targets/arrayobj.py#L3934-L3968 |
|
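The fast-path/strided-path split in `_array_copy` mirrors what NumPy does for layout-changing copies; the behavior being lowered, in a few lines:
import numpy as np
a = np.asfortranarray(np.arange(6).reshape(2, 3))
c = a.copy(order='C')  # layouts differ, so a strided (non-memcpy) copy is needed
assert c.flags['C_CONTIGUOUS'] and np.array_equal(a, c)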
chatopera/clause | dee31153d5ffdef33deedb6bff03e7806c296968 | var/assets/clients/gen-py/clause/Serving.py | python | Iface.postUtter | (self, request) | Parameters:
- request | Parameters:
- request | [
"Parameters",
":",
"-",
"request"
] | def postUtter(self, request):
"""
Parameters:
- request
"""
pass | [
"def",
"postUtter",
"(",
"self",
",",
"request",
")",
":",
"pass"
] | https://github.com/chatopera/clause/blob/dee31153d5ffdef33deedb6bff03e7806c296968/var/assets/clients/gen-py/clause/Serving.py#L206-L212 |
||
calamares/calamares | 9f6f82405b3074af7c99dc26487d2e46e4ece3e5 | src/modules/unpackfs/main.py | python | get_supported_filesystems | () | return ["file"] + get_supported_filesystems_kernel() | Returns a list of all the supported filesystems
(valid values for the *sourcefs* key in an item). | Returns a list of all the supported filesystems
(valid values for the *sourcefs* key in an item). | [
"Returns",
"a",
"list",
"of",
"all",
"the",
"supported",
"filesystems",
"(",
"valid",
"values",
"for",
"the",
"*",
"sourcefs",
"*",
"key",
"in",
"an",
"item",
"."
] | def get_supported_filesystems():
"""
Returns a list of all the supported filesystems
(valid values for the *sourcefs* key in an item).
"""
return ["file"] + get_supported_filesystems_kernel() | [
"def",
"get_supported_filesystems",
"(",
")",
":",
"return",
"[",
"\"file\"",
"]",
"+",
"get_supported_filesystems_kernel",
"(",
")"
] | https://github.com/calamares/calamares/blob/9f6f82405b3074af7c99dc26487d2e46e4ece3e5/src/modules/unpackfs/main.py#L380-L385 |
|
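`get_supported_filesystems_kernel` (defined elsewhere in main.py) conventionally reads /proc/filesystems; a hedged standalone approximation:
def kernel_filesystems(path='/proc/filesystems'):
    names = []
    with open(path) as fh:
        for line in fh:
            names.append(line.split()[-1])
    return names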
adobe/chromium | cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7 | third_party/mesa/MesaLib/src/mapi/glapi/gen/gl_XML.py | python | gl_api.functionIterateByCategory | (self, cat = None) | return functions.__iter__() | Iterate over functions by category.
If cat is None, all known functions are iterated in category
order. See classify_category for details of the ordering.
Within a category, functions are sorted by name. If cat is
not None, then only functions in that category are iterated. | Iterate over functions by category.
If cat is None, all known functions are iterated in category
order. See classify_category for details of the ordering.
Within a category, functions are sorted by name. If cat is
not None, then only functions in that category are iterated. | [
"Iterate",
"over",
"functions",
"by",
"category",
".",
"If",
"cat",
"is",
"None",
"all",
"known",
"functions",
"are",
"iterated",
"in",
"category",
"order",
".",
"See",
"classify_category",
"for",
"details",
"of",
"the",
"ordering",
".",
"Within",
"a",
"category",
"functions",
"are",
"sorted",
"by",
"name",
".",
"If",
"cat",
"is",
"not",
"None",
"then",
"only",
"functions",
"in",
"that",
"category",
"are",
"iterated",
"."
] | def functionIterateByCategory(self, cat = None):
"""Iterate over functions by category.
If cat is None, all known functions are iterated in category
order. See classify_category for details of the ordering.
Within a category, functions are sorted by name. If cat is
not None, then only functions in that category are iterated.
"""
lists = [{}, {}, {}, {}]
for func in self.functionIterateAll():
[cat_name, cat_number] = self.category_dict[func.name]
if (cat == None) or (cat == cat_name):
[func_cat_type, key] = classify_category(cat_name, cat_number)
if not lists[func_cat_type].has_key(key):
lists[func_cat_type][key] = {}
lists[func_cat_type][key][func.name] = func
functions = []
for func_cat_type in range(0,4):
keys = lists[func_cat_type].keys()
keys.sort()
for key in keys:
names = lists[func_cat_type][key].keys()
names.sort()
for name in names:
functions.append(lists[func_cat_type][key][name])
return functions.__iter__() | [
"def",
"functionIterateByCategory",
"(",
"self",
",",
"cat",
"=",
"None",
")",
":",
"lists",
"=",
"[",
"{",
"}",
",",
"{",
"}",
",",
"{",
"}",
",",
"{",
"}",
"]",
"for",
"func",
"in",
"self",
".",
"functionIterateAll",
"(",
")",
":",
"[",
"cat_name",
",",
"cat_number",
"]",
"=",
"self",
".",
"category_dict",
"[",
"func",
".",
"name",
"]",
"if",
"(",
"cat",
"==",
"None",
")",
"or",
"(",
"cat",
"==",
"cat_name",
")",
":",
"[",
"func_cat_type",
",",
"key",
"]",
"=",
"classify_category",
"(",
"cat_name",
",",
"cat_number",
")",
"if",
"not",
"lists",
"[",
"func_cat_type",
"]",
".",
"has_key",
"(",
"key",
")",
":",
"lists",
"[",
"func_cat_type",
"]",
"[",
"key",
"]",
"=",
"{",
"}",
"lists",
"[",
"func_cat_type",
"]",
"[",
"key",
"]",
"[",
"func",
".",
"name",
"]",
"=",
"func",
"functions",
"=",
"[",
"]",
"for",
"func_cat_type",
"in",
"range",
"(",
"0",
",",
"4",
")",
":",
"keys",
"=",
"lists",
"[",
"func_cat_type",
"]",
".",
"keys",
"(",
")",
"keys",
".",
"sort",
"(",
")",
"for",
"key",
"in",
"keys",
":",
"names",
"=",
"lists",
"[",
"func_cat_type",
"]",
"[",
"key",
"]",
".",
"keys",
"(",
")",
"names",
".",
"sort",
"(",
")",
"for",
"name",
"in",
"names",
":",
"functions",
".",
"append",
"(",
"lists",
"[",
"func_cat_type",
"]",
"[",
"key",
"]",
"[",
"name",
"]",
")",
"return",
"functions",
".",
"__iter__",
"(",
")"
] | https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/third_party/mesa/MesaLib/src/mapi/glapi/gen/gl_XML.py#L859-L893 |
|
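The two-level ordering in `functionIterateByCategory` (bucket by category class, then sorted keys, then sorted names) in a compact Python 3 sketch (the original is Python 2, hence has_key):
lists = [{}, {}, {}, {}]
lists[0]['1.0'] = {'Begin': 'fnB', 'End': 'fnE'}
lists[2]['GL_ARB_foo'] = {'FooARB': 'fnF'}
functions = []
for bucket in lists:
    for key in sorted(bucket):
        for name in sorted(bucket[key]):
            functions.append(bucket[key][name])
assert functions == ['fnB', 'fnE', 'fnF']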
WenmuZhou/PSENet.pytorch | f760c2f4938726a2d00efaf5e5b28218323c44ca | models/loss.py | python | PSELoss.__init__ | (self, Lambda, ratio=3, reduction='mean') | Implement PSE Loss. | Implement PSE Loss. | [
"Implement",
"PSE",
"Loss",
"."
] | def __init__(self, Lambda, ratio=3, reduction='mean'):
"""Implement PSE Loss.
"""
super(PSELoss, self).__init__()
assert reduction in ['mean', 'sum'], " reduction must in ['mean','sum']"
self.Lambda = Lambda
self.ratio = ratio
self.reduction = reduction | [
"def",
"__init__",
"(",
"self",
",",
"Lambda",
",",
"ratio",
"=",
"3",
",",
"reduction",
"=",
"'mean'",
")",
":",
"super",
"(",
"PSELoss",
",",
"self",
")",
".",
"__init__",
"(",
")",
"assert",
"reduction",
"in",
"[",
"'mean'",
",",
"'sum'",
"]",
",",
"\" reduction must in ['mean','sum']\"",
"self",
".",
"Lambda",
"=",
"Lambda",
"self",
".",
"ratio",
"=",
"ratio",
"self",
".",
"reduction",
"=",
"reduction"
] | https://github.com/WenmuZhou/PSENet.pytorch/blob/f760c2f4938726a2d00efaf5e5b28218323c44ca/models/loss.py#L10-L17 |
||
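Constructor behavior for the loss above, sketched (PSELoss is an nn.Module subclass per the super() call; values are illustrative):
criterion = PSELoss(Lambda=0.7)  # ratio=3, reduction='mean' by default
try:
    PSELoss(Lambda=0.7, reduction='max')
except AssertionError as err:
    print(err)  # the reduction guard rejects anything outside ['mean', 'sum']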
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/protobuf/py2/google/protobuf/internal/encoder.py | python | MessageSetItemEncoder | (field_number) | return EncodeField | Encoder for extensions of MessageSet.
The message set message looks like this:
message MessageSet {
repeated group Item = 1 {
required int32 type_id = 2;
required string message = 3;
}
} | Encoder for extensions of MessageSet. | [
"Encoder",
"for",
"extensions",
"of",
"MessageSet",
"."
] | def MessageSetItemEncoder(field_number):
"""Encoder for extensions of MessageSet.
The message set message looks like this:
message MessageSet {
repeated group Item = 1 {
required int32 type_id = 2;
required string message = 3;
}
}
"""
start_bytes = b"".join([
TagBytes(1, wire_format.WIRETYPE_START_GROUP),
TagBytes(2, wire_format.WIRETYPE_VARINT),
_VarintBytes(field_number),
TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
local_EncodeVarint = _EncodeVarint
def EncodeField(write, value, deterministic):
write(start_bytes)
local_EncodeVarint(write, value.ByteSize(), deterministic)
value._InternalSerialize(write, deterministic)
return write(end_bytes)
return EncodeField | [
"def",
"MessageSetItemEncoder",
"(",
"field_number",
")",
":",
"start_bytes",
"=",
"b\"\"",
".",
"join",
"(",
"[",
"TagBytes",
"(",
"1",
",",
"wire_format",
".",
"WIRETYPE_START_GROUP",
")",
",",
"TagBytes",
"(",
"2",
",",
"wire_format",
".",
"WIRETYPE_VARINT",
")",
",",
"_VarintBytes",
"(",
"field_number",
")",
",",
"TagBytes",
"(",
"3",
",",
"wire_format",
".",
"WIRETYPE_LENGTH_DELIMITED",
")",
"]",
")",
"end_bytes",
"=",
"TagBytes",
"(",
"1",
",",
"wire_format",
".",
"WIRETYPE_END_GROUP",
")",
"local_EncodeVarint",
"=",
"_EncodeVarint",
"def",
"EncodeField",
"(",
"write",
",",
"value",
",",
"deterministic",
")",
":",
"write",
"(",
"start_bytes",
")",
"local_EncodeVarint",
"(",
"write",
",",
"value",
".",
"ByteSize",
"(",
")",
",",
"deterministic",
")",
"value",
".",
"_InternalSerialize",
"(",
"write",
",",
"deterministic",
")",
"return",
"write",
"(",
"end_bytes",
")",
"return",
"EncodeField"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/protobuf/py2/google/protobuf/internal/encoder.py#L777-L802 |
|
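The length prefix `EncodeField` writes is a protobuf varint; a minimal standalone encoder equivalent in effect to the `_EncodeVarint` used above:
def encode_varint(value):
    out = bytearray()
    while True:
        bits = value & 0x7F
        value >>= 7
        if value:
            out.append(bits | 0x80)
        else:
            out.append(bits)
            return bytes(out)
assert encode_varint(300) == b'\xac\x02'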
krishauser/Klampt | 972cc83ea5befac3f653c1ba20f80155768ad519 | Python/klampt/src/robotsim.py | python | RobotModel.enableSelfCollision | (self, link1: "int", link2: "int", value: "bool") | return _robotsim.RobotModel_enableSelfCollision(self, link1, link2, value) | r"""
enableSelfCollision(RobotModel self, int link1, int link2, bool value)
Enables/disables self collisions between two links (depending on value) | r"""
enableSelfCollision(RobotModel self, int link1, int link2, bool value) | [
"r",
"enableSelfCollision",
"(",
"RobotModel",
"self",
"int",
"link1",
"int",
"link2",
"bool",
"value",
")"
] | def enableSelfCollision(self, link1: "int", link2: "int", value: "bool") -> "void":
r"""
enableSelfCollision(RobotModel self, int link1, int link2, bool value)
Enables/disables self collisions between two links (depending on value)
"""
return _robotsim.RobotModel_enableSelfCollision(self, link1, link2, value) | [
"def",
"enableSelfCollision",
"(",
"self",
",",
"link1",
":",
"\"int\"",
",",
"link2",
":",
"\"int\"",
",",
"value",
":",
"\"bool\"",
")",
"->",
"\"void\"",
":",
"return",
"_robotsim",
".",
"RobotModel_enableSelfCollision",
"(",
"self",
",",
"link1",
",",
"link2",
",",
"value",
")"
] | https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/klampt/src/robotsim.py#L5425-L5433 |
|
sigmaai/self-driving-golf-cart | 8d891600af3d851add27a10ae45cf3c2108bb87c | ros/src/ros_carla_bridge/carla_ros_bridge/src/carla_ros_bridge/camera.py | python | SemanticSegmentationCamera.get_carla_image_data_array | (self, carla_image) | return carla_image_data_array, 'bgra8' | Function (override) to convert the carla image to a numpy data array
as input for the cv_bridge.cv2_to_imgmsg() function
The segmentation camera raw image is converted to the city scapes palette image
having 4-channel uint8.
:param carla_image: carla image object
:type carla_image: carla.Image
:return tuple (numpy data array containing the image information, encoding)
:rtype tuple(numpy.ndarray, string) | Function (override) to convert the carla image to a numpy data array
as input for the cv_bridge.cv2_to_imgmsg() function | [
"Function",
"(",
"override",
")",
"to",
"convert",
"the",
"carla",
"image",
"to",
"a",
"numpy",
"data",
"array",
"as",
"input",
"for",
"the",
"cv_bridge",
".",
"cv2_to_imgmsg",
"()",
"function"
] | def get_carla_image_data_array(self, carla_image):
"""
Function (override) to convert the carla image to a numpy data array
as input for the cv_bridge.cv2_to_imgmsg() function
The segmentation camera raw image is converted to the city scapes palette image
having 4-channel uint8.
:param carla_image: carla image object
:type carla_image: carla.Image
:return tuple (numpy data array containing the image information, encoding)
:rtype tuple(numpy.ndarray, string)
"""
carla_image.convert(carla.ColorConverter.CityScapesPalette)
carla_image_data_array = numpy.ndarray(
shape=(carla_image.height, carla_image.width, 4),
dtype=numpy.uint8, buffer=carla_image.raw_data)
return carla_image_data_array, 'bgra8' | [
"def",
"get_carla_image_data_array",
"(",
"self",
",",
"carla_image",
")",
":",
"carla_image",
".",
"convert",
"(",
"carla",
".",
"ColorConverter",
".",
"CityScapesPalette",
")",
"carla_image_data_array",
"=",
"numpy",
".",
"ndarray",
"(",
"shape",
"=",
"(",
"carla_image",
".",
"height",
",",
"carla_image",
".",
"width",
",",
"4",
")",
",",
"dtype",
"=",
"numpy",
".",
"uint8",
",",
"buffer",
"=",
"carla_image",
".",
"raw_data",
")",
"return",
"carla_image_data_array",
",",
"'bgra8'"
] | https://github.com/sigmaai/self-driving-golf-cart/blob/8d891600af3d851add27a10ae45cf3c2108bb87c/ros/src/ros_carla_bridge/carla_ros_bridge/src/carla_ros_bridge/camera.py#L323-L341 |
|
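The zero-copy conversion in the camera record above, reduced to plain NumPy (raw BGRA bytes stand in for carla_image.raw_data):
import numpy
height, width = 2, 2
raw = bytes(range(height * width * 4))
img = numpy.ndarray(shape=(height, width, 4), dtype=numpy.uint8, buffer=raw)
assert img.shape == (2, 2, 4)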
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/util/tf_stack.py | python | StackTraceMapper.get_effective_source_map | (self) | Returns a map (filename, lineno) -> (filename, lineno, function_name). | Returns a map (filename, lineno) -> (filename, lineno, function_name). | [
"Returns",
"a",
"map",
"(",
"filename",
"lineno",
")",
"-",
">",
"(",
"filename",
"lineno",
"function_name",
")",
"."
] | def get_effective_source_map(self):
"""Returns a map (filename, lineno) -> (filename, lineno, function_name)."""
raise NotImplementedError('subclasses need to override this') | [
"def",
"get_effective_source_map",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses need to override this'",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/util/tf_stack.py#L77-L79 |
||
openvinotoolkit/openvino | dedcbeafa8b84cccdc55ca64b8da516682b381c7 | tools/mo/openvino/tools/mo/middle/MakeKaldiConstReshapable.py | python | create_const_with_batch_from_input | (producer_port: Port, second_dim, value=0, precision=np.float32) | return init_value_prev_lstm_output | Create const with batch taken from input_out_port and second dimension equals second_dim
:param producer_port: take batch from this port
:param second_dim: second dimension for created constant
:param value: value to initialize constant
:param precision: precision for constant
:return created constant node | Create const with batch taken from input_out_port and second dimension equals second_dim
:param producer_port: take batch from this port
:param second_dim: second dimension for created constant
:param value: value to initialize constant
:param precision: precision for constant
:return created constant node | [
"Create",
"const",
"with",
"batch",
"taken",
"from",
"input_out_port",
"and",
"second",
"dimension",
"equals",
"second_dim",
":",
"param",
"producer_port",
":",
"take",
"batch",
"from",
"this",
"port",
":",
"param",
"second_dim",
":",
"second",
"dimension",
"for",
"created",
"constant",
":",
"param",
"value",
":",
"value",
"to",
"initialize",
"constant",
":",
"param",
"precision",
":",
"precision",
"for",
"constant",
":",
"return",
"created",
"constant",
"node"
] | def create_const_with_batch_from_input(producer_port: Port, second_dim, value=0, precision=np.float32):
"""
Create const with batch taken from input_out_port and second dimension equals second_dim
:param producer_port: take batch from this port
:param second_dim: second dimension for created constant
:param value: value to initialize constant
:param precision: precision for constant
:return created constant node
"""
graph = producer_port.node.graph
input_name = producer_port.node.soft_get('name', producer_port.node.id)
shape_of_input = None
for dest in producer_port.get_destinations():
if dest.node.soft_get('op') == "ShapeOf":
shape_of_input = dest.node
break
if shape_of_input is None:
shape_of_input = Shape(graph, {'name': input_name + '/Shape'}).create_node()
shape_of_input.in_port(0).connect(producer_port)
get_batch = None
for dest in shape_of_input.out_port(0).get_destinations():
if dest.node.soft_get('op') == "Crop" and \
dest.node.in_port(1).get_source().node.soft_get('value', []) == int64_array([1]):
get_batch = dest.node
break
if get_batch is None:
get_batch = create_op_node_with_second_input(graph, Crop, int64_array([1]),
{'name': shape_of_input.name + '/Crop',
'axis': int64_array([0]), 'offset': int64_array([0])},
shape_of_input)
mem_shape = None
for dest in get_batch.out_port(0).get_destinations():
if dest.node.soft_get('op') == "Concat" and \
dest.node.in_port(1).get_source().node.soft_get('value', []) == int64_array([second_dim]):
mem_shape = dest.node
break
if mem_shape is None:
mem_shape = create_op_node_with_second_input(graph, Concat, int64_array([second_dim]),
{'name': get_batch.name + '/Concat', 'axis': 0,
'in_ports_count': 2}, get_batch)
init_value_prev_lstm_output = None
for dest in mem_shape.out_port(0).get_destinations():
if dest.node.soft_get('op') == "Broadcast" and \
dest.node.in_port(1).get_source().node.soft_get('value', []) == mo_array([value], dtype=precision):
init_value_prev_lstm_output = dest.node
break
if init_value_prev_lstm_output is None:
init_value_prev_lstm_output = create_op_with_const_inputs(graph, Broadcast,
{0: mo_array([value], dtype=precision)},
{'name': mem_shape.name + '/Broadcast'})
init_value_prev_lstm_output.in_port(1).connect(mem_shape.out_port(0))
return init_value_prev_lstm_output | [
"def",
"create_const_with_batch_from_input",
"(",
"producer_port",
":",
"Port",
",",
"second_dim",
",",
"value",
"=",
"0",
",",
"precision",
"=",
"np",
".",
"float32",
")",
":",
"graph",
"=",
"producer_port",
".",
"node",
".",
"graph",
"input_name",
"=",
"producer_port",
".",
"node",
".",
"soft_get",
"(",
"'name'",
",",
"producer_port",
".",
"node",
".",
"id",
")",
"shape_of_input",
"=",
"None",
"for",
"dest",
"in",
"producer_port",
".",
"get_destinations",
"(",
")",
":",
"if",
"dest",
".",
"node",
".",
"soft_get",
"(",
"'op'",
")",
"==",
"\"ShapeOf\"",
":",
"shape_of_input",
"=",
"dest",
".",
"node",
"break",
"if",
"shape_of_input",
"is",
"None",
":",
"shape_of_input",
"=",
"Shape",
"(",
"graph",
",",
"{",
"'name'",
":",
"input_name",
"+",
"'/Shape'",
"}",
")",
".",
"create_node",
"(",
")",
"shape_of_input",
".",
"in_port",
"(",
"0",
")",
".",
"connect",
"(",
"producer_port",
")",
"get_batch",
"=",
"None",
"for",
"dest",
"in",
"shape_of_input",
".",
"out_port",
"(",
"0",
")",
".",
"get_destinations",
"(",
")",
":",
"if",
"dest",
".",
"node",
".",
"soft_get",
"(",
"'op'",
")",
"==",
"\"Crop\"",
"and",
"dest",
".",
"node",
".",
"in_port",
"(",
"1",
")",
".",
"get_source",
"(",
")",
".",
"node",
".",
"soft_get",
"(",
"'value'",
",",
"[",
"]",
")",
"==",
"int64_array",
"(",
"[",
"1",
"]",
")",
":",
"get_batch",
"=",
"dest",
".",
"node",
"break",
"if",
"get_batch",
"is",
"None",
":",
"get_batch",
"=",
"create_op_node_with_second_input",
"(",
"graph",
",",
"Crop",
",",
"int64_array",
"(",
"[",
"1",
"]",
")",
",",
"{",
"'name'",
":",
"shape_of_input",
".",
"name",
"+",
"'/Crop'",
",",
"'axis'",
":",
"int64_array",
"(",
"[",
"0",
"]",
")",
",",
"'offset'",
":",
"int64_array",
"(",
"[",
"0",
"]",
")",
"}",
",",
"shape_of_input",
")",
"mem_shape",
"=",
"None",
"for",
"dest",
"in",
"get_batch",
".",
"out_port",
"(",
"0",
")",
".",
"get_destinations",
"(",
")",
":",
"if",
"dest",
".",
"node",
".",
"soft_get",
"(",
"'op'",
")",
"==",
"\"Concat\"",
"and",
"dest",
".",
"node",
".",
"in_port",
"(",
"1",
")",
".",
"get_source",
"(",
")",
".",
"node",
".",
"soft_get",
"(",
"'value'",
",",
"[",
"]",
")",
"==",
"int64_array",
"(",
"[",
"second_dim",
"]",
")",
":",
"mem_shape",
"=",
"dest",
".",
"node",
"break",
"if",
"mem_shape",
"is",
"None",
":",
"mem_shape",
"=",
"create_op_node_with_second_input",
"(",
"graph",
",",
"Concat",
",",
"int64_array",
"(",
"[",
"second_dim",
"]",
")",
",",
"{",
"'name'",
":",
"get_batch",
".",
"name",
"+",
"'/Concat'",
",",
"'axis'",
":",
"0",
",",
"'in_ports_count'",
":",
"2",
"}",
",",
"get_batch",
")",
"init_value_prev_lstm_output",
"=",
"None",
"for",
"dest",
"in",
"mem_shape",
".",
"out_port",
"(",
"0",
")",
".",
"get_destinations",
"(",
")",
":",
"if",
"dest",
".",
"node",
".",
"soft_get",
"(",
"'op'",
")",
"==",
"\"Broadcast\"",
"and",
"dest",
".",
"node",
".",
"in_port",
"(",
"1",
")",
".",
"get_source",
"(",
")",
".",
"node",
".",
"soft_get",
"(",
"'value'",
",",
"[",
"]",
")",
"==",
"mo_array",
"(",
"[",
"value",
"]",
",",
"dtype",
"=",
"precision",
")",
":",
"init_value_prev_lstm_output",
"=",
"dest",
".",
"node",
"break",
"if",
"init_value_prev_lstm_output",
"is",
"None",
":",
"init_value_prev_lstm_output",
"=",
"create_op_with_const_inputs",
"(",
"graph",
",",
"Broadcast",
",",
"{",
"0",
":",
"mo_array",
"(",
"[",
"value",
"]",
",",
"dtype",
"=",
"precision",
")",
"}",
",",
"{",
"'name'",
":",
"mem_shape",
".",
"name",
"+",
"'/Broadcast'",
"}",
")",
"init_value_prev_lstm_output",
".",
"in_port",
"(",
"1",
")",
".",
"connect",
"(",
"mem_shape",
".",
"out_port",
"(",
"0",
")",
")",
"return",
"init_value_prev_lstm_output"
] | https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/tools/mo/openvino/tools/mo/middle/MakeKaldiConstReshapable.py#L17-L77 |
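Editor's note — hedged call sketch; `lstm_node` is a hypothetical Model Optimizer graph node, not taken from the record. The helper reuses or builds ShapeOf -> Crop(batch) -> Concat([batch, second_dim]) -> Broadcast(value).
init_state = create_const_with_batch_from_input(
    lstm_node.out_port(0), second_dim=128, value=0.0, precision=np.float32)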
|
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/protobuf/python/google/protobuf/descriptor_database.py | python | DescriptorDatabase.FindFileByName | (self, name) | return self._file_desc_protos_by_file[name] | Finds the file descriptor proto by file name.
Typically the file name is a relative path ending in a .proto file. The
proto with the given name will have to have been added to this database
using the Add method or else an error will be raised.
Args:
name: The file name to find.
Returns:
The file descriptor proto matching the name.
Raises:
KeyError if no file by the given name was added. | Finds the file descriptor proto by file name. | [
"Finds",
"the",
"file",
"descriptor",
"proto",
"by",
"file",
"name",
"."
] | def FindFileByName(self, name):
"""Finds the file descriptor proto by file name.
Typically the file name is a relative path ending in a .proto file. The
proto with the given name will have to have been added to this database
using the Add method or else an error will be raised.
Args:
name: The file name to find.
Returns:
The file descriptor proto matching the name.
Raises:
KeyError if no file by the given name was added.
"""
return self._file_desc_protos_by_file[name] | [
"def",
"FindFileByName",
"(",
"self",
",",
"name",
")",
":",
"return",
"self",
".",
"_file_desc_protos_by_file",
"[",
"name",
"]"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/protobuf/python/google/protobuf/descriptor_database.py#L80-L97 |
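Editor's note — hedged sketch using only public protobuf APIs; the file name is illustrative.
from google.protobuf import descriptor_pb2
from google.protobuf.descriptor_database import DescriptorDatabase

db = DescriptorDatabase()
db.Add(descriptor_pb2.FileDescriptorProto(name='my/file.proto'))  # must be added first
assert db.FindFileByName('my/file.proto').name == 'my/file.proto'
# db.FindFileByName('missing.proto') would raise KeyError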
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/bz2.py | python | BZ2File.closed | (self) | return self._mode == _MODE_CLOSED | True if this file is closed. | True if this file is closed. | [
"True",
"if",
"this",
"file",
"is",
"closed",
"."
] | def closed(self):
"""True if this file is closed."""
return self._mode == _MODE_CLOSED | [
"def",
"closed",
"(",
"self",
")",
":",
"return",
"self",
".",
"_mode",
"==",
"_MODE_CLOSED"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/bz2.py#L134-L136 |
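Editor's note — hedged standard-library sketch of the property.
import bz2

with bz2.BZ2File('/tmp/example.bz2', 'wb') as f:
    f.write(b'payload')
    assert not f.closed   # still open inside the with-block
assert f.closed           # True once the context manager exits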
|
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/gradients_util.py | python | _maybe_colocate_with | (op, gradient_uid, colocate_gradients_with_ops) | Context to colocate with `op` if `colocate_gradients_with_ops`. | Context to colocate with `op` if `colocate_gradients_with_ops`. | [
"Context",
"to",
"colocate",
"with",
"op",
"if",
"colocate_gradients_with_ops",
"."
] | def _maybe_colocate_with(op, gradient_uid, colocate_gradients_with_ops): # pylint: disable=invalid-name
"""Context to colocate with `op` if `colocate_gradients_with_ops`."""
if colocate_gradients_with_ops:
with ops._colocate_with_for_gradient(op, gradient_uid): # pylint: disable=protected-access
yield
else:
yield | [
"def",
"_maybe_colocate_with",
"(",
"op",
",",
"gradient_uid",
",",
"colocate_gradients_with_ops",
")",
":",
"# pylint: disable=invalid-name",
"if",
"colocate_gradients_with_ops",
":",
"with",
"ops",
".",
"_colocate_with_for_gradient",
"(",
"op",
",",
"gradient_uid",
")",
":",
"# pylint: disable=protected-access",
"yield",
"else",
":",
"yield"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/gradients_util.py#L303-L309 |
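Editor's note — hedged sketch of this private TF helper, shown only to illustrate the contract; `op` comes from the surrounding gradient machinery and `build_grad_op` is hypothetical.
with _maybe_colocate_with(op, gradient_uid='uid_0',
                          colocate_gradients_with_ops=True):
    grad = build_grad_op()   # ops created here are colocated with `op`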
||
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/keras/saving/saved_model/utils.py | python | maybe_add_training_arg | (
original_call, wrapped_call, expects_training_arg, default_training_value) | return wrap_with_training_arg, decorator_argspec | Decorate call and optionally adds training argument.
If a layer expects a training argument, this function ensures that 'training'
is present in the layer args or kwonly args, with the default training value.
Args:
original_call: Original call function.
wrapped_call: Wrapped call function.
expects_training_arg: Whether to include 'training' argument.
default_training_value: Default value of the training kwarg to include in
the arg spec. If `None`, the default is `K.learning_phase()`.
Returns:
Tuple of (
function that calls `wrapped_call` and sets the training arg,
Argspec of returned function or `None` if the argspec is unchanged) | Decorate call and optionally adds training argument. | [
"Decorate",
"call",
"and",
"optionally",
"adds",
"training",
"argument",
"."
] | def maybe_add_training_arg(
original_call, wrapped_call, expects_training_arg, default_training_value):
"""Decorate call and optionally adds training argument.
If a layer expects a training argument, this function ensures that 'training'
is present in the layer args or kwonly args, with the default training value.
Args:
original_call: Original call function.
wrapped_call: Wrapped call function.
expects_training_arg: Whether to include 'training' argument.
default_training_value: Default value of the training kwarg to include in
the arg spec. If `None`, the default is `K.learning_phase()`.
Returns:
Tuple of (
function that calls `wrapped_call` and sets the training arg,
Argspec of returned function or `None` if the argspec is unchanged)
"""
if not expects_training_arg:
return wrapped_call, None
def wrap_with_training_arg(*args, **kwargs):
"""Wrap the `wrapped_call` function, and set training argument."""
training_arg_index = get_training_arg_index(original_call)
training = get_training_arg(training_arg_index, args, kwargs)
if training is None:
training = default_training_value or K.learning_phase()
args = list(args)
kwargs = kwargs.copy()
def replace_training_and_call(training):
set_training_arg(training, training_arg_index, args, kwargs)
return wrapped_call(*args, **kwargs)
return control_flow_util.smart_cond(
training, lambda: replace_training_and_call(True),
lambda: replace_training_and_call(False))
# Create arg spec for decorated function. If 'training' is not defined in the
# args of the original arg spec, then add it to kwonlyargs.
arg_spec = tf_inspect.getfullargspec(original_call)
defaults = list(arg_spec.defaults) if arg_spec.defaults is not None else []
kwonlyargs = arg_spec.kwonlyargs
kwonlydefaults = arg_spec.kwonlydefaults or {}
# Add training arg if it does not exist, or set the default training value.
if 'training' not in arg_spec.args:
kwonlyargs.append('training')
kwonlydefaults['training'] = default_training_value
else:
index = arg_spec.args.index('training')
training_default_index = len(arg_spec.args) - index
if (arg_spec.defaults and
len(arg_spec.defaults) >= training_default_index and
defaults[-training_default_index] is None):
defaults[-training_default_index] = default_training_value
decorator_argspec = tf_inspect.FullArgSpec(
args=arg_spec.args,
varargs=arg_spec.varargs,
varkw=arg_spec.varkw,
defaults=defaults,
kwonlyargs=kwonlyargs,
kwonlydefaults=kwonlydefaults,
annotations=arg_spec.annotations)
return wrap_with_training_arg, decorator_argspec | [
"def",
"maybe_add_training_arg",
"(",
"original_call",
",",
"wrapped_call",
",",
"expects_training_arg",
",",
"default_training_value",
")",
":",
"if",
"not",
"expects_training_arg",
":",
"return",
"wrapped_call",
",",
"None",
"def",
"wrap_with_training_arg",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"\"\"\"Wrap the `wrapped_call` function, and set training argument.\"\"\"",
"training_arg_index",
"=",
"get_training_arg_index",
"(",
"original_call",
")",
"training",
"=",
"get_training_arg",
"(",
"training_arg_index",
",",
"args",
",",
"kwargs",
")",
"if",
"training",
"is",
"None",
":",
"training",
"=",
"default_training_value",
"or",
"K",
".",
"learning_phase",
"(",
")",
"args",
"=",
"list",
"(",
"args",
")",
"kwargs",
"=",
"kwargs",
".",
"copy",
"(",
")",
"def",
"replace_training_and_call",
"(",
"training",
")",
":",
"set_training_arg",
"(",
"training",
",",
"training_arg_index",
",",
"args",
",",
"kwargs",
")",
"return",
"wrapped_call",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"control_flow_util",
".",
"smart_cond",
"(",
"training",
",",
"lambda",
":",
"replace_training_and_call",
"(",
"True",
")",
",",
"lambda",
":",
"replace_training_and_call",
"(",
"False",
")",
")",
"# Create arg spec for decorated function. If 'training' is not defined in the",
"# args of the original arg spec, then add it to kwonlyargs.",
"arg_spec",
"=",
"tf_inspect",
".",
"getfullargspec",
"(",
"original_call",
")",
"defaults",
"=",
"list",
"(",
"arg_spec",
".",
"defaults",
")",
"if",
"arg_spec",
".",
"defaults",
"is",
"not",
"None",
"else",
"[",
"]",
"kwonlyargs",
"=",
"arg_spec",
".",
"kwonlyargs",
"kwonlydefaults",
"=",
"arg_spec",
".",
"kwonlydefaults",
"or",
"{",
"}",
"# Add training arg if it does not exist, or set the default training value.",
"if",
"'training'",
"not",
"in",
"arg_spec",
".",
"args",
":",
"kwonlyargs",
".",
"append",
"(",
"'training'",
")",
"kwonlydefaults",
"[",
"'training'",
"]",
"=",
"default_training_value",
"else",
":",
"index",
"=",
"arg_spec",
".",
"args",
".",
"index",
"(",
"'training'",
")",
"training_default_index",
"=",
"len",
"(",
"arg_spec",
".",
"args",
")",
"-",
"index",
"if",
"(",
"arg_spec",
".",
"defaults",
"and",
"len",
"(",
"arg_spec",
".",
"defaults",
")",
">=",
"training_default_index",
"and",
"defaults",
"[",
"-",
"training_default_index",
"]",
"is",
"None",
")",
":",
"defaults",
"[",
"-",
"training_default_index",
"]",
"=",
"default_training_value",
"decorator_argspec",
"=",
"tf_inspect",
".",
"FullArgSpec",
"(",
"args",
"=",
"arg_spec",
".",
"args",
",",
"varargs",
"=",
"arg_spec",
".",
"varargs",
",",
"varkw",
"=",
"arg_spec",
".",
"varkw",
",",
"defaults",
"=",
"defaults",
",",
"kwonlyargs",
"=",
"kwonlyargs",
",",
"kwonlydefaults",
"=",
"kwonlydefaults",
",",
"annotations",
"=",
"arg_spec",
".",
"annotations",
")",
"return",
"wrap_with_training_arg",
",",
"decorator_argspec"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/saving/saved_model/utils.py#L130-L196 |
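Editor's note — hedged sketch; `layer` is a hypothetical Keras layer whose call(inputs, training=None) exists.
wrapped, argspec = maybe_add_training_arg(
    original_call=layer.call,
    wrapped_call=lambda *a, **kw: layer.call(*a, **kw),
    expects_training_arg=True,
    default_training_value=False)
outputs = wrapped(inputs)   # 'training' (False here) is injected before the call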
|
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/ops/math_grad.py | python | _ComplexGrad | (op, grad) | return (array_ops.reshape(math_ops.reduce_sum(math_ops.real(grad), rx), sx),
array_ops.reshape(math_ops.reduce_sum(math_ops.imag(grad), ry), sy)) | Returns the real and imaginary components of 'grad', respectively. | Returns the real and imaginary components of 'grad', respectively. | [
"Returns",
"the",
"real",
"and",
"imaginary",
"components",
"of",
"grad",
"respectively",
"."
] | def _ComplexGrad(op, grad):
"""Returns the real and imaginary components of 'grad', respectively."""
x = op.inputs[0]
y = op.inputs[1]
sx = array_ops.shape(x)
sy = array_ops.shape(y)
rx, ry = gen_array_ops._broadcast_gradient_args(sx, sy)
return (array_ops.reshape(math_ops.reduce_sum(math_ops.real(grad), rx), sx),
array_ops.reshape(math_ops.reduce_sum(math_ops.imag(grad), ry), sy)) | [
"def",
"_ComplexGrad",
"(",
"op",
",",
"grad",
")",
":",
"x",
"=",
"op",
".",
"inputs",
"[",
"0",
"]",
"y",
"=",
"op",
".",
"inputs",
"[",
"1",
"]",
"sx",
"=",
"array_ops",
".",
"shape",
"(",
"x",
")",
"sy",
"=",
"array_ops",
".",
"shape",
"(",
"y",
")",
"rx",
",",
"ry",
"=",
"gen_array_ops",
".",
"_broadcast_gradient_args",
"(",
"sx",
",",
"sy",
")",
"return",
"(",
"array_ops",
".",
"reshape",
"(",
"math_ops",
".",
"reduce_sum",
"(",
"math_ops",
".",
"real",
"(",
"grad",
")",
",",
"rx",
")",
",",
"sx",
")",
",",
"array_ops",
".",
"reshape",
"(",
"math_ops",
".",
"reduce_sum",
"(",
"math_ops",
".",
"imag",
"(",
"grad",
")",
",",
"ry",
")",
",",
"sy",
")",
")"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/math_grad.py#L993-L1001 |
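Editor's note — hedged TF1-style sketch; the registered gradient fires whenever tf.complex sits on a differentiated path, it is not called directly.
import tensorflow.compat.v1 as tf
tf.disable_eager_execution()

x = tf.placeholder(tf.float32, [3])
y = tf.placeholder(tf.float32, [3])
loss = tf.reduce_sum(tf.abs(tf.complex(x, y)))
gx, gy = tf.gradients(loss, [x, y])   # real/imag components of the incoming gradient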
|
hpi-xnor/BMXNet-v2 | af2b1859eafc5c721b1397cef02f946aaf2ce20d | example/multivariate_time_series/src/metrics.py | python | rse | (label, pred) | return numerator / denominator | computes the root relative squared error (condensed using standard deviation formula) | computes the root relative squared error (condensed using standard deviation formula) | [
"computes",
"the",
"root",
"relative",
"squared",
"error",
"(",
"condensed",
"using",
"standard",
"deviation",
"formula",
")"
] | def rse(label, pred):
"""computes the root relative squared error (condensed using standard deviation formula)"""
numerator = np.sqrt(np.mean(np.square(label - pred), axis = None))
denominator = np.std(label, axis = None)
return numerator / denominator | [
"def",
"rse",
"(",
"label",
",",
"pred",
")",
":",
"numerator",
"=",
"np",
".",
"sqrt",
"(",
"np",
".",
"mean",
"(",
"np",
".",
"square",
"(",
"label",
"-",
"pred",
")",
",",
"axis",
"=",
"None",
")",
")",
"denominator",
"=",
"np",
".",
"std",
"(",
"label",
",",
"axis",
"=",
"None",
")",
"return",
"numerator",
"/",
"denominator"
] | https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/example/multivariate_time_series/src/metrics.py#L25-L29 |
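Editor's note — hedged sketch on toy arrays.
import numpy as np

label = np.array([1.0, 2.0, 3.0, 4.0])
pred = np.array([1.1, 1.9, 3.2, 3.8])
print(rse(label, pred))   # RMSE of the errors divided by the std-dev of the labels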
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/dist.py | python | check_extras | (dist, attr, value) | Verify that extras_require mapping is valid | Verify that extras_require mapping is valid | [
"Verify",
"that",
"extras_require",
"mapping",
"is",
"valid"
] | def check_extras(dist, attr, value):
"""Verify that extras_require mapping is valid"""
try:
list(itertools.starmap(_check_extra, value.items()))
except (TypeError, ValueError, AttributeError):
raise DistutilsSetupError(
"'extras_require' must be a dictionary whose values are "
"strings or lists of strings containing valid project/version "
"requirement specifiers."
) | [
"def",
"check_extras",
"(",
"dist",
",",
"attr",
",",
"value",
")",
":",
"try",
":",
"list",
"(",
"itertools",
".",
"starmap",
"(",
"_check_extra",
",",
"value",
".",
"items",
"(",
")",
")",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
",",
"AttributeError",
")",
":",
"raise",
"DistutilsSetupError",
"(",
"\"'extras_require' must be a dictionary whose values are \"",
"\"strings or lists of strings containing valid project/version \"",
"\"requirement specifiers.\"",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/dist.py#L246-L255 |
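Editor's note — hedged sketch of a mapping that passes the check; a non-string value such as a bare int would raise DistutilsSetupError.
extras = {
    'testing': ['pytest>=6', 'coverage'],
    'docs:python_version>="3.6"': 'sphinx',   # keys may carry environment markers
}
check_extras(dist=None, attr='extras_require', value=extras)   # dist/attr are unused by the body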
||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/numpy/py2/numpy/ma/core.py | python | MaskedArray.__rpow__ | (self, other) | return power(other, self) | Raise other to the power self, masking the potential NaNs/Infs | Raise other to the power self, masking the potential NaNs/Infs | [
"Raise",
"other",
"to",
"the",
"power",
"self",
"masking",
"the",
"potential",
"NaNs",
"/",
"Infs"
] | def __rpow__(self, other):
"""
Raise other to the power self, masking the potential NaNs/Infs
"""
return power(other, self) | [
"def",
"__rpow__",
"(",
"self",
",",
"other",
")",
":",
"return",
"power",
"(",
"other",
",",
"self",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py2/numpy/ma/core.py#L4159-L4164 |
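Editor's note — hedged sketch; __rpow__ is reached when the left operand is not a masked array.
import numpy.ma as ma

m = ma.masked_array([1.0, 2.0, 3.0], mask=[False, True, False])
print(2.0 ** m)   # delegates to m.__rpow__(2.0), i.e. power(2.0, m); the mask survives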
|
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/Tix.py | python | TixWidget._subwidget_name | (self,name) | Get a subwidget name (returns a String, not a Widget !) | Get a subwidget name (returns a String, not a Widget !) | [
"Get",
"a",
"subwidget",
"name",
"(",
"returns",
"a",
"String",
"not",
"a",
"Widget",
"!",
")"
] | def _subwidget_name(self,name):
"""Get a subwidget name (returns a String, not a Widget !)"""
try:
return self.tk.call(self._w, 'subwidget', name)
except TclError:
return None | [
"def",
"_subwidget_name",
"(",
"self",
",",
"name",
")",
":",
"try",
":",
"return",
"self",
".",
"tk",
".",
"call",
"(",
"self",
".",
"_w",
",",
"'subwidget'",
",",
"name",
")",
"except",
"TclError",
":",
"return",
"None"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/Tix.py#L372-L377 |
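Editor's note — hedged Python 2 sketch of the private helper (public code would call subwidget() instead); requires a Tk build with Tix support.
import Tix

root = Tix.Tk()
combo = Tix.ComboBox(root)
print(combo._subwidget_name('entry'))   # Tk path string, or None if unknown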
||
ycm-core/ycmd | fc0fb7e5e15176cc5a2a30c80956335988c6b59a | ycmd/utils.py | python | ByteOffsetToCodepointOffset | ( line_value, byte_offset ) | return len( ToUnicode( byte_line_value[ : byte_offset - 1 ] ) ) + 1 | The API calls for byte offsets into the UTF-8 encoded version of the
buffer. However, ycmd internally uses unicode strings. This means that
when we need to walk 'characters' within the buffer, such as when checking
for semantic triggers and similar, we must use codepoint offsets, rather than
byte offsets.
This method converts the |byte_offset|, which is a 1-based utf-8 byte offset,
into a 1-based codepoint offset in the unicode string |line_value|. | The API calls for byte offsets into the UTF-8 encoded version of the
buffer. However, ycmd internally uses unicode strings. This means that
when we need to walk 'characters' within the buffer, such as when checking
for semantic triggers and similar, we must use codepoint offsets, rather than
byte offsets. | [
"The",
"API",
"calls",
"for",
"byte",
"offsets",
"into",
"the",
"UTF",
"-",
"8",
"encoded",
"version",
"of",
"the",
"buffer",
".",
"However",
"ycmd",
"internally",
"uses",
"unicode",
"strings",
".",
"This",
"means",
"that",
"when",
"we",
"need",
"to",
"walk",
"characters",
"within",
"the",
"buffer",
"such",
"as",
"when",
"checking",
"for",
"semantic",
"triggers",
"and",
"similar",
"we",
"must",
"use",
"codepoint",
"offsets",
"rather",
"than",
"byte",
"offsets",
"."
] | def ByteOffsetToCodepointOffset( line_value, byte_offset ):
"""The API calls for byte offsets into the UTF-8 encoded version of the
buffer. However, ycmd internally uses unicode strings. This means that
when we need to walk 'characters' within the buffer, such as when checking
for semantic triggers and similar, we must use codepoint offsets, rather than
byte offsets.
This method converts the |byte_offset|, which is a 1-based utf-8 byte offset,
into a 1-based codepoint offset in the unicode string |line_value|."""
byte_line_value = ToBytes( line_value )
return len( ToUnicode( byte_line_value[ : byte_offset - 1 ] ) ) + 1 | [
"def",
"ByteOffsetToCodepointOffset",
"(",
"line_value",
",",
"byte_offset",
")",
":",
"byte_line_value",
"=",
"ToBytes",
"(",
"line_value",
")",
"return",
"len",
"(",
"ToUnicode",
"(",
"byte_line_value",
"[",
":",
"byte_offset",
"-",
"1",
"]",
")",
")",
"+",
"1"
] | https://github.com/ycm-core/ycmd/blob/fc0fb7e5e15176cc5a2a30c80956335988c6b59a/ycmd/utils.py#L156-L167 |
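Editor's note — hedged sketch; 'é' is two UTF-8 bytes but a single codepoint, so byte and codepoint columns diverge after it.
line = u'café x'
# UTF-8 bytes: c(1) a(2) f(3) 0xC3(4) 0xA9(5) space(6) x(7)
print(ByteOffsetToCodepointOffset(line, 7))   # -> 6, the 1-based codepoint column of 'x'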
|
yun-liu/RCF | 91bfb054ad04187dbbe21e539e165ad9bd3ff00b | scripts/cpp_lint.py | python | CloseExpression | (clean_lines, linenum, pos) | return (line, clean_lines.NumLines(), -1) | If input points to ( or { or [ or <, finds the position that closes it.
If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the
linenum/pos that correspond to the closing of the expression.
Args:
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
pos: A position on the line.
Returns:
A tuple (line, linenum, pos) pointer *past* the closing brace, or
(line, len(lines), -1) if we never find a close. Note we ignore
strings and comments when matching; and the line we return is the
'cleansed' line at linenum. | If input points to ( or { or [ or <, finds the position that closes it. | [
"If",
"input",
"points",
"to",
"(",
"or",
"{",
"or",
"[",
"or",
"<",
"finds",
"the",
"position",
"that",
"closes",
"it",
"."
] | def CloseExpression(clean_lines, linenum, pos):
"""If input points to ( or { or [ or <, finds the position that closes it.
If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the
linenum/pos that correspond to the closing of the expression.
Args:
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
pos: A position on the line.
Returns:
A tuple (line, linenum, pos) pointer *past* the closing brace, or
(line, len(lines), -1) if we never find a close. Note we ignore
strings and comments when matching; and the line we return is the
'cleansed' line at linenum.
"""
line = clean_lines.elided[linenum]
startchar = line[pos]
if startchar not in '({[<':
return (line, clean_lines.NumLines(), -1)
if startchar == '(': endchar = ')'
if startchar == '[': endchar = ']'
if startchar == '{': endchar = '}'
if startchar == '<': endchar = '>'
# Check first line
(end_pos, num_open) = FindEndOfExpressionInLine(
line, pos, 0, startchar, endchar)
if end_pos > -1:
return (line, linenum, end_pos)
# Continue scanning forward
while linenum < clean_lines.NumLines() - 1:
linenum += 1
line = clean_lines.elided[linenum]
(end_pos, num_open) = FindEndOfExpressionInLine(
line, 0, num_open, startchar, endchar)
if end_pos > -1:
return (line, linenum, end_pos)
# Did not find endchar before end of file, give up
return (line, clean_lines.NumLines(), -1) | [
"def",
"CloseExpression",
"(",
"clean_lines",
",",
"linenum",
",",
"pos",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"startchar",
"=",
"line",
"[",
"pos",
"]",
"if",
"startchar",
"not",
"in",
"'({[<'",
":",
"return",
"(",
"line",
",",
"clean_lines",
".",
"NumLines",
"(",
")",
",",
"-",
"1",
")",
"if",
"startchar",
"==",
"'('",
":",
"endchar",
"=",
"')'",
"if",
"startchar",
"==",
"'['",
":",
"endchar",
"=",
"']'",
"if",
"startchar",
"==",
"'{'",
":",
"endchar",
"=",
"'}'",
"if",
"startchar",
"==",
"'<'",
":",
"endchar",
"=",
"'>'",
"# Check first line",
"(",
"end_pos",
",",
"num_open",
")",
"=",
"FindEndOfExpressionInLine",
"(",
"line",
",",
"pos",
",",
"0",
",",
"startchar",
",",
"endchar",
")",
"if",
"end_pos",
">",
"-",
"1",
":",
"return",
"(",
"line",
",",
"linenum",
",",
"end_pos",
")",
"# Continue scanning forward",
"while",
"linenum",
"<",
"clean_lines",
".",
"NumLines",
"(",
")",
"-",
"1",
":",
"linenum",
"+=",
"1",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"(",
"end_pos",
",",
"num_open",
")",
"=",
"FindEndOfExpressionInLine",
"(",
"line",
",",
"0",
",",
"num_open",
",",
"startchar",
",",
"endchar",
")",
"if",
"end_pos",
">",
"-",
"1",
":",
"return",
"(",
"line",
",",
"linenum",
",",
"end_pos",
")",
"# Did not find endchar before end of file, give up",
"return",
"(",
"line",
",",
"clean_lines",
".",
"NumLines",
"(",
")",
",",
"-",
"1",
")"
] | https://github.com/yun-liu/RCF/blob/91bfb054ad04187dbbe21e539e165ad9bd3ff00b/scripts/cpp_lint.py#L1254-L1297 |
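Editor's note — hedged schematic sketch; `clean_lines` must be a CleansedLines instance built by the same linter, which is assumed here.
line, linenum, pos = CloseExpression(clean_lines, linenum=10, pos=4)
if pos == -1:
    print('no matching close token before end of file')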
|
adobe/chromium | cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7 | native_client_sdk/src/build_tools/apply_patch.py | python | _Range.Parse | (self, diff_lines) | Parse a range diff line.
Parse the line at index 0 in diff_lines as a range-info line and set the
range info accordingly. Raise an Error exception if the line is not a valid
range line. If successful, the line at index 0 is removed from diff_lines.
Arguments:
diff_lines: a list of diff lines read from the patch file. | Parse a range diff line. | [
"Parse",
"a",
"range",
"diff",
"line",
"."
] | def Parse(self, diff_lines):
''' Parse a range diff line.
Parse the line at index 0 in diff_lines as a range-info line and set the
range info accordingly. Raise an Error exception if the line is not a valid
range line. If successful, the line at index 0 is removed from diff_lines.
Arguments:
diff_lines: a list of diff lines read from the patch file.
'''
match = re.match(RE_RANGE, diff_lines[0])
if not match:
raise Error(context=diff_lines[0], error='Bad range info')
# Range in source file. The line count is optional and defaults to 1.
self.src_start_line = int(match.group(1))
self.src_line_count = 1
if (match.group(2)):
self.src_line_count = int(match.group(2))
# Range in destination file. The line count is optional and defaults to 1.
self.dest_start_line = int(match.group(3))
self.dest_line_count = 1
if (match.group(4)):
self.dest_line_count = int(match.group(4))
diff_lines.pop(0) | [
"def",
"Parse",
"(",
"self",
",",
"diff_lines",
")",
":",
"match",
"=",
"re",
".",
"match",
"(",
"RE_RANGE",
",",
"diff_lines",
"[",
"0",
"]",
")",
"if",
"not",
"match",
":",
"raise",
"Error",
"(",
"context",
"=",
"diff_lines",
"[",
"0",
"]",
",",
"error",
"=",
"'Bad range info'",
")",
"# Range in source file. The line count is optional and defaults to 1.",
"self",
".",
"src_start_line",
"=",
"int",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
"self",
".",
"src_line_count",
"=",
"1",
"if",
"(",
"match",
".",
"group",
"(",
"2",
")",
")",
":",
"self",
".",
"src_line_count",
"=",
"int",
"(",
"match",
".",
"group",
"(",
"2",
")",
")",
"# Range in destination file. The line count is optional and defaults to 1.",
"self",
".",
"dest_start_line",
"=",
"int",
"(",
"match",
".",
"group",
"(",
"3",
")",
")",
"self",
".",
"dest_line_count",
"=",
"1",
"if",
"(",
"match",
".",
"group",
"(",
"4",
")",
")",
":",
"self",
".",
"dest_line_count",
"=",
"int",
"(",
"match",
".",
"group",
"(",
"4",
")",
")",
"diff_lines",
".",
"pop",
"(",
"0",
")"
] | https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/native_client_sdk/src/build_tools/apply_patch.py#L76-L101 |
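Editor's note — hedged sketch; _Range is assumed default-constructible. Parse consumes diff_lines[0] on success.
r = _Range()
lines = ['@@ -12,5 +12,6 @@', ' unchanged context line']
r.Parse(lines)
print(r.src_start_line, r.src_line_count)     # 12 5
print(r.dest_start_line, r.dest_line_count)   # 12 6
print(len(lines))                             # 1 -- the range line was popped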
||
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/contrib/graph_editor/subgraph.py | python | make_view_from_scope | (scope, graph) | return SubGraphView(ops) | Make a subgraph from a name scope.
Args:
scope: the name of the scope.
graph: the `tf.Graph`.
Returns:
A subgraph view representing the given scope. | Make a subgraph from a name scope. | [
"Make",
"a",
"subgraph",
"from",
"a",
"name",
"scope",
"."
] | def make_view_from_scope(scope, graph):
"""Make a subgraph from a name scope.
Args:
scope: the name of the scope.
graph: the `tf.Graph`.
Returns:
A subgraph view representing the given scope.
"""
ops = select.get_name_scope_ops(graph, scope)
return SubGraphView(ops) | [
"def",
"make_view_from_scope",
"(",
"scope",
",",
"graph",
")",
":",
"ops",
"=",
"select",
".",
"get_name_scope_ops",
"(",
"graph",
",",
"scope",
")",
"return",
"SubGraphView",
"(",
"ops",
")"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/contrib/graph_editor/subgraph.py#L658-L668 |
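Editor's note — hedged TF1/contrib sketch.
import tensorflow as tf
from tensorflow.contrib import graph_editor as ge

g = tf.Graph()
with g.as_default(), tf.name_scope('encoder'):
    a = tf.constant(1.0, name='a')
sgv = ge.make_view_from_scope('encoder', g)
print(len(sgv.ops))   # ops living under the 'encoder/' name scope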
|
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/keras/utils/generic_utils.py | python | validate_config | (config) | return isinstance(config, dict) and _LAYER_UNDEFINED_CONFIG_KEY not in config | Determines whether config appears to be a valid layer config. | Determines whether config appears to be a valid layer config. | [
"Determines",
"whether",
"config",
"appears",
"to",
"be",
"a",
"valid",
"layer",
"config",
"."
] | def validate_config(config):
"""Determines whether config appears to be a valid layer config."""
return isinstance(config, dict) and _LAYER_UNDEFINED_CONFIG_KEY not in config | [
"def",
"validate_config",
"(",
"config",
")",
":",
"return",
"isinstance",
"(",
"config",
",",
"dict",
")",
"and",
"_LAYER_UNDEFINED_CONFIG_KEY",
"not",
"in",
"config"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/utils/generic_utils.py#L1146-L1148 |
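Editor's note — hedged sketch; only dicts without the internal undefined-layer marker count as valid.
assert validate_config({'name': 'dense_1', 'units': 64})
assert not validate_config(None)
assert not validate_config({_LAYER_UNDEFINED_CONFIG_KEY: True})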
|
makefile/frcnn | 8d9b9ebf8be8315ba2f374d460121b0adf1df29c | scripts/cpp_lint.py | python | _DropCommonSuffixes | (filename) | return os.path.splitext(filename)[0] | Drops common suffixes like _test.cc or -inl.h from filename.
For example:
>>> _DropCommonSuffixes('foo/foo-inl.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/bar/foo.cc')
'foo/bar/foo'
>>> _DropCommonSuffixes('foo/foo_internal.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/foo_unusualinternal.h')
'foo/foo_unusualinternal'
Args:
filename: The input filename.
Returns:
The filename with the common suffix removed. | Drops common suffixes like _test.cc or -inl.h from filename. | [
"Drops",
"common",
"suffixes",
"like",
"_test",
".",
"cc",
"or",
"-",
"inl",
".",
"h",
"from",
"filename",
"."
] | def _DropCommonSuffixes(filename):
"""Drops common suffixes like _test.cc or -inl.h from filename.
For example:
>>> _DropCommonSuffixes('foo/foo-inl.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/bar/foo.cc')
'foo/bar/foo'
>>> _DropCommonSuffixes('foo/foo_internal.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/foo_unusualinternal.h')
'foo/foo_unusualinternal'
Args:
filename: The input filename.
Returns:
The filename with the common suffix removed.
"""
for suffix in ('test.cc', 'regtest.cc', 'unittest.cc',
'inl.h', 'impl.h', 'internal.h'):
if (filename.endswith(suffix) and len(filename) > len(suffix) and
filename[-len(suffix) - 1] in ('-', '_')):
return filename[:-len(suffix) - 1]
return os.path.splitext(filename)[0] | [
"def",
"_DropCommonSuffixes",
"(",
"filename",
")",
":",
"for",
"suffix",
"in",
"(",
"'test.cc'",
",",
"'regtest.cc'",
",",
"'unittest.cc'",
",",
"'inl.h'",
",",
"'impl.h'",
",",
"'internal.h'",
")",
":",
"if",
"(",
"filename",
".",
"endswith",
"(",
"suffix",
")",
"and",
"len",
"(",
"filename",
")",
">",
"len",
"(",
"suffix",
")",
"and",
"filename",
"[",
"-",
"len",
"(",
"suffix",
")",
"-",
"1",
"]",
"in",
"(",
"'-'",
",",
"'_'",
")",
")",
":",
"return",
"filename",
"[",
":",
"-",
"len",
"(",
"suffix",
")",
"-",
"1",
"]",
"return",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"[",
"0",
"]"
] | https://github.com/makefile/frcnn/blob/8d9b9ebf8be8315ba2f374d460121b0adf1df29c/scripts/cpp_lint.py#L3576-L3600 |
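Editor's note — sketch mirroring the doctests already shown in the docstring above.
assert _DropCommonSuffixes('foo/foo-inl.h') == 'foo/foo'
assert _DropCommonSuffixes('foo/bar/foo.cc') == 'foo/bar/foo'
assert _DropCommonSuffixes('foo/foo_unusualinternal.h') == 'foo/foo_unusualinternal'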
|
PaddlePaddle/Paddle | 1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c | python/paddle/distributed/fleet/metrics/metric.py | python | max | (input, scope=None, util=None) | return output | distributed max in fleet
Args:
input(numpy.array|Variable|string): output of a layer
scope(Scope): specific scope
Returns:
global_metric(numpy.array): max array
Example:
.. code-block:: python
# in model.py
input = fluid.layers.cast(some_input, dtype='float32')
cnt = fluid.layers.reduce_sum(input)
global_cnt = fluid.layers.create_global_var(persistable=True, dtype='float32', shape=[1], value=0)
tmp = fluid.layers.elementwise_max(cnt, global_cnt)
fluid.layers.assign(tmp, global_cnt)
# in train.py, after train or infer
res = np.array(scope.find_var(global_cnt.name).get_tensor())
print("max array: ", paddle.distributed.fleet.max(res)) | distributed max in fleet | [
"distributed",
"max",
"in",
"fleet"
] | def max(input, scope=None, util=None):
"""
distributed max in fleet
Args:
input(numpy.array|Variable|string): output of a layer
scope(Scope): specific scope
Returns:
global_metric(numpy.array): max array
Example:
.. code-block:: python
# in model.py
input = fluid.layers.cast(some_input, dtype='float32')
cnt = fluid.layers.reduce_sum(input)
global_cnt = fluid.layers.create_global_var(persistable=True, dtype='float32', shape=[1], value=0)
tmp = fluid.layers.elementwise_max(cnt, global_cnt)
fluid.layers.assign(tmp, global_cnt)
# in train.py, after train or infer
res = np.array(scope.find_var(global_cnt.name).get_tensor())
print("max array: ", paddle.distributed.fleet.max(res))
"""
if scope is None:
scope = paddle.static.global_scope()
if util is None:
util = paddle.distributed.fleet.util
if isinstance(input, Variable):
input = np.array(scope.find_var(input.name).get_tensor())
elif isinstance(input, str):
input = np.array(scope.find_var(input).get_tensor())
old_shape = np.array(input.shape)
output = np.copy(input) * 0
output = util.all_reduce(input, "max")
output = output.reshape(old_shape)
return output | [
"def",
"max",
"(",
"input",
",",
"scope",
"=",
"None",
",",
"util",
"=",
"None",
")",
":",
"if",
"scope",
"is",
"None",
":",
"scope",
"=",
"paddle",
".",
"static",
".",
"global_scope",
"(",
")",
"if",
"util",
"is",
"None",
":",
"util",
"=",
"paddle",
".",
"distributed",
".",
"fleet",
".",
"util",
"if",
"isinstance",
"(",
"input",
",",
"Variable",
")",
":",
"input",
"=",
"np",
".",
"array",
"(",
"scope",
".",
"find_var",
"(",
"input",
".",
"name",
")",
".",
"get_tensor",
"(",
")",
")",
"elif",
"isinstance",
"(",
"input",
",",
"str",
")",
":",
"input",
"=",
"np",
".",
"array",
"(",
"scope",
".",
"find_var",
"(",
"input",
")",
".",
"get_tensor",
"(",
")",
")",
"old_shape",
"=",
"np",
".",
"array",
"(",
"input",
".",
"shape",
")",
"output",
"=",
"np",
".",
"copy",
"(",
"input",
")",
"*",
"0",
"output",
"=",
"util",
".",
"all_reduce",
"(",
"input",
",",
"\"max\"",
")",
"output",
"=",
"output",
".",
"reshape",
"(",
"old_shape",
")",
"return",
"output"
] | https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/distributed/fleet/metrics/metric.py#L64-L101 |
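Editor's note — hedged sketch condensing the docstring's own example; assumes an initialized fleet job and a persistable variable `global_cnt` created in model code. The call path is taken verbatim from the docstring.
import numpy as np
import paddle

scope = paddle.static.global_scope()
res = np.array(scope.find_var(global_cnt.name).get_tensor())
print("max array: ", paddle.distributed.fleet.max(res))   # all-reduced with "max"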
|
trilinos/Trilinos | 6168be6dd51e35e1cd681e9c4b24433e709df140 | packages/seacas/libraries/ioss/src/visualization/catalyst/phactori/PhactoriDriver.py | python | DuringRestartUseJsonToSetUp | (jsonIn, ioPipeAndViewsState) | used by process zero (broadcast send process) as well as other processes\n (broadcast recieve processes) to actually take the info in json format\n and set the system up for proper behavior after restart, particularly\n data ranges and plots over time | used by process zero (broadcast send process) as well as other processes\n (broadcast recieve processes) to actually take the info in json format\n and set the system up for proper behavior after restart, particularly\n data ranges and plots over time | [
"used",
"by",
"process",
"zero",
"(",
"broadcast",
"send",
"process",
")",
"as",
"well",
"as",
"other",
"processes",
"\\",
"n",
"(",
"broadcast",
"recieve",
"processes",
")",
"to",
"actually",
"take",
"the",
"info",
"in",
"json",
"format",
"\\",
"n",
"and",
"set",
"the",
"system",
"up",
"for",
"proper",
"behavior",
"after",
"restart",
"particularly",
"\\",
"n",
"data",
"ranges",
"and",
"plots",
"over",
"time"
] | def DuringRestartUseJsonToSetUp(jsonIn, ioPipeAndViewsState):
"used by process zero (broadcast send process) as well as other processes\n (broadcast recieve processes) to actually take the info in json format\n and set the system up for proper behavior after restart, particularly\n data ranges and plots over time"
#go through representations and have each add its state info to jsonOut
if "RepresentationsRestartInfo" not in jsonIn:
if PhactoriDbg():
myDebugPrint3("StartOfVisualizationCallback returning, no rep info\n")
return
representationsJsonIn = jsonIn["RepresentationsRestartInfo"]
for oneRepName, oneRep in \
ioPipeAndViewsState.mRepresentationBlocks.items():
if oneRepName in representationsJsonIn:
if PhactoriDbg():
myDebugPrint3("restart setup callback json for rep: " + oneRepName + "\n")
oneRep.SetFromRestartInfo(representationsJsonIn[oneRepName])
else:
if PhactoriDbg():
myDebugPrint3("warning: Representation named " + oneRepName +
" not in restart info")
plotsOverTimeJsonIn = jsonIn["PlotsOverTimeRestartInfo"]
for onePlotOtName, onePlotOt in \
ioPipeAndViewsState.mPlotOverTimeBlocks.items():
if onePlotOtName in plotsOverTimeJsonIn:
if PhactoriDbg():
myDebugPrint3("plot over time setup callback json for rep: " + \
onePlotOtName + "\n")
onePlotOt.SetFromRestartInfo(plotsOverTimeJsonIn[onePlotOtName])
else:
if PhactoriDbg():
myDebugPrint3("warning: plot over time named " + onePlotOtName +
" not in restart info") | [
"def",
"DuringRestartUseJsonToSetUp",
"(",
"jsonIn",
",",
"ioPipeAndViewsState",
")",
":",
"#go through representations and have each add it's state info to jsonOut",
"if",
"\"RepresentationsRestartInfo\"",
"not",
"in",
"jsonIn",
":",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\"StartOfVisualizationCallback returning, no rep info\\n\"",
")",
"return",
"representationsJsonIn",
"=",
"jsonIn",
"[",
"\"RepresentationsRestartInfo\"",
"]",
"for",
"oneRepName",
",",
"oneRep",
"in",
"ioPipeAndViewsState",
".",
"mRepresentationBlocks",
".",
"items",
"(",
")",
":",
"if",
"oneRepName",
"in",
"representationsJsonIn",
":",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\"restart setup callback json for rep: \"",
"+",
"oneRepName",
"+",
"\"\\n\"",
")",
"oneRep",
".",
"SetFromRestartInfo",
"(",
"representationsJsonIn",
"[",
"oneRepName",
"]",
")",
"else",
":",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\"warning: Representation named \"",
"+",
"oneRepName",
"+",
"\" not in restart info\"",
")",
"plotsOverTimeJsonIn",
"=",
"jsonIn",
"[",
"\"PlotsOverTimeRestartInfo\"",
"]",
"for",
"onePlotOtName",
",",
"onePlotOt",
"in",
"ioPipeAndViewsState",
".",
"mPlotOverTimeBlocks",
".",
"items",
"(",
")",
":",
"if",
"onePlotOtName",
"in",
"plotsOverTimeJsonIn",
":",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\"plot over time setup callback json for rep: \"",
"+",
"onePlotOtName",
"+",
"\"\\n\"",
")",
"onePlotOt",
".",
"SetFromRestartInfo",
"(",
"plotsOverTimeJsonIn",
"[",
"onePlotOtName",
"]",
")",
"else",
":",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\"warning: plot over time named \"",
"+",
"onePlotOtName",
"+",
"\" not in restart info\"",
")"
] | https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/libraries/ioss/src/visualization/catalyst/phactori/PhactoriDriver.py#L19833-L19864 |
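Editor's note — hedged sketch of the expected jsonIn shape, inferred only from the key lookups in the function body; `ioPipeAndViewsState` and the inner state dicts are opaque here.
jsonIn = {
    "RepresentationsRestartInfo": {"rep_1": {}},   # per-representation state dict
    "PlotsOverTimeRestartInfo": {"plot_1": {}},    # per-plot-over-time state dict
}
DuringRestartUseJsonToSetUp(jsonIn, ioPipeAndViewsState)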
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/_gdi.py | python | NamedColour | (*args, **kwargs) | return val | NamedColour(String colourName) -> Colour
Constructs a colour object using a colour name listed in
``wx.TheColourDatabase``, or any string format supported by the
wxColour typemaps. | NamedColour(String colourName) -> Colour | [
"NamedColour",
"(",
"String",
"colourName",
")",
"-",
">",
"Colour"
] | def NamedColour(*args, **kwargs):
"""
NamedColour(String colourName) -> Colour
Constructs a colour object using a colour name listed in
``wx.TheColourDatabase``, or any string format supported by the
wxColour typemaps.
"""
val = _gdi_.new_NamedColour(*args, **kwargs)
return val | [
"def",
"NamedColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"val",
"=",
"_gdi_",
".",
"new_NamedColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"val"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_gdi.py#L302-L311 |
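Editor's note — hedged classic-wxPython sketch.
import wx

app = wx.App(False)               # colour database lookups need a wx.App
c = wx.NamedColour('sky blue')    # resolved via wx.TheColourDatabase
print(c.Red(), c.Green(), c.Blue())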
|
Cisco-Talos/moflow | ed71dfb0540d9e0d7a4c72f0881b58958d573728 | BAP-0.7-moflow/libtracewrap/libtrace/protobuf/python/google/protobuf/internal/wire_format.py | python | IsTypePackable | (field_type) | return field_type not in NON_PACKABLE_TYPES | Return true iff packable = true is valid for fields of this type.
Args:
field_type: a FieldDescriptor::Type value.
Returns:
True iff fields of this type are packable. | Return true iff packable = true is valid for fields of this type. | [
"Return",
"true",
"iff",
"packable",
"=",
"true",
"is",
"valid",
"for",
"fields",
"of",
"this",
"type",
"."
] | def IsTypePackable(field_type):
"""Return true iff packable = true is valid for fields of this type.
Args:
field_type: a FieldDescriptor::Type value.
Returns:
True iff fields of this type are packable.
"""
return field_type not in NON_PACKABLE_TYPES | [
"def",
"IsTypePackable",
"(",
"field_type",
")",
":",
"return",
"field_type",
"not",
"in",
"NON_PACKABLE_TYPES"
] | https://github.com/Cisco-Talos/moflow/blob/ed71dfb0540d9e0d7a4c72f0881b58958d573728/BAP-0.7-moflow/libtracewrap/libtrace/protobuf/python/google/protobuf/internal/wire_format.py#L259-L268 |
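Editor's note — hedged sketch; scalar numeric field types are packable, string-like ones are not.
from google.protobuf import descriptor

print(IsTypePackable(descriptor.FieldDescriptor.TYPE_INT32))    # True
print(IsTypePackable(descriptor.FieldDescriptor.TYPE_STRING))   # False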
|
echronos/echronos | c996f1d2c8af6c6536205eb319c1bf1d4d84569c | external_tools/ply_info/example/unicalc/calc.py | python | p_expression_group | (p) | expression : LPAREN expression RPAREN | expression : LPAREN expression RPAREN | [
"expression",
":",
"LPAREN",
"expression",
"RPAREN"
] | def p_expression_group(p):
'expression : LPAREN expression RPAREN'
p[0] = p[2] | [
"def",
"p_expression_group",
"(",
"p",
")",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"2",
"]"
] | https://github.com/echronos/echronos/blob/c996f1d2c8af6c6536205eb319c1bf1d4d84569c/external_tools/ply_info/example/unicalc/calc.py#L86-L88 |
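Editor's note — hedged sketch of how PLY consumes the rule; the surrounding module is assumed to define the LPAREN/RPAREN tokens and the other expression rules.
import ply.yacc as yacc

parser = yacc.yacc()             # collects every p_* rule in the module
print(parser.parse('(1 + 2)'))   # the group rule passes the inner value through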
||
kungfu-origin/kungfu | 90c84b2b590855654cb9a6395ed050e0f7763512 | core/deps/SQLiteCpp-2.3.0/cpplint.py | python | IsBlankLine | (line) | return not line or line.isspace() | Returns true if the given line is blank.
We consider a line to be blank if the line is empty or consists of
only white spaces.
Args:
line: A line of a string.
Returns:
True, if the given line is blank. | Returns true if the given line is blank. | [
"Returns",
"true",
"if",
"the",
"given",
"line",
"is",
"blank",
"."
] | def IsBlankLine(line):
"""Returns true if the given line is blank.
We consider a line to be blank if the line is empty or consists of
only white spaces.
Args:
line: A line of a string.
Returns:
True, if the given line is blank.
"""
return not line or line.isspace() | [
"def",
"IsBlankLine",
"(",
"line",
")",
":",
"return",
"not",
"line",
"or",
"line",
".",
"isspace",
"(",
")"
] | https://github.com/kungfu-origin/kungfu/blob/90c84b2b590855654cb9a6395ed050e0f7763512/core/deps/SQLiteCpp-2.3.0/cpplint.py#L2298-L2310 |
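Editor's note — sketch of the documented behavior.
assert IsBlankLine('')
assert IsBlankLine('   \t ')
assert not IsBlankLine('  int x;')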
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/agw/ribbon/gallery.py | python | RibbonGalleryEvent.GetGalleryItem | (self) | return self._item | Returns the gallery item which the event relates to, or ``None`` if it does not relate to an item. | Returns the gallery item which the event relates to, or ``None`` if it does not relate to an item. | [
"Returns",
"the",
"gallery",
"item",
"which",
"the",
"event",
"relates",
"to",
"or",
"None",
"if",
"it",
"does",
"not",
"relate",
"to",
"an",
"item",
"."
] | def GetGalleryItem(self):
""" Returns the gallery item which the event relates to, or ``None`` if it does not relate to an item. """
return self._item | [
"def",
"GetGalleryItem",
"(",
"self",
")",
":",
"return",
"self",
".",
"_item"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/ribbon/gallery.py#L71-L74 |
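Editor's note — hedged handler sketch; the binding of the handler to a ribbon-gallery selection event is assumed, not taken from the record.
def on_gallery_selected(event):   # receives a RibbonGalleryEvent
    item = event.GetGalleryItem()
    if item is not None:
        print('selected gallery item:', item)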
|
reverbrain/elliptics | 4b4f9b8094d7616c1ec50eb8605edb059b9f228e | bindings/python/src/route.py | python | RouteList.filter_by_address | (self, address) | return RouteList([route for route in self.routes
if route.address == address]) | Filters routes for specified address\n
address = Address.from_host_port_family('host.com:1025:2')
routes = routes.filter_by_address(address) | Filters routes for specified address\n
address = Address.from_host_port_family('host.com:1025:2')
routes = routes.filter_by_address(address) | [
"Filters",
"routes",
"for",
"specified",
"address",
"\\",
"n",
"address",
"=",
"Address",
".",
"from_host_port_family",
"(",
"host",
".",
"com",
":",
"1025",
":",
"2",
")",
"routes",
"=",
"routes",
".",
"filter_by_address",
"(",
"address",
")"
] | def filter_by_address(self, address):
"""
Filters routes for specified address\n
address = Address.from_host_port_family('host.com:1025:2')
routes = routes.filter_by_address(address)
"""
return RouteList([route for route in self.routes
if route.address == address]) | [
"def",
"filter_by_address",
"(",
"self",
",",
"address",
")",
":",
"return",
"RouteList",
"(",
"[",
"route",
"for",
"route",
"in",
"self",
".",
"routes",
"if",
"route",
".",
"address",
"==",
"address",
"]",
")"
] | https://github.com/reverbrain/elliptics/blob/4b4f9b8094d7616c1ec50eb8605edb059b9f228e/bindings/python/src/route.py#L215-L222 |
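Editor's note — sketch repeating the docstring's own usage; `routes` is an existing RouteList.
address = Address.from_host_port_family('host.com:1025:2')
local_routes = routes.filter_by_address(address)   # RouteList for that node only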
|
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/idlelib/AutoCompleteWindow.py | python | AutoCompleteWindow._binary_search | (self, s) | return min(i, len(self.completions)-1) | Find the first index in self.completions where completions[i] is
greater or equal to s, or the last index if there is no such
one. | Find the first index in self.completions where completions[i] is
greater or equal to s, or the last index if there is no such
one. | [
"Find",
"the",
"first",
"index",
"in",
"self",
".",
"completions",
"where",
"completions",
"[",
"i",
"]",
"is",
"greater",
"or",
"equal",
"to",
"s",
"or",
"the",
"last",
"index",
"if",
"there",
"is",
"no",
"such",
"one",
"."
] | def _binary_search(self, s):
"""Find the first index in self.completions where completions[i] is
greater or equal to s, or the last index if there is no such
one."""
i = 0; j = len(self.completions)
while j > i:
m = (i + j) // 2
if self.completions[m] >= s:
j = m
else:
i = m + 1
return min(i, len(self.completions)-1) | [
"def",
"_binary_search",
"(",
"self",
",",
"s",
")",
":",
"i",
"=",
"0",
"j",
"=",
"len",
"(",
"self",
".",
"completions",
")",
"while",
"j",
">",
"i",
":",
"m",
"=",
"(",
"i",
"+",
"j",
")",
"//",
"2",
"if",
"self",
".",
"completions",
"[",
"m",
"]",
">=",
"s",
":",
"j",
"=",
"m",
"else",
":",
"i",
"=",
"m",
"+",
"1",
"return",
"min",
"(",
"i",
",",
"len",
"(",
"self",
".",
"completions",
")",
"-",
"1",
")"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/idlelib/AutoCompleteWindow.py#L69-L80 |
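Editor's note — hedged sketch of the lower-bound search; `acw` is a hypothetical AutoCompleteWindow whose completion list is sorted.
acw.completions = ['alpha', 'beta', 'delta', 'gamma']
print(acw._binary_search('b'))     # 1 -- first entry >= 'b'
print(acw._binary_search('zzz'))   # 3 -- clamped to the last index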
|
smilehao/xlua-framework | a03801538be2b0e92d39332d445b22caca1ef61f | ConfigData/trunk/tools/protobuf-2.5.0/protobuf-2.5.0/python/build/lib/google/protobuf/text_format.py | python | ParseFloat | (text) | Parse a floating point number.
Args:
text: Text to parse.
Returns:
The number parsed.
Raises:
ValueError: If a floating point number couldn't be parsed. | Parse a floating point number. | [
"Parse",
"a",
"floating",
"point",
"number",
"."
] | def ParseFloat(text):
"""Parse a floating point number.
Args:
text: Text to parse.
Returns:
The number parsed.
Raises:
ValueError: If a floating point number couldn't be parsed.
"""
try:
# Assume Python compatible syntax.
return float(text)
except ValueError:
# Check alternative spellings.
if _FLOAT_INFINITY.match(text):
if text[0] == '-':
return float('-inf')
else:
return float('inf')
elif _FLOAT_NAN.match(text):
return float('nan')
else:
# assume '1.0f' format
try:
return float(text.rstrip('f'))
except ValueError:
raise ValueError('Couldn\'t parse float: %s' % text) | [
"def",
"ParseFloat",
"(",
"text",
")",
":",
"try",
":",
"# Assume Python compatible syntax.",
"return",
"float",
"(",
"text",
")",
"except",
"ValueError",
":",
"# Check alternative spellings.",
"if",
"_FLOAT_INFINITY",
".",
"match",
"(",
"text",
")",
":",
"if",
"text",
"[",
"0",
"]",
"==",
"'-'",
":",
"return",
"float",
"(",
"'-inf'",
")",
"else",
":",
"return",
"float",
"(",
"'inf'",
")",
"elif",
"_FLOAT_NAN",
".",
"match",
"(",
"text",
")",
":",
"return",
"float",
"(",
"'nan'",
")",
"else",
":",
"# assume '1.0f' format",
"try",
":",
"return",
"float",
"(",
"text",
".",
"rstrip",
"(",
"'f'",
")",
")",
"except",
"ValueError",
":",
"raise",
"ValueError",
"(",
"'Couldn\\'t parse float: %s'",
"%",
"text",
")"
] | https://github.com/smilehao/xlua-framework/blob/a03801538be2b0e92d39332d445b22caca1ef61f/ConfigData/trunk/tools/protobuf-2.5.0/protobuf-2.5.0/python/build/lib/google/protobuf/text_format.py#L654-L683 |
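Editor's note — hedged sketch of the alternative spellings the parser accepts.
print(ParseFloat('1.5'))    # 1.5
print(ParseFloat('1.0f'))   # 1.0, the C-style suffix is stripped
print(ParseFloat('-inf'))   # -inf
print(ParseFloat('nan'))    # nan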
||
domino-team/openwrt-cc | 8b181297c34d14d3ca521cc9f31430d561dbc688 | package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/tools/gyp/pylib/gyp/generator/msvs.py | python | _InitNinjaFlavor | (params, target_list, target_dicts) | Initialize targets for the ninja flavor.
This sets up the necessary variables in the targets to generate msvs projects
that use ninja as an external builder. The variables in the spec are only set
if they have not been set. This allows individual specs to override the
default values initialized here.
Arguments:
params: Params provided to the generator.
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair. | Initialize targets for the ninja flavor. | [
"Initialize",
"targets",
"for",
"the",
"ninja",
"flavor",
"."
] | def _InitNinjaFlavor(params, target_list, target_dicts):
"""Initialize targets for the ninja flavor.
This sets up the necessary variables in the targets to generate msvs projects
that use ninja as an external builder. The variables in the spec are only set
if they have not been set. This allows individual specs to override the
default values initialized here.
Arguments:
params: Params provided to the generator.
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
"""
for qualified_target in target_list:
spec = target_dicts[qualified_target]
if spec.get('msvs_external_builder'):
# The spec explicitly defined an external builder, so don't change it.
continue
path_to_ninja = spec.get('msvs_path_to_ninja', 'ninja.exe')
spec['msvs_external_builder'] = 'ninja'
if not spec.get('msvs_external_builder_out_dir'):
gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
gyp_dir = os.path.dirname(gyp_file)
configuration = '$(Configuration)'
if params.get('target_arch') == 'x64':
configuration += '_x64'
spec['msvs_external_builder_out_dir'] = os.path.join(
gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir),
ninja_generator.ComputeOutputDir(params),
configuration)
if not spec.get('msvs_external_builder_build_cmd'):
spec['msvs_external_builder_build_cmd'] = [
path_to_ninja,
'-C',
'$(OutDir)',
'$(ProjectName)',
]
if not spec.get('msvs_external_builder_clean_cmd'):
spec['msvs_external_builder_clean_cmd'] = [
path_to_ninja,
'-C',
'$(OutDir)',
'-tclean',
'$(ProjectName)',
] | [
"def",
"_InitNinjaFlavor",
"(",
"params",
",",
"target_list",
",",
"target_dicts",
")",
":",
"for",
"qualified_target",
"in",
"target_list",
":",
"spec",
"=",
"target_dicts",
"[",
"qualified_target",
"]",
"if",
"spec",
".",
"get",
"(",
"'msvs_external_builder'",
")",
":",
"# The spec explicitly defined an external builder, so don't change it.",
"continue",
"path_to_ninja",
"=",
"spec",
".",
"get",
"(",
"'msvs_path_to_ninja'",
",",
"'ninja.exe'",
")",
"spec",
"[",
"'msvs_external_builder'",
"]",
"=",
"'ninja'",
"if",
"not",
"spec",
".",
"get",
"(",
"'msvs_external_builder_out_dir'",
")",
":",
"gyp_file",
",",
"_",
",",
"_",
"=",
"gyp",
".",
"common",
".",
"ParseQualifiedTarget",
"(",
"qualified_target",
")",
"gyp_dir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"gyp_file",
")",
"configuration",
"=",
"'$(Configuration)'",
"if",
"params",
".",
"get",
"(",
"'target_arch'",
")",
"==",
"'x64'",
":",
"configuration",
"+=",
"'_x64'",
"spec",
"[",
"'msvs_external_builder_out_dir'",
"]",
"=",
"os",
".",
"path",
".",
"join",
"(",
"gyp",
".",
"common",
".",
"RelativePath",
"(",
"params",
"[",
"'options'",
"]",
".",
"toplevel_dir",
",",
"gyp_dir",
")",
",",
"ninja_generator",
".",
"ComputeOutputDir",
"(",
"params",
")",
",",
"configuration",
")",
"if",
"not",
"spec",
".",
"get",
"(",
"'msvs_external_builder_build_cmd'",
")",
":",
"spec",
"[",
"'msvs_external_builder_build_cmd'",
"]",
"=",
"[",
"path_to_ninja",
",",
"'-C'",
",",
"'$(OutDir)'",
",",
"'$(ProjectName)'",
",",
"]",
"if",
"not",
"spec",
".",
"get",
"(",
"'msvs_external_builder_clean_cmd'",
")",
":",
"spec",
"[",
"'msvs_external_builder_clean_cmd'",
"]",
"=",
"[",
"path_to_ninja",
",",
"'-C'",
",",
"'$(OutDir)'",
",",
"'-tclean'",
",",
"'$(ProjectName)'",
",",
"]"
] | https://github.com/domino-team/openwrt-cc/blob/8b181297c34d14d3ca521cc9f31430d561dbc688/package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/tools/gyp/pylib/gyp/generator/msvs.py#L1845-L1890 |
||
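The pattern to take away from _InitNinjaFlavor is that every spec key is filled in only when the target has not already set it, so individual gyp targets can override the generator-wide defaults. A minimal self-contained sketch of that fill-only-missing-keys pattern (the spec dict below is invented for illustration; it is not a full gyp target):

import os

def init_ninja_defaults(spec, out_root='out'):
    # Mirror the "only set if they have not been set" contract with setdefault.
    path_to_ninja = spec.get('msvs_path_to_ninja', 'ninja.exe')
    spec.setdefault('msvs_external_builder', 'ninja')
    spec.setdefault('msvs_external_builder_out_dir',
                    os.path.join(out_root, '$(Configuration)'))
    spec.setdefault('msvs_external_builder_build_cmd',
                    [path_to_ninja, '-C', '$(OutDir)', '$(ProjectName)'])
    return spec

# A target that pre-sets its out dir keeps it; everything else is defaulted.
print(init_ninja_defaults({'msvs_external_builder_out_dir': 'custom/dir'}))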
MythTV/mythtv | d282a209cb8be85d036f85a62a8ec971b67d45f4 | mythtv/bindings/python/MythTV/utility/dicttoxml.py | python | convert_dict | (obj, ids, parent, attr_type, item_func, cdata) | return ''.join(output) | Converts a dict into an XML string. | Converts a dict into an XML string. | [
"Converts",
"a",
"dict",
"into",
"an",
"XML",
"string",
"."
] | def convert_dict(obj, ids, parent, attr_type, item_func, cdata):
"""Converts a dict into an XML string."""
LOG.info('Inside convert_dict(): obj type is: "%s", obj="%s"' % (
type(obj).__name__, unicode_me(obj))
)
output = []
addline = output.append
item_name = item_func(parent)
for key, val in obj.items():
LOG.info('Looping inside convert_dict(): key="%s", val="%s", type(val)="%s"' % (
unicode_me(key), unicode_me(val), type(val).__name__)
)
attr = {} if not ids else {'id': '%s' % (get_unique_id(parent)) }
key, attr = make_valid_xml_name(key, attr)
if isinstance(val, numbers.Number) or type(val) in (str, unicode):
addline(convert_kv(key, val, attr_type, attr, cdata))
elif hasattr(val, 'isoformat'): # datetime
addline(convert_kv(key, val.isoformat(), attr_type, attr, cdata))
elif type(val) == bool:
addline(convert_bool(key, val, attr_type, attr, cdata))
elif isinstance(val, dict):
if attr_type:
attr['type'] = get_xml_type(val)
addline('<%s%s>%s</%s>' % (
key, make_attrstring(attr),
convert_dict(val, ids, key, attr_type, item_func, cdata),
key
)
)
elif isinstance(val, Iterable):
if attr_type:
attr['type'] = get_xml_type(val)
addline('<%s%s>%s</%s>' % (
key,
make_attrstring(attr),
convert_list(val, ids, key, attr_type, item_func, cdata),
key
)
)
elif val is None:
addline(convert_none(key, val, attr_type, attr, cdata))
else:
raise TypeError('Unsupported data type: %s (%s)' % (
val, type(val).__name__)
)
return ''.join(output) | [
"def",
"convert_dict",
"(",
"obj",
",",
"ids",
",",
"parent",
",",
"attr_type",
",",
"item_func",
",",
"cdata",
")",
":",
"LOG",
".",
"info",
"(",
"'Inside convert_dict(): obj type is: \"%s\", obj=\"%s\"'",
"%",
"(",
"type",
"(",
"obj",
")",
".",
"__name__",
",",
"unicode_me",
"(",
"obj",
")",
")",
")",
"output",
"=",
"[",
"]",
"addline",
"=",
"output",
".",
"append",
"item_name",
"=",
"item_func",
"(",
"parent",
")",
"for",
"key",
",",
"val",
"in",
"obj",
".",
"items",
"(",
")",
":",
"LOG",
".",
"info",
"(",
"'Looping inside convert_dict(): key=\"%s\", val=\"%s\", type(val)=\"%s\"'",
"%",
"(",
"unicode_me",
"(",
"key",
")",
",",
"unicode_me",
"(",
"val",
")",
",",
"type",
"(",
"val",
")",
".",
"__name__",
")",
")",
"attr",
"=",
"{",
"}",
"if",
"not",
"ids",
"else",
"{",
"'id'",
":",
"'%s'",
"%",
"(",
"get_unique_id",
"(",
"parent",
")",
")",
"}",
"key",
",",
"attr",
"=",
"make_valid_xml_name",
"(",
"key",
",",
"attr",
")",
"if",
"isinstance",
"(",
"val",
",",
"numbers",
".",
"Number",
")",
"or",
"type",
"(",
"val",
")",
"in",
"(",
"str",
",",
"unicode",
")",
":",
"addline",
"(",
"convert_kv",
"(",
"key",
",",
"val",
",",
"attr_type",
",",
"attr",
",",
"cdata",
")",
")",
"elif",
"hasattr",
"(",
"val",
",",
"'isoformat'",
")",
":",
"# datetime",
"addline",
"(",
"convert_kv",
"(",
"key",
",",
"val",
".",
"isoformat",
"(",
")",
",",
"attr_type",
",",
"attr",
",",
"cdata",
")",
")",
"elif",
"type",
"(",
"val",
")",
"==",
"bool",
":",
"addline",
"(",
"convert_bool",
"(",
"key",
",",
"val",
",",
"attr_type",
",",
"attr",
",",
"cdata",
")",
")",
"elif",
"isinstance",
"(",
"val",
",",
"dict",
")",
":",
"if",
"attr_type",
":",
"attr",
"[",
"'type'",
"]",
"=",
"get_xml_type",
"(",
"val",
")",
"addline",
"(",
"'<%s%s>%s</%s>'",
"%",
"(",
"key",
",",
"make_attrstring",
"(",
"attr",
")",
",",
"convert_dict",
"(",
"val",
",",
"ids",
",",
"key",
",",
"attr_type",
",",
"item_func",
",",
"cdata",
")",
",",
"key",
")",
")",
"elif",
"isinstance",
"(",
"val",
",",
"Iterable",
")",
":",
"if",
"attr_type",
":",
"attr",
"[",
"'type'",
"]",
"=",
"get_xml_type",
"(",
"val",
")",
"addline",
"(",
"'<%s%s>%s</%s>'",
"%",
"(",
"key",
",",
"make_attrstring",
"(",
"attr",
")",
",",
"convert_list",
"(",
"val",
",",
"ids",
",",
"key",
",",
"attr_type",
",",
"item_func",
",",
"cdata",
")",
",",
"key",
")",
")",
"elif",
"val",
"is",
"None",
":",
"addline",
"(",
"convert_none",
"(",
"key",
",",
"val",
",",
"attr_type",
",",
"attr",
",",
"cdata",
")",
")",
"else",
":",
"raise",
"TypeError",
"(",
"'Unsupported data type: %s (%s)'",
"%",
"(",
"val",
",",
"type",
"(",
"val",
")",
".",
"__name__",
")",
")",
"return",
"''",
".",
"join",
"(",
"output",
")"
] | https://github.com/MythTV/mythtv/blob/d282a209cb8be85d036f85a62a8ec971b67d45f4/mythtv/bindings/python/MythTV/utility/dicttoxml.py#L199-L256 |
|
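convert_dict is essentially a type dispatch (numbers and strings, datetimes via isoformat, bools, nested dicts, other iterables, None) with recursion for the container cases. A stripped-down sketch of that dispatch-and-recurse shape, covering only scalars, None and nested dicts (the helper below is invented for the example and skips dicttoxml's attribute, id and CDATA handling):

def dict_to_xml(obj):
    # Scalars become text nodes; nested dicts recurse, as in convert_dict.
    parts = []
    for key, val in obj.items():
        if isinstance(val, dict):
            parts.append('<%s>%s</%s>' % (key, dict_to_xml(val), key))
        elif val is None:
            parts.append('<%s></%s>' % (key, key))
        else:
            parts.append('<%s>%s</%s>' % (key, val, key))
    return ''.join(parts)

print(dict_to_xml({'a': 1, 'b': {'c': 'x', 'd': None}}))
# -> <a>1</a><b><c>x</c><d></d></b>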
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/_core.py | python | ImageHandler.GetType | (*args, **kwargs) | return _core_.ImageHandler_GetType(*args, **kwargs) | GetType(self) -> int | GetType(self) -> int | [
"GetType",
"(",
"self",
")",
"-",
">",
"int"
] | def GetType(*args, **kwargs):
"""GetType(self) -> int"""
return _core_.ImageHandler_GetType(*args, **kwargs) | [
"def",
"GetType",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"ImageHandler_GetType",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L2632-L2634 |
|
infinit/elle | a8154593c42743f45b9df09daf62b44630c24a02 | drake/src/drake/__init__.py | python | Path.without_last_extension | (self) | return self.with_extension(ext) | Remove the last dot and what follows from the basename.
Does nothing if there is no dot.
>>> p = Path('foo.tar.bz2')
>>> p
Path("foo.tar.bz2")
>>> p = p.without_last_extension()
>>> p
Path("foo.tar")
>>> p = p.without_last_extension()
>>> p
Path("foo")
>>> p.without_last_extension()
Path("foo") | Remove the last dot and what follows from the basename. | [
"Remove",
"the",
"last",
"dot",
"and",
"what",
"follows",
"from",
"the",
"basename",
"."
] | def without_last_extension(self):
"""Remove the last dot and what follows from the basename.
Does nothing if there is no dot.
>>> p = Path('foo.tar.bz2')
>>> p
Path("foo.tar.bz2")
>>> p = p.without_last_extension()
>>> p
Path("foo.tar")
>>> p = p.without_last_extension()
>>> p
Path("foo")
>>> p.without_last_extension()
Path("foo")
"""
ext = '.'.join(self.extension.split('.')[:-1])
return self.with_extension(ext) | [
"def",
"without_last_extension",
"(",
"self",
")",
":",
"ext",
"=",
"'.'",
".",
"join",
"(",
"self",
".",
"extension",
".",
"split",
"(",
"'.'",
")",
"[",
":",
"-",
"1",
"]",
")",
"return",
"self",
".",
"with_extension",
"(",
"ext",
")"
] | https://github.com/infinit/elle/blob/a8154593c42743f45b9df09daf62b44630c24a02/drake/src/drake/__init__.py#L755-L773 |
|
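The method boils down to dropping the final dot-separated component of the extension. For comparison, the same behaviour on a plain string, without drake's Path class (standalone sketch):

def strip_last_extension(name):
    # 'foo.tar.bz2' -> 'foo.tar'; a name with no dot is returned unchanged.
    base, dot, _ext = name.rpartition('.')
    return base if dot else name

assert strip_last_extension('foo.tar.bz2') == 'foo.tar'
assert strip_last_extension('foo.tar') == 'foo'
assert strip_last_extension('foo') == 'foo'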
digibyte/digibyte | 0b8a04fb06d5470a15168e2f675aec57bcc24dac | contrib/devtools/security-check.py | python | check_PE_HIGH_ENTROPY_VA | (executable) | return (bits & reqbits) == reqbits | PIE: DllCharacteristics bit 0x20 signifies high-entropy ASLR | PIE: DllCharacteristics bit 0x20 signifies high-entropy ASLR | [
"PIE",
":",
"DllCharacteristics",
"bit",
"0x20",
"signifies",
"high",
"-",
"entropy",
"ASLR"
] | def check_PE_HIGH_ENTROPY_VA(executable):
'''PIE: DllCharacteristics bit 0x20 signifies high-entropy ASLR'''
(arch,bits) = get_PE_dll_characteristics(executable)
if arch == 'i386:x86-64':
reqbits = IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA
else: # Unnecessary on 32-bit
assert(arch == 'i386')
reqbits = 0
return (bits & reqbits) == reqbits | [
"def",
"check_PE_HIGH_ENTROPY_VA",
"(",
"executable",
")",
":",
"(",
"arch",
",",
"bits",
")",
"=",
"get_PE_dll_characteristics",
"(",
"executable",
")",
"if",
"arch",
"==",
"'i386:x86-64'",
":",
"reqbits",
"=",
"IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA",
"else",
":",
"# Unnecessary on 32-bit",
"assert",
"(",
"arch",
"==",
"'i386'",
")",
"reqbits",
"=",
"0",
"return",
"(",
"bits",
"&",
"reqbits",
")",
"==",
"reqbits"
] | https://github.com/digibyte/digibyte/blob/0b8a04fb06d5470a15168e2f675aec57bcc24dac/contrib/devtools/security-check.py#L150-L158 |
|
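The decision reduces to a required-bits mask test against the PE DllCharacteristics word; per the docstring, high-entropy ASLR is bit 0x20, and 32-bit binaries require no bits at all. A self-contained illustration of the mask test (the sample characteristics values are made up):

IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA = 0x20  # the bit named in the docstring

def has_required_bits(bits, reqbits):
    # True only when every bit in reqbits is also set in bits.
    return (bits & reqbits) == reqbits

assert has_required_bits(0x0160, IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA)
assert not has_required_bits(0x0140, IMAGE_DLL_CHARACTERISTICS_HIGH_ENTROPY_VA)
assert has_required_bits(0x0000, 0)  # the 32-bit case: nothing required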
tinyobjloader/tinyobjloader | 8322e00ae685ea623ab6ac5a6cebcfa2d22fbf93 | deps/cpplint.py | python | _RestoreFilters | () | Restores filters previously backed up. | Restores filters previously backed up. | [
"Restores",
"filters",
"previously",
"backed",
"up",
"."
] | def _RestoreFilters():
""" Restores filters previously backed up."""
_cpplint_state.RestoreFilters() | [
"def",
"_RestoreFilters",
"(",
")",
":",
"_cpplint_state",
".",
"RestoreFilters",
"(",
")"
] | https://github.com/tinyobjloader/tinyobjloader/blob/8322e00ae685ea623ab6ac5a6cebcfa2d22fbf93/deps/cpplint.py#L909-L911 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/_controls.py | python | RadioBox.IsItemEnabled | (*args, **kwargs) | return _controls_.RadioBox_IsItemEnabled(*args, **kwargs) | IsItemEnabled(self, unsigned int n) -> bool | IsItemEnabled(self, unsigned int n) -> bool | [
"IsItemEnabled",
"(",
"self",
"unsigned",
"int",
"n",
")",
"-",
">",
"bool"
] | def IsItemEnabled(*args, **kwargs):
"""IsItemEnabled(self, unsigned int n) -> bool"""
return _controls_.RadioBox_IsItemEnabled(*args, **kwargs) | [
"def",
"IsItemEnabled",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_controls_",
".",
"RadioBox_IsItemEnabled",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_controls.py#L2637-L2639 |
|
gyunaev/birdtray | e9ddee108b3cdb9d668df88b3400205586ac9316 | .github/scripts/checkTranslation.py | python | TranslationHandler._checkPunctuation | (self) | Check for problems with punctuations. | Check for problems with punctuations. | [
"Check",
"for",
"problems",
"with",
"punctuations",
"."
] | def _checkPunctuation(self):
""" Check for problems with punctuations. """
if self._source == '' or self._translation == '':
return
lastSourceChar = self._source[-1]
lastTranslationChar = self._translation[-1]
if lastSourceChar not in self.SENTENCE_ENDING_PUNCTUATIONS and \
lastTranslationChar not in self.SENTENCE_ENDING_PUNCTUATIONS:
return # If source and translation don't end in a punctuation mark
if lastSourceChar != lastTranslationChar:
if not (lastSourceChar == '.' and self._source.endswith('...') and
self._translation.endswith('…')) and \
not (lastSourceChar == '…' and self._translation.endswith('...')):
self.warning('punctuation_end_differ', self._calculatePosition(
self._translation, len(self._translation) - 1, self._translationPos),
punctuation=self._escapeText(lastSourceChar),
actual=self._escapeText(lastTranslationChar)) | [
"def",
"_checkPunctuation",
"(",
"self",
")",
":",
"if",
"self",
".",
"_source",
"==",
"''",
"or",
"self",
".",
"_translation",
"==",
"''",
":",
"return",
"lastSourceChar",
"=",
"self",
".",
"_source",
"[",
"-",
"1",
"]",
"lastTranslationChar",
"=",
"self",
".",
"_translation",
"[",
"-",
"1",
"]",
"if",
"lastSourceChar",
"not",
"in",
"self",
".",
"SENTENCE_ENDING_PUNCTUATIONS",
"and",
"lastTranslationChar",
"not",
"in",
"self",
".",
"SENTENCE_ENDING_PUNCTUATIONS",
":",
"return",
"# If source and translation don't end in a punctuation",
"if",
"lastSourceChar",
"!=",
"lastTranslationChar",
":",
"if",
"not",
"(",
"lastSourceChar",
"==",
"'.'",
"and",
"self",
".",
"_source",
".",
"endswith",
"(",
"'...'",
")",
"and",
"self",
".",
"_translation",
".",
"endswith",
"(",
"'…'))",
" ",
"a",
"d \\",
"not",
"(",
"lastSourceChar",
"==",
"'…' a",
"d s",
"lf._",
"t",
"ranslation.e",
"n",
"dswith('",
".",
"..'))",
":",
"",
"",
"self",
".",
"warning",
"(",
"'punctuation_end_differ'",
",",
"self",
".",
"_calculatePosition",
"(",
"self",
".",
"_translation",
",",
"len",
"(",
"self",
".",
"_translation",
")",
"-",
"1",
",",
"self",
".",
"_translationPos",
")",
",",
"punctuation",
"=",
"self",
".",
"_escapeText",
"(",
"lastSourceChar",
")",
",",
"actual",
"=",
"self",
".",
"_escapeText",
"(",
"lastTranslationChar",
")",
")"
] | https://github.com/gyunaev/birdtray/blob/e9ddee108b3cdb9d668df88b3400205586ac9316/.github/scripts/checkTranslation.py#L520-L536 |
||
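The branch that deserves attention is the ellipsis equivalence: a source ending in the ASCII '...' matched against a translation ending in the Unicode '…' (or the reverse) is deliberately not reported. A rough standalone sketch of that comparison (the function name and the punctuation set are assumptions, not Birdtray's actual constants):

SENTENCE_ENDING = '.?!…'  # assumed stand-in for SENTENCE_ENDING_PUNCTUATIONS

def punctuation_differs(source, translation):
    # Flag a final-punctuation mismatch, treating '...' and '…' as equal.
    if not source or not translation:
        return False
    s, t = source[-1], translation[-1]
    if s not in SENTENCE_ENDING and t not in SENTENCE_ENDING:
        return False
    if s == t:
        return False
    if source.endswith('...') and translation.endswith('…'):
        return False
    if source.endswith('…') and translation.endswith('...'):
        return False
    return True

assert not punctuation_differs('Wait...', 'Attendez…')
assert punctuation_differs('Done.', 'Fertig!')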
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/distutils/cmd.py | python | Command.initialize_options | (self) | Set default values for all the options that this command
supports. Note that these defaults may be overridden by other
commands, by the setup script, by config files, or by the
command-line. Thus, this is not the place to code dependencies
between options; generally, 'initialize_options()' implementations
are just a bunch of "self.foo = None" assignments.
This method must be implemented by all command classes. | Set default values for all the options that this command
supports. Note that these defaults may be overridden by other
commands, by the setup script, by config files, or by the
command-line. Thus, this is not the place to code dependencies
between options; generally, 'initialize_options()' implementations
are just a bunch of "self.foo = None" assignments. | [
"Set",
"default",
"values",
"for",
"all",
"the",
"options",
"that",
"this",
"command",
"supports",
".",
"Note",
"that",
"these",
"defaults",
"may",
"be",
"overridden",
"by",
"other",
"commands",
"by",
"the",
"setup",
"script",
"by",
"config",
"files",
"or",
"by",
"the",
"command",
"-",
"line",
".",
"Thus",
"this",
"is",
"not",
"the",
"place",
"to",
"code",
"dependencies",
"between",
"options",
";",
"generally",
"initialize_options",
"()",
"implementations",
"are",
"just",
"a",
"bunch",
"of",
"self",
".",
"foo",
"=",
"None",
"assignments",
"."
] | def initialize_options(self):
"""Set default values for all the options that this command
supports. Note that these defaults may be overridden by other
commands, by the setup script, by config files, or by the
command-line. Thus, this is not the place to code dependencies
between options; generally, 'initialize_options()' implementations
are just a bunch of "self.foo = None" assignments.
This method must be implemented by all command classes.
"""
raise RuntimeError, \
"abstract method -- subclass %s must override" % self.__class__ | [
"def",
"initialize_options",
"(",
"self",
")",
":",
"raise",
"RuntimeError",
",",
"\"abstract method -- subclass %s must override\"",
"%",
"self",
".",
"__class__"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/distutils/cmd.py#L125-L136 |
||
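In practice a command satisfies this contract with nothing but default attribute assignments, leaving validation to finalize_options. A minimal sketch of such a subclass (the command name and its option are invented for illustration; distutils here is the same module the source targets):

from distutils.cmd import Command

class HypotheticalCommand(Command):
    description = 'illustrative command'
    user_options = [('flavor=', None, 'build flavor to use')]

    def initialize_options(self):
        # Just "self.foo = None" style defaults, as the docstring prescribes.
        self.flavor = None

    def finalize_options(self):
        # Dependencies between options belong here, not above.
        if self.flavor is None:
            self.flavor = 'release'

    def run(self):
        print('building flavor: %s' % self.flavor)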
tensorflow/minigo | 6d89c202cdceaf449aefc3149ab2110d44f1a6a4 | dual_net.py | python | freeze_graph_tpu | (model_path) | Custom freeze_graph implementation for Cloud TPU. | Custom freeze_graph implementation for Cloud TPU. | [
"Custom",
"freeze_graph",
"implementation",
"for",
"Cloud",
"TPU",
"."
] | def freeze_graph_tpu(model_path):
"""Custom freeze_graph implementation for Cloud TPU."""
assert model_path
assert FLAGS.tpu_name
if FLAGS.tpu_name.startswith('grpc://'):
tpu_grpc_url = FLAGS.tpu_name
else:
tpu_cluster_resolver = contrib_cluster_resolver.TPUClusterResolver(
FLAGS.tpu_name, zone=None, project=None)
tpu_grpc_url = tpu_cluster_resolver.get_master()
sess = tf.Session(tpu_grpc_url)
output_names = []
with sess.graph.as_default():
# Replicate the inference function for each TPU core.
replicated_features = []
feature_type = tf.bool if FLAGS.bool_features else tf.float32
for i in range(FLAGS.num_tpu_cores):
name = 'pos_tensor_%d' % i
features = tf.placeholder(
feature_type, [None], name=name)
replicated_features.append((features,))
outputs = contrib_tpu.replicate(
tpu_model_inference_fn, replicated_features)
# The replicate op assigns names like output_0_shard_0 to the output
# names. Give them human readable names.
for i, (policy_output, value_output, _) in enumerate(outputs):
policy_name = 'policy_output_%d' % i
value_name = 'value_output_%d' % i
output_names.extend([policy_name, value_name])
tf.identity(policy_output, policy_name)
tf.identity(value_output, value_name)
tf.train.Saver().restore(sess, model_path)
out_graph = tf.graph_util.convert_variables_to_constants(
sess, sess.graph.as_graph_def(), output_names)
metadata = make_model_metadata({
'engine': 'tpu',
'num_replicas': FLAGS.num_tpu_cores,
})
minigo_model.write_graph_def(out_graph, metadata, model_path + '.minigo') | [
"def",
"freeze_graph_tpu",
"(",
"model_path",
")",
":",
"assert",
"model_path",
"assert",
"FLAGS",
".",
"tpu_name",
"if",
"FLAGS",
".",
"tpu_name",
".",
"startswith",
"(",
"'grpc://'",
")",
":",
"tpu_grpc_url",
"=",
"FLAGS",
".",
"tpu_name",
"else",
":",
"tpu_cluster_resolver",
"=",
"contrib_cluster_resolver",
".",
"TPUClusterResolver",
"(",
"FLAGS",
".",
"tpu_name",
",",
"zone",
"=",
"None",
",",
"project",
"=",
"None",
")",
"tpu_grpc_url",
"=",
"tpu_cluster_resolver",
".",
"get_master",
"(",
")",
"sess",
"=",
"tf",
".",
"Session",
"(",
"tpu_grpc_url",
")",
"output_names",
"=",
"[",
"]",
"with",
"sess",
".",
"graph",
".",
"as_default",
"(",
")",
":",
"# Replicate the inference function for each TPU core.",
"replicated_features",
"=",
"[",
"]",
"feature_type",
"=",
"tf",
".",
"bool",
"if",
"FLAGS",
".",
"bool_features",
"else",
"tf",
".",
"float32",
"for",
"i",
"in",
"range",
"(",
"FLAGS",
".",
"num_tpu_cores",
")",
":",
"name",
"=",
"'pos_tensor_%d'",
"%",
"i",
"features",
"=",
"tf",
".",
"placeholder",
"(",
"feature_type",
",",
"[",
"None",
"]",
",",
"name",
"=",
"name",
")",
"replicated_features",
".",
"append",
"(",
"(",
"features",
",",
")",
")",
"outputs",
"=",
"contrib_tpu",
".",
"replicate",
"(",
"tpu_model_inference_fn",
",",
"replicated_features",
")",
"# The replicate op assigns names like output_0_shard_0 to the output",
"# names. Give them human readable names.",
"for",
"i",
",",
"(",
"policy_output",
",",
"value_output",
",",
"_",
")",
"in",
"enumerate",
"(",
"outputs",
")",
":",
"policy_name",
"=",
"'policy_output_%d'",
"%",
"i",
"value_name",
"=",
"'value_output_%d'",
"%",
"i",
"output_names",
".",
"extend",
"(",
"[",
"policy_name",
",",
"value_name",
"]",
")",
"tf",
".",
"identity",
"(",
"policy_output",
",",
"policy_name",
")",
"tf",
".",
"identity",
"(",
"value_output",
",",
"value_name",
")",
"tf",
".",
"train",
".",
"Saver",
"(",
")",
".",
"restore",
"(",
"sess",
",",
"model_path",
")",
"out_graph",
"=",
"tf",
".",
"graph_util",
".",
"convert_variables_to_constants",
"(",
"sess",
",",
"sess",
".",
"graph",
".",
"as_graph_def",
"(",
")",
",",
"output_names",
")",
"metadata",
"=",
"make_model_metadata",
"(",
"{",
"'engine'",
":",
"'tpu'",
",",
"'num_replicas'",
":",
"FLAGS",
".",
"num_tpu_cores",
",",
"}",
")",
"minigo_model",
".",
"write_graph_def",
"(",
"out_graph",
",",
"metadata",
",",
"model_path",
"+",
"'.minigo'",
")"
] | https://github.com/tensorflow/minigo/blob/6d89c202cdceaf449aefc3149ab2110d44f1a6a4/dual_net.py#L720-L765 |
||
snap-stanford/snap-python | d53c51b0a26aa7e3e7400b014cdf728948fde80a | setup/snap.py | python | TStr.Empty | (self) | return _snap.TStr_Empty(self) | Empty(TStr self) -> bool
Parameters:
self: TStr const * | Empty(TStr self) -> bool | [
"Empty",
"(",
"TStr",
"self",
")",
"-",
">",
"bool"
] | def Empty(self):
"""
Empty(TStr self) -> bool
Parameters:
self: TStr const *
"""
return _snap.TStr_Empty(self) | [
"def",
"Empty",
"(",
"self",
")",
":",
"return",
"_snap",
".",
"TStr_Empty",
"(",
"self",
")"
] | https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L9693-L9701 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__init__.py | python | find_eggs_in_zip | (importer, path_item, only=False) | Find eggs in zip files; possibly multiple nested eggs. | Find eggs in zip files; possibly multiple nested eggs. | [
"Find",
"eggs",
"in",
"zip",
"files",
";",
"possibly",
"multiple",
"nested",
"eggs",
"."
] | def find_eggs_in_zip(importer, path_item, only=False):
"""
Find eggs in zip files; possibly multiple nested eggs.
"""
if importer.archive.endswith('.whl'):
# wheels are not supported with this finder
# they don't have PKG-INFO metadata, and won't ever contain eggs
return
metadata = EggMetadata(importer)
if metadata.has_metadata('PKG-INFO'):
yield Distribution.from_filename(path_item, metadata=metadata)
if only:
# don't yield nested distros
return
for subitem in metadata.resource_listdir(''):
if _is_egg_path(subitem):
subpath = os.path.join(path_item, subitem)
dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
for dist in dists:
yield dist
elif subitem.lower().endswith('.dist-info'):
subpath = os.path.join(path_item, subitem)
submeta = EggMetadata(zipimport.zipimporter(subpath))
submeta.egg_info = subpath
yield Distribution.from_location(path_item, subitem, submeta) | [
"def",
"find_eggs_in_zip",
"(",
"importer",
",",
"path_item",
",",
"only",
"=",
"False",
")",
":",
"if",
"importer",
".",
"archive",
".",
"endswith",
"(",
"'.whl'",
")",
":",
"# wheels are not supported with this finder",
"# they don't have PKG-INFO metadata, and won't ever contain eggs",
"return",
"metadata",
"=",
"EggMetadata",
"(",
"importer",
")",
"if",
"metadata",
".",
"has_metadata",
"(",
"'PKG-INFO'",
")",
":",
"yield",
"Distribution",
".",
"from_filename",
"(",
"path_item",
",",
"metadata",
"=",
"metadata",
")",
"if",
"only",
":",
"# don't yield nested distros",
"return",
"for",
"subitem",
"in",
"metadata",
".",
"resource_listdir",
"(",
"''",
")",
":",
"if",
"_is_egg_path",
"(",
"subitem",
")",
":",
"subpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path_item",
",",
"subitem",
")",
"dists",
"=",
"find_eggs_in_zip",
"(",
"zipimport",
".",
"zipimporter",
"(",
"subpath",
")",
",",
"subpath",
")",
"for",
"dist",
"in",
"dists",
":",
"yield",
"dist",
"elif",
"subitem",
".",
"lower",
"(",
")",
".",
"endswith",
"(",
"'.dist-info'",
")",
":",
"subpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path_item",
",",
"subitem",
")",
"submeta",
"=",
"EggMetadata",
"(",
"zipimport",
".",
"zipimporter",
"(",
"subpath",
")",
")",
"submeta",
".",
"egg_info",
"=",
"subpath",
"yield",
"Distribution",
".",
"from_location",
"(",
"path_item",
",",
"subitem",
",",
"submeta",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_vendor/pkg_resources/__init__.py#L1974-L1998 |
||
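Structurally this is a recursive generator: yield something for the current archive, then walk its children and re-yield their results (in the pre-yield-from style the original uses). The same shape on a plain nested structure, as a runnable sketch with made-up data:

def iter_entries(node, only=False):
    # Yield this node, then (unless only=True) recurse into its children.
    yield node['name']
    if only:
        return  # mirrors the "don't yield nested distros" early exit
    for child in node.get('children', []):
        for name in iter_entries(child):
            yield name

tree = {'name': 'outer.egg',
        'children': [{'name': 'inner-a.egg'},
                     {'name': 'inner-b.egg',
                      'children': [{'name': 'deep.egg'}]}]}
print(list(iter_entries(tree)))
# -> ['outer.egg', 'inner-a.egg', 'inner-b.egg', 'deep.egg']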
ApolloAuto/apollo-platform | 86d9dc6743b496ead18d597748ebabd34a513289 | ros/ros_comm/rostopic/src/rostopic/__init__.py | python | _rostopic_echo | (topic, callback_echo, bag_file=None, echo_all_topics=False) | Print new messages on topic to screen.
:param topic: topic name, ``str``
:param bag_file: name of bag file to echo messages from or ``None``, ``str`` | Print new messages on topic to screen.
:param topic: topic name, ``str``
:param bag_file: name of bag file to echo messages from or ``None``, ``str`` | [
"Print",
"new",
"messages",
"on",
"topic",
"to",
"screen",
".",
":",
"param",
"topic",
":",
"topic",
"name",
"str",
":",
"param",
"bag_file",
":",
"name",
"of",
"bag",
"file",
"to",
"echo",
"messages",
"from",
"or",
"None",
"str"
] | def _rostopic_echo(topic, callback_echo, bag_file=None, echo_all_topics=False):
"""
Print new messages on topic to screen.
:param topic: topic name, ``str``
:param bag_file: name of bag file to echo messages from or ``None``, ``str``
"""
# we have to init a node regardless and bag echoing can print timestamps
if bag_file:
# initialize rospy time due to potential timestamp printing
rospy.rostime.set_rostime_initialized(True)
_rostopic_echo_bag(callback_echo, bag_file)
else:
_check_master()
rospy.init_node(NAME, anonymous=True)
msg_class, real_topic, msg_eval = get_topic_class(topic, blocking=True)
if msg_class is None:
# occurs on ctrl-C
return
callback_echo.msg_eval = msg_eval
# extract type information for submessages
type_information = None
if len(topic) > len(real_topic):
subtopic = topic[len(real_topic):]
subtopic = subtopic.strip('/')
if subtopic:
fields = subtopic.split('/')
submsg_class = msg_class
while fields:
field = fields[0].split('[')[0]
del fields[0]
index = submsg_class.__slots__.index(field)
type_information = submsg_class._slot_types[index]
if fields:
submsg_class = roslib.message.get_message_class(type_information.split('[', 1)[0])
if not submsg_class:
raise ROSTopicException("Cannot load message class for [%s]. Are your messages built?" % type_information)
use_sim_time = rospy.get_param('/use_sim_time', False)
sub = rospy.Subscriber(real_topic, msg_class, callback_echo.callback, {'topic': topic, 'type_information': type_information})
if use_sim_time:
# #2950: print warning if nothing received for two seconds
timeout_t = time.time() + 2.
while time.time() < timeout_t and \
callback_echo.count == 0 and \
not rospy.is_shutdown() and \
not callback_echo.done:
_sleep(0.1)
if callback_echo.count == 0 and \
not rospy.is_shutdown() and \
not callback_echo.done:
sys.stderr.write("WARNING: no messages received and simulated time is active.\nIs /clock being published?\n")
while not rospy.is_shutdown() and not callback_echo.done:
_sleep(0.1) | [
"def",
"_rostopic_echo",
"(",
"topic",
",",
"callback_echo",
",",
"bag_file",
"=",
"None",
",",
"echo_all_topics",
"=",
"False",
")",
":",
"# we have to init a node regardless and bag echoing can print timestamps",
"if",
"bag_file",
":",
"# initialize rospy time due to potential timestamp printing",
"rospy",
".",
"rostime",
".",
"set_rostime_initialized",
"(",
"True",
")",
"_rostopic_echo_bag",
"(",
"callback_echo",
",",
"bag_file",
")",
"else",
":",
"_check_master",
"(",
")",
"rospy",
".",
"init_node",
"(",
"NAME",
",",
"anonymous",
"=",
"True",
")",
"msg_class",
",",
"real_topic",
",",
"msg_eval",
"=",
"get_topic_class",
"(",
"topic",
",",
"blocking",
"=",
"True",
")",
"if",
"msg_class",
"is",
"None",
":",
"# occurs on ctrl-C",
"return",
"callback_echo",
".",
"msg_eval",
"=",
"msg_eval",
"# extract type information for submessages",
"type_information",
"=",
"None",
"if",
"len",
"(",
"topic",
")",
">",
"len",
"(",
"real_topic",
")",
":",
"subtopic",
"=",
"topic",
"[",
"len",
"(",
"real_topic",
")",
":",
"]",
"subtopic",
"=",
"subtopic",
".",
"strip",
"(",
"'/'",
")",
"if",
"subtopic",
":",
"fields",
"=",
"subtopic",
".",
"split",
"(",
"'/'",
")",
"submsg_class",
"=",
"msg_class",
"while",
"fields",
":",
"field",
"=",
"fields",
"[",
"0",
"]",
".",
"split",
"(",
"'['",
")",
"[",
"0",
"]",
"del",
"fields",
"[",
"0",
"]",
"index",
"=",
"submsg_class",
".",
"__slots__",
".",
"index",
"(",
"field",
")",
"type_information",
"=",
"submsg_class",
".",
"_slot_types",
"[",
"index",
"]",
"if",
"fields",
":",
"submsg_class",
"=",
"roslib",
".",
"message",
".",
"get_message_class",
"(",
"type_information",
".",
"split",
"(",
"'['",
",",
"1",
")",
"[",
"0",
"]",
")",
"if",
"not",
"submsg_class",
":",
"raise",
"ROSTopicException",
"(",
"\"Cannot load message class for [%s]. Are your messages built?\"",
"%",
"type_information",
")",
"use_sim_time",
"=",
"rospy",
".",
"get_param",
"(",
"'/use_sim_time'",
",",
"False",
")",
"sub",
"=",
"rospy",
".",
"Subscriber",
"(",
"real_topic",
",",
"msg_class",
",",
"callback_echo",
".",
"callback",
",",
"{",
"'topic'",
":",
"topic",
",",
"'type_information'",
":",
"type_information",
"}",
")",
"if",
"use_sim_time",
":",
"# #2950: print warning if nothing received for two seconds",
"timeout_t",
"=",
"time",
".",
"time",
"(",
")",
"+",
"2.",
"while",
"time",
".",
"time",
"(",
")",
"<",
"timeout_t",
"and",
"callback_echo",
".",
"count",
"==",
"0",
"and",
"not",
"rospy",
".",
"is_shutdown",
"(",
")",
"and",
"not",
"callback_echo",
".",
"done",
":",
"_sleep",
"(",
"0.1",
")",
"if",
"callback_echo",
".",
"count",
"==",
"0",
"and",
"not",
"rospy",
".",
"is_shutdown",
"(",
")",
"and",
"not",
"callback_echo",
".",
"done",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"WARNING: no messages received and simulated time is active.\\nIs /clock being published?\\n\"",
")",
"while",
"not",
"rospy",
".",
"is_shutdown",
"(",
")",
"and",
"not",
"callback_echo",
".",
"done",
":",
"_sleep",
"(",
"0.1",
")"
] | https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/ros_comm/rostopic/src/rostopic/__init__.py#L878-L937 |
||
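The subtopic handling is the subtle part: the remainder of the topic path is split on '/' and walked through the message class, with __slots__ locating each field and _slot_types giving its type. A rospy-free sketch of that walk using fabricated stand-in message classes (real ones come from roslib):

class Point(object):
    __slots__ = ['x', 'y']
    _slot_types = ['float64', 'float64']

class Pose(object):
    __slots__ = ['position', 'seq']
    _slot_types = ['Point', 'uint32']

REGISTRY = {'Point': Point}  # stand-in for roslib.message.get_message_class

def resolve_field_type(msg_class, subtopic):
    # Walk a 'position/x'-style path, as the loop in _rostopic_echo does.
    fields = subtopic.strip('/').split('/')
    type_information = None
    while fields:
        field = fields.pop(0).split('[')[0]  # drop any '[n]' array index
        index = msg_class.__slots__.index(field)
        type_information = msg_class._slot_types[index]
        if fields:
            msg_class = REGISTRY[type_information.split('[', 1)[0]]
    return type_information

assert resolve_field_type(Pose, 'position/x') == 'float64'
assert resolve_field_type(Pose, 'position') == 'Point'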
mongodb/mongo | d8ff665343ad29cf286ee2cf4a1960d29371937b | buildscripts/idl/idl/common.py | python | camel_case | (name) | return name[0:1].lower() + name[1:] | Return a camelCased version of a string. | Return a camelCased version of a string. | [
"Return",
"a",
"camelCased",
"version",
"of",
"a",
"string",
"."
] | def camel_case(name):
# type: (str) -> str
"""Return a camelCased version of a string."""
return name[0:1].lower() + name[1:] | [
"def",
"camel_case",
"(",
"name",
")",
":",
"# type: (str) -> str",
"return",
"name",
"[",
"0",
":",
"1",
"]",
".",
"lower",
"(",
")",
"+",
"name",
"[",
"1",
":",
"]"
] | https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/idl/idl/common.py#L56-L59 |
|
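Because only the first character is lowered through a slice, already-camelCased input and the empty string both pass through safely (the function is repeated here so the demo is runnable on its own):

def camel_case(name):
    return name[0:1].lower() + name[1:]

assert camel_case('ReplSetName') == 'replSetName'
assert camel_case('already') == 'already'
assert camel_case('') == ''  # the 0:1 slice keeps the empty string safe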
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/x86/toolchain/lib/python2.7/mailbox.py | python | MaildirMessage.set_flags | (self, flags) | Set the given flags and unset all others. | Set the given flags and unset all others. | [
"Set",
"the",
"given",
"flags",
"and",
"unset",
"all",
"others",
"."
] | def set_flags(self, flags):
"""Set the given flags and unset all others."""
self._info = '2,' + ''.join(sorted(flags)) | [
"def",
"set_flags",
"(",
"self",
",",
"flags",
")",
":",
"self",
".",
"_info",
"=",
"'2,'",
"+",
"''",
".",
"join",
"(",
"sorted",
"(",
"flags",
")",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/mailbox.py#L1499-L1501 |
||
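Since the setter rebuilds the whole info string as '2,' plus the sorted flags, the order the caller passes never matters. The standard-library class shows the behaviour directly:

from mailbox import MaildirMessage

msg = MaildirMessage()
msg.set_flags('SR')      # seen + replied, deliberately out of order
print(msg.get_info())    # -> '2,RS': flags come back sorted
print(msg.get_flags())   # -> 'RS'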
mapnik/mapnik | f3da900c355e1d15059c4a91b00203dcc9d9f0ef | scons/scons-local-4.1.0/SCons/Tool/tex.py | python | tex_emitter_core | (target, source, env, graphics_extensions) | return (target, source) | An emitter for TeX and LaTeX sources.
For LaTeX sources we try and find the common created files that
are needed on subsequent runs of latex to finish tables of contents,
bibliographies, indices, lists of figures, and hyperlink references. | An emitter for TeX and LaTeX sources.
For LaTeX sources we try and find the common created files that
are needed on subsequent runs of latex to finish tables of contents,
bibliographies, indices, lists of figures, and hyperlink references. | [
"An",
"emitter",
"for",
"TeX",
"and",
"LaTeX",
"sources",
".",
"For",
"LaTeX",
"sources",
"we",
"try",
"and",
"find",
"the",
"common",
"created",
"files",
"that",
"are",
"needed",
"on",
"subsequent",
"runs",
"of",
"latex",
"to",
"finish",
"tables",
"of",
"contents",
"bibliographies",
"indices",
"lists",
"of",
"figures",
"and",
"hyperlink",
"references",
"."
] | def tex_emitter_core(target, source, env, graphics_extensions):
"""An emitter for TeX and LaTeX sources.
For LaTeX sources we try and find the common created files that
are needed on subsequent runs of latex to finish tables of contents,
bibliographies, indices, lists of figures, and hyperlink references.
"""
basename = SCons.Util.splitext(str(source[0]))[0]
basefile = os.path.split(str(basename))[1]
targetdir = os.path.split(str(target[0]))[0]
targetbase = os.path.join(targetdir, basefile)
basedir = os.path.split(str(source[0]))[0]
abspath = os.path.abspath(basedir)
target[0].attributes.path = abspath
#
# file names we will make use of in searching the sources and log file
#
emit_suffixes = ['.aux', '.log', '.ilg', '.blg', '.nls', '.nlg', '.gls', '.glg', '.alg'] + all_suffixes
auxfilename = targetbase + '.aux'
logfilename = targetbase + '.log'
flsfilename = targetbase + '.fls'
syncfilename = targetbase + '.synctex.gz'
env.SideEffect(auxfilename,target[0])
env.SideEffect(logfilename,target[0])
env.SideEffect(flsfilename,target[0])
env.SideEffect(syncfilename,target[0])
if Verbose:
print("side effect :",auxfilename,logfilename,flsfilename,syncfilename)
env.Clean(target[0],auxfilename)
env.Clean(target[0],logfilename)
env.Clean(target[0],flsfilename)
env.Clean(target[0],syncfilename)
content = source[0].get_text_contents()
# set up list with the regular expressions
# we use to find features used
file_tests_search = [auxfile_re,
makeindex_re,
bibliography_re,
bibunit_re,
multibib_re,
addbibresource_re,
tableofcontents_re,
listoffigures_re,
listoftables_re,
hyperref_re,
makenomenclature_re,
makeglossary_re,
makeglossaries_re,
makeacronyms_re,
beamer_re,
newglossary_re,
biblatex_re ]
# set up list with the file suffixes that need emitting
# when a feature is found
file_tests_suff = [['.aux','aux_file'],
['.idx', '.ind', '.ilg','makeindex'],
['.bbl', '.blg','bibliography'],
['.bbl', '.blg','bibunit'],
['.bbl', '.blg','multibib'],
['.bbl', '.blg','.bcf','addbibresource'],
['.toc','contents'],
['.lof','figures'],
['.lot','tables'],
['.out','hyperref'],
['.nlo', '.nls', '.nlg','nomenclature'],
['.glo', '.gls', '.glg','glossary'],
['.glo', '.gls', '.glg','glossaries'],
['.acn', '.acr', '.alg','acronyms'],
['.nav', '.snm', '.out', '.toc','beamer'],
['newglossary',],
['.bcf', '.blg','biblatex'] ]
# for newglossary the suffixes are added as we find the command
# build the list of lists
file_tests = []
for i in range(len(file_tests_search)):
file_tests.append( [None, file_tests_suff[i]] )
# TO-DO: need to add a way for the user to extend this list for whatever
# auxiliary files they create in other (or their own) packages
# get path list from both env['TEXINPUTS'] and env['ENV']['TEXINPUTS']
savedpath = modify_env_var(env, 'TEXINPUTS', abspath)
paths = env['ENV']['TEXINPUTS']
if SCons.Util.is_List(paths):
pass
else:
# Split at os.pathsep to convert into absolute path
paths = paths.split(os.pathsep)
# now that we have the path list restore the env
if savedpath is _null:
try:
del env['ENV']['TEXINPUTS']
except KeyError:
pass # was never set
else:
env['ENV']['TEXINPUTS'] = savedpath
if Verbose:
print("search path ",paths)
# scan all sources for side effect files
aux_files = []
file_tests = ScanFiles(source[0], target, paths, file_tests, file_tests_search, env, graphics_extensions, targetdir, aux_files)
for (theSearch,suffix_list) in file_tests:
# add side effects if feature is present. If file is to be generated, add all side effects
if Verbose and theSearch:
print("check side effects for ",suffix_list[-1])
if theSearch is not None or not source[0].exists():
file_list = [targetbase,]
# for bibunit we need a list of files
if suffix_list[-1] == 'bibunit':
file_basename = os.path.join(targetdir, 'bu*.aux')
file_list = glob.glob(file_basename)
# remove the suffix '.aux'
for fl in file_list.copy():
file_list.append(SCons.Util.splitext(fl)[0])
# for multibib we need a list of files
if suffix_list[-1] == 'multibib':
for multibibmatch in multibib_re.finditer(content):
if Verbose:
print("multibib match ",multibibmatch.group(1))
if multibibmatch is not None:
baselist = multibibmatch.group(1).split(',')
if Verbose:
print("multibib list ", baselist)
for bl in baselist:
file_list.append(os.path.join(targetdir, bl))
# now define the side effects
for file_name in file_list:
for suffix in suffix_list[:-1]:
env.SideEffect(file_name + suffix,target[0])
if Verbose:
print("side effect tst :",file_name + suffix, " target is ",str(target[0]))
env.Clean(target[0],file_name + suffix)
for aFile in aux_files:
aFile_base = SCons.Util.splitext(aFile)[0]
env.SideEffect(aFile_base + '.aux',target[0])
if Verbose:
print("side effect aux :",aFile_base + '.aux')
env.Clean(target[0],aFile_base + '.aux')
# read fls file to get all other files that latex creates and will read on the next pass
# remove files from list that we explicitly dealt with above
if os.path.isfile(flsfilename):
with open(flsfilename, "r") as f:
content = f.read()
out_files = openout_re.findall(content)
myfiles = [auxfilename, logfilename, flsfilename, targetbase+'.dvi',targetbase+'.pdf']
for filename in out_files[:]:
if filename in myfiles:
out_files.remove(filename)
env.SideEffect(out_files,target[0])
if Verbose:
print("side effect fls :",out_files)
env.Clean(target[0],out_files)
return (target, source) | [
"def",
"tex_emitter_core",
"(",
"target",
",",
"source",
",",
"env",
",",
"graphics_extensions",
")",
":",
"basename",
"=",
"SCons",
".",
"Util",
".",
"splitext",
"(",
"str",
"(",
"source",
"[",
"0",
"]",
")",
")",
"[",
"0",
"]",
"basefile",
"=",
"os",
".",
"path",
".",
"split",
"(",
"str",
"(",
"basename",
")",
")",
"[",
"1",
"]",
"targetdir",
"=",
"os",
".",
"path",
".",
"split",
"(",
"str",
"(",
"target",
"[",
"0",
"]",
")",
")",
"[",
"0",
"]",
"targetbase",
"=",
"os",
".",
"path",
".",
"join",
"(",
"targetdir",
",",
"basefile",
")",
"basedir",
"=",
"os",
".",
"path",
".",
"split",
"(",
"str",
"(",
"source",
"[",
"0",
"]",
")",
")",
"[",
"0",
"]",
"abspath",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"basedir",
")",
"target",
"[",
"0",
"]",
".",
"attributes",
".",
"path",
"=",
"abspath",
"#",
"# file names we will make use of in searching the sources and log file",
"#",
"emit_suffixes",
"=",
"[",
"'.aux'",
",",
"'.log'",
",",
"'.ilg'",
",",
"'.blg'",
",",
"'.nls'",
",",
"'.nlg'",
",",
"'.gls'",
",",
"'.glg'",
",",
"'.alg'",
"]",
"+",
"all_suffixes",
"auxfilename",
"=",
"targetbase",
"+",
"'.aux'",
"logfilename",
"=",
"targetbase",
"+",
"'.log'",
"flsfilename",
"=",
"targetbase",
"+",
"'.fls'",
"syncfilename",
"=",
"targetbase",
"+",
"'.synctex.gz'",
"env",
".",
"SideEffect",
"(",
"auxfilename",
",",
"target",
"[",
"0",
"]",
")",
"env",
".",
"SideEffect",
"(",
"logfilename",
",",
"target",
"[",
"0",
"]",
")",
"env",
".",
"SideEffect",
"(",
"flsfilename",
",",
"target",
"[",
"0",
"]",
")",
"env",
".",
"SideEffect",
"(",
"syncfilename",
",",
"target",
"[",
"0",
"]",
")",
"if",
"Verbose",
":",
"print",
"(",
"\"side effect :\"",
",",
"auxfilename",
",",
"logfilename",
",",
"flsfilename",
",",
"syncfilename",
")",
"env",
".",
"Clean",
"(",
"target",
"[",
"0",
"]",
",",
"auxfilename",
")",
"env",
".",
"Clean",
"(",
"target",
"[",
"0",
"]",
",",
"logfilename",
")",
"env",
".",
"Clean",
"(",
"target",
"[",
"0",
"]",
",",
"flsfilename",
")",
"env",
".",
"Clean",
"(",
"target",
"[",
"0",
"]",
",",
"syncfilename",
")",
"content",
"=",
"source",
"[",
"0",
"]",
".",
"get_text_contents",
"(",
")",
"# set up list with the regular expressions",
"# we use to find features used",
"file_tests_search",
"=",
"[",
"auxfile_re",
",",
"makeindex_re",
",",
"bibliography_re",
",",
"bibunit_re",
",",
"multibib_re",
",",
"addbibresource_re",
",",
"tableofcontents_re",
",",
"listoffigures_re",
",",
"listoftables_re",
",",
"hyperref_re",
",",
"makenomenclature_re",
",",
"makeglossary_re",
",",
"makeglossaries_re",
",",
"makeacronyms_re",
",",
"beamer_re",
",",
"newglossary_re",
",",
"biblatex_re",
"]",
"# set up list with the file suffixes that need emitting",
"# when a feature is found",
"file_tests_suff",
"=",
"[",
"[",
"'.aux'",
",",
"'aux_file'",
"]",
",",
"[",
"'.idx'",
",",
"'.ind'",
",",
"'.ilg'",
",",
"'makeindex'",
"]",
",",
"[",
"'.bbl'",
",",
"'.blg'",
",",
"'bibliography'",
"]",
",",
"[",
"'.bbl'",
",",
"'.blg'",
",",
"'bibunit'",
"]",
",",
"[",
"'.bbl'",
",",
"'.blg'",
",",
"'multibib'",
"]",
",",
"[",
"'.bbl'",
",",
"'.blg'",
",",
"'.bcf'",
",",
"'addbibresource'",
"]",
",",
"[",
"'.toc'",
",",
"'contents'",
"]",
",",
"[",
"'.lof'",
",",
"'figures'",
"]",
",",
"[",
"'.lot'",
",",
"'tables'",
"]",
",",
"[",
"'.out'",
",",
"'hyperref'",
"]",
",",
"[",
"'.nlo'",
",",
"'.nls'",
",",
"'.nlg'",
",",
"'nomenclature'",
"]",
",",
"[",
"'.glo'",
",",
"'.gls'",
",",
"'.glg'",
",",
"'glossary'",
"]",
",",
"[",
"'.glo'",
",",
"'.gls'",
",",
"'.glg'",
",",
"'glossaries'",
"]",
",",
"[",
"'.acn'",
",",
"'.acr'",
",",
"'.alg'",
",",
"'acronyms'",
"]",
",",
"[",
"'.nav'",
",",
"'.snm'",
",",
"'.out'",
",",
"'.toc'",
",",
"'beamer'",
"]",
",",
"[",
"'newglossary'",
",",
"]",
",",
"[",
"'.bcf'",
",",
"'.blg'",
",",
"'biblatex'",
"]",
"]",
"# for newglossary the suffixes are added as we find the command",
"# build the list of lists",
"file_tests",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"file_tests_search",
")",
")",
":",
"file_tests",
".",
"append",
"(",
"[",
"None",
",",
"file_tests_suff",
"[",
"i",
"]",
"]",
")",
"# TO-DO: need to add a way for the user to extend this list for whatever",
"# auxiliary files they create in other (or their own) packages",
"# get path list from both env['TEXINPUTS'] and env['ENV']['TEXINPUTS']",
"savedpath",
"=",
"modify_env_var",
"(",
"env",
",",
"'TEXINPUTS'",
",",
"abspath",
")",
"paths",
"=",
"env",
"[",
"'ENV'",
"]",
"[",
"'TEXINPUTS'",
"]",
"if",
"SCons",
".",
"Util",
".",
"is_List",
"(",
"paths",
")",
":",
"pass",
"else",
":",
"# Split at os.pathsep to convert into absolute path",
"paths",
"=",
"paths",
".",
"split",
"(",
"os",
".",
"pathsep",
")",
"# now that we have the path list restore the env",
"if",
"savedpath",
"is",
"_null",
":",
"try",
":",
"del",
"env",
"[",
"'ENV'",
"]",
"[",
"'TEXINPUTS'",
"]",
"except",
"KeyError",
":",
"pass",
"# was never set",
"else",
":",
"env",
"[",
"'ENV'",
"]",
"[",
"'TEXINPUTS'",
"]",
"=",
"savedpath",
"if",
"Verbose",
":",
"print",
"(",
"\"search path \"",
",",
"paths",
")",
"# scan all sources for side effect files",
"aux_files",
"=",
"[",
"]",
"file_tests",
"=",
"ScanFiles",
"(",
"source",
"[",
"0",
"]",
",",
"target",
",",
"paths",
",",
"file_tests",
",",
"file_tests_search",
",",
"env",
",",
"graphics_extensions",
",",
"targetdir",
",",
"aux_files",
")",
"for",
"(",
"theSearch",
",",
"suffix_list",
")",
"in",
"file_tests",
":",
"# add side effects if feature is present.If file is to be generated,add all side effects",
"if",
"Verbose",
"and",
"theSearch",
":",
"print",
"(",
"\"check side effects for \"",
",",
"suffix_list",
"[",
"-",
"1",
"]",
")",
"if",
"theSearch",
"is",
"not",
"None",
"or",
"not",
"source",
"[",
"0",
"]",
".",
"exists",
"(",
")",
":",
"file_list",
"=",
"[",
"targetbase",
",",
"]",
"# for bibunit we need a list of files",
"if",
"suffix_list",
"[",
"-",
"1",
"]",
"==",
"'bibunit'",
":",
"file_basename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"targetdir",
",",
"'bu*.aux'",
")",
"file_list",
"=",
"glob",
".",
"glob",
"(",
"file_basename",
")",
"# remove the suffix '.aux'",
"for",
"fl",
"in",
"file_list",
".",
"copy",
"(",
")",
":",
"file_list",
".",
"append",
"(",
"SCons",
".",
"Util",
".",
"splitext",
"(",
"fl",
")",
"[",
"0",
"]",
")",
"# for multibib we need a list of files",
"if",
"suffix_list",
"[",
"-",
"1",
"]",
"==",
"'multibib'",
":",
"for",
"multibibmatch",
"in",
"multibib_re",
".",
"finditer",
"(",
"content",
")",
":",
"if",
"Verbose",
":",
"print",
"(",
"\"multibib match \"",
",",
"multibibmatch",
".",
"group",
"(",
"1",
")",
")",
"if",
"multibibmatch",
"is",
"not",
"None",
":",
"baselist",
"=",
"multibibmatch",
".",
"group",
"(",
"1",
")",
".",
"split",
"(",
"','",
")",
"if",
"Verbose",
":",
"print",
"(",
"\"multibib list \"",
",",
"baselist",
")",
"for",
"bl",
"in",
"baselist",
":",
"file_list",
".",
"append",
"(",
"os",
".",
"path",
".",
"join",
"(",
"targetdir",
",",
"bl",
")",
")",
"# now define the side effects",
"for",
"file_name",
"in",
"file_list",
":",
"for",
"suffix",
"in",
"suffix_list",
"[",
":",
"-",
"1",
"]",
":",
"env",
".",
"SideEffect",
"(",
"file_name",
"+",
"suffix",
",",
"target",
"[",
"0",
"]",
")",
"if",
"Verbose",
":",
"print",
"(",
"\"side effect tst :\"",
",",
"file_name",
"+",
"suffix",
",",
"\" target is \"",
",",
"str",
"(",
"target",
"[",
"0",
"]",
")",
")",
"env",
".",
"Clean",
"(",
"target",
"[",
"0",
"]",
",",
"file_name",
"+",
"suffix",
")",
"for",
"aFile",
"in",
"aux_files",
":",
"aFile_base",
"=",
"SCons",
".",
"Util",
".",
"splitext",
"(",
"aFile",
")",
"[",
"0",
"]",
"env",
".",
"SideEffect",
"(",
"aFile_base",
"+",
"'.aux'",
",",
"target",
"[",
"0",
"]",
")",
"if",
"Verbose",
":",
"print",
"(",
"\"side effect aux :\"",
",",
"aFile_base",
"+",
"'.aux'",
")",
"env",
".",
"Clean",
"(",
"target",
"[",
"0",
"]",
",",
"aFile_base",
"+",
"'.aux'",
")",
"# read fls file to get all other files that latex creates and will read on the next pass",
"# remove files from list that we explicitly dealt with above",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"flsfilename",
")",
":",
"with",
"open",
"(",
"flsfilename",
",",
"\"r\"",
")",
"as",
"f",
":",
"content",
"=",
"f",
".",
"read",
"(",
")",
"out_files",
"=",
"openout_re",
".",
"findall",
"(",
"content",
")",
"myfiles",
"=",
"[",
"auxfilename",
",",
"logfilename",
",",
"flsfilename",
",",
"targetbase",
"+",
"'.dvi'",
",",
"targetbase",
"+",
"'.pdf'",
"]",
"for",
"filename",
"in",
"out_files",
"[",
":",
"]",
":",
"if",
"filename",
"in",
"myfiles",
":",
"out_files",
".",
"remove",
"(",
"filename",
")",
"env",
".",
"SideEffect",
"(",
"out_files",
",",
"target",
"[",
"0",
"]",
")",
"if",
"Verbose",
":",
"print",
"(",
"\"side effect fls :\"",
",",
"out_files",
")",
"env",
".",
"Clean",
"(",
"target",
"[",
"0",
"]",
",",
"out_files",
")",
"return",
"(",
"target",
",",
"source",
")"
] | https://github.com/mapnik/mapnik/blob/f3da900c355e1d15059c4a91b00203dcc9d9f0ef/scons/scons-local-4.1.0/SCons/Tool/tex.py#L675-L836 |
|
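Beneath the SCons plumbing, the emitter's bookkeeping is a parallel pairing of feature regexes with the side-effect suffixes each feature implies. A small standalone sketch of that pairing, scanning a LaTeX source string (the table below is a deliberately truncated, illustrative subset of the real one):

import re

# (pattern, implied suffixes) -- an illustrative subset of the real table.
FEATURES = [
    (re.compile(r'\\tableofcontents'), ['.toc']),
    (re.compile(r'\\makeindex'),       ['.idx', '.ind', '.ilg']),
    (re.compile(r'\\bibliography'),    ['.bbl', '.blg']),
]

def implied_side_effects(tex_source, basename):
    out = []
    for pattern, suffixes in FEATURES:
        if pattern.search(tex_source):
            out.extend(basename + s for s in suffixes)
    return out

src = r'\documentclass{article}\tableofcontents\bibliography{refs}'
print(implied_side_effects(src, 'report'))
# -> ['report.toc', 'report.bbl', 'report.blg']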
ros-perception/image_pipeline | cd4aa7ab38726d88e8e0144aa0d45ad2f236535a | camera_calibration/src/camera_calibration/calibrator.py | python | _calculate_skew | (corners) | return skew | Get skew for given checkerboard detection.
Scaled to [0,1], which 0 = no skew, 1 = high skew
Skew is proportional to the divergence of three outside corners from 90 degrees. | Get skew for given checkerboard detection.
Scaled to [0,1], which 0 = no skew, 1 = high skew
Skew is proportional to the divergence of three outside corners from 90 degrees. | [
"Get",
"skew",
"for",
"given",
"checkerboard",
"detection",
".",
"Scaled",
"to",
"[",
"0",
"1",
"]",
"which",
"0",
"=",
"no",
"skew",
"1",
"=",
"high",
"skew",
"Skew",
"is",
"proportional",
"to",
"the",
"divergence",
"of",
"three",
"outside",
"corners",
"from",
"90",
"degrees",
"."
] | def _calculate_skew(corners):
"""
Get skew for given checkerboard detection.
Scaled to [0,1], which 0 = no skew, 1 = high skew
Skew is proportional to the divergence of three outside corners from 90 degrees.
"""
# TODO Using three nearby interior corners might be more robust, outside corners occasionally
# get mis-detected
up_left, up_right, down_right, _ = corners
def angle(a, b, c):
"""
Return angle between lines ab, bc
"""
ab = a - b
cb = c - b
return math.acos(numpy.dot(ab,cb) / (numpy.linalg.norm(ab) * numpy.linalg.norm(cb)))
skew = min(1.0, 2. * abs((math.pi / 2.) - angle(up_left, up_right, down_right)))
return skew | [
"def",
"_calculate_skew",
"(",
"corners",
")",
":",
"# TODO Using three nearby interior corners might be more robust, outside corners occasionally",
"# get mis-detected",
"up_left",
",",
"up_right",
",",
"down_right",
",",
"_",
"=",
"corners",
"def",
"angle",
"(",
"a",
",",
"b",
",",
"c",
")",
":",
"\"\"\"\n Return angle between lines ab, bc\n \"\"\"",
"ab",
"=",
"a",
"-",
"b",
"cb",
"=",
"c",
"-",
"b",
"return",
"math",
".",
"acos",
"(",
"numpy",
".",
"dot",
"(",
"ab",
",",
"cb",
")",
"/",
"(",
"numpy",
".",
"linalg",
".",
"norm",
"(",
"ab",
")",
"*",
"numpy",
".",
"linalg",
".",
"norm",
"(",
"cb",
")",
")",
")",
"skew",
"=",
"min",
"(",
"1.0",
",",
"2.",
"*",
"abs",
"(",
"(",
"math",
".",
"pi",
"/",
"2.",
")",
"-",
"angle",
"(",
"up_left",
",",
"up_right",
",",
"down_right",
")",
")",
")",
"return",
"skew"
] | https://github.com/ros-perception/image_pipeline/blob/cd4aa7ab38726d88e8e0144aa0d45ad2f236535a/camera_calibration/src/camera_calibration/calibrator.py#L157-L176 |
|
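A quick sanity check of the metric: an axis-aligned rectangle has a 90-degree angle at the up_right corner, so its skew is 0, while shearing the board pushes the value up. A standalone run of the same formula (numpy required, as in the original):

import math
import numpy

def skew(corners):
    up_left, up_right, down_right, _ = [numpy.array(c, float) for c in corners]
    def angle(a, b, c):
        ab, cb = a - b, c - b
        return math.acos(numpy.dot(ab, cb) /
                         (numpy.linalg.norm(ab) * numpy.linalg.norm(cb)))
    return min(1.0, 2. * abs((math.pi / 2.) - angle(up_left, up_right, down_right)))

square = [(0, 0), (1, 0), (1, 1), (0, 1)]        # right angle at up_right
sheared = [(0, 0), (1, 0), (1.5, 1), (0.5, 1)]   # shear: ~117 deg at up_right
print(skew(square))    # -> 0.0
print(skew(sheared))   # -> ~0.93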
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/_core.py | python | Window.IsShownOnScreen | (*args, **kwargs) | return _core_.Window_IsShownOnScreen(*args, **kwargs) | IsShownOnScreen(self) -> bool
Returns ``True`` if the window is physically visible on the screen,
i.e. it is shown and all its parents up to the toplevel window are
shown as well. | IsShownOnScreen(self) -> bool | [
"IsShownOnScreen",
"(",
"self",
")",
"-",
">",
"bool"
] | def IsShownOnScreen(*args, **kwargs):
"""
IsShownOnScreen(self) -> bool
Returns ``True`` if the window is physically visible on the screen,
i.e. it is shown and all its parents up to the toplevel window are
shown as well.
"""
return _core_.Window_IsShownOnScreen(*args, **kwargs) | [
"def",
"IsShownOnScreen",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Window_IsShownOnScreen",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L10006-L10014 |
|
BDLDev/bdlauncher | d10fb098852ebcf9fb71afb23052a463ee7b5d0a | scripts/cppheaderparser.py | python | CppHeader.__init__ | (self, headerFileName, argType="file", **kwargs) | Create the parsed C++ header file parse tree
headerFileName - Name of the file to parse OR actual file contents (depends on argType)
argType - Indicates how to interpret headerFileName as a file string or file name
kwargs - Supports the following keywords | Create the parsed C++ header file parse tree
headerFileName - Name of the file to parse OR actual file contents (depends on argType)
argType - Indicates how to interpret headerFileName as a file string or file name
kwargs - Supports the following keywords | [
"Create",
"the",
"parsed",
"C",
"++",
"header",
"file",
"parse",
"tree",
"headerFileName",
"-",
"Name",
"of",
"the",
"file",
"to",
"parse",
"OR",
"actual",
"file",
"contents",
"(",
"depends",
"on",
"argType",
")",
"argType",
"-",
"Indicates",
"how",
"to",
"interpret",
"headerFileName",
"as",
"a",
"file",
"string",
"or",
"file",
"name",
"kwargs",
"-",
"Supports",
"the",
"following",
"keywords"
] | def __init__(self, headerFileName, argType="file", **kwargs):
"""Create the parsed C++ header file parse tree
headerFileName - Name of the file to parse OR actual file contents (depends on argType)
argType - Indicates how to interpret headerFileName as a file string or file name
kwargs - Supports the following keywords
"""
## reset global state ##
global doxygenCommentCache
doxygenCommentCache = ""
CppVariable.Vars = []
CppStruct.Structs = []
if (argType == "file"):
self.headerFileName = os.path.expandvars(headerFileName)
self.mainClass = os.path.split(self.headerFileName)[1][:-2]
headerFileStr = ""
elif argType == "string":
self.headerFileName = ""
self.mainClass = "???"
headerFileStr = headerFileName
else:
raise Exception("Arg type must be either file or string")
self.curClass = ""
# nested classes have parent::nested, but no extra namespace,
# this keeps the API compatible, TODO proper namespace for everything.
Resolver.CLASSES = {}
self.classes = Resolver.CLASSES
#Functions that are not part of a class
self.functions = []
self.pragmas = []
self.defines = []
self.includes = []
self._precomp_macro_buf = [] #for internal purposes, will end up filling out pragmas and defines at the end
self.enums = []
self.variables = []
self.global_enums = {}
self.nameStack = []
self.nameSpaces = []
self.curAccessSpecifier = 'private' # private is default
self.curTemplate = None
self.accessSpecifierStack = []
self.accessSpecifierScratch = []
debug_print("curAccessSpecifier changed/defaulted to %s"%self.curAccessSpecifier)
self.initextra()
# Old namestacks for a given level
self.nameStackHistory = []
self.anon_struct_counter = 0
self.anon_union_counter = [-1, 0]
self.templateRegistry = []
if (len(self.headerFileName)):
fd = open(self.headerFileName)
headerFileStr = "".join(fd.readlines())
fd.close()
# Make sure supportedAccessSpecifier are sane
for i in range(0, len(supportedAccessSpecifier)):
if " " not in supportedAccessSpecifier[i]: continue
supportedAccessSpecifier[i] = re.sub("[ ]+", " ", supportedAccessSpecifier[i]).strip()
# Strip out template declarations
templateSectionsToSliceOut = []
try:
for m in re.finditer("template[\t ]*<[^>]*>", headerFileStr):
start = m.start()
# Search for the final '>' which may or may not be caught in the case of nested <>'s
for i in range(start, len(headerFileStr)):
if headerFileStr[i] == '<':
firstBracket = i
break
ltgtStackCount = 1
#Now look for final '>'
for i in range(firstBracket + 1, len(headerFileStr)):
if headerFileStr[i] == '<':
ltgtStackCount += 1
elif headerFileStr[i] == '>':
ltgtStackCount -= 1
if ltgtStackCount == 0:
end = i
break
templateSectionsToSliceOut.append((start, end))
# Now strip out all instances of the template
templateSectionsToSliceOut.reverse()
for tslice in templateSectionsToSliceOut:
# Replace the template symbol with a single symbol
template_symbol="CppHeaderParser_template_%d"%len(self.templateRegistry)
self.templateRegistry.append(headerFileStr[tslice[0]: tslice[1]+1])
newlines = headerFileStr[tslice[0]: tslice[1]].count("\n") * "\n" #Keep line numbers the same
headerFileStr = headerFileStr[:tslice[0]] + newlines + " " + template_symbol + " " + headerFileStr[tslice[1] + 1:]
except:
pass
# Change multi-line #defines and expressions to single lines, maintaining line numbers
# Based on http://stackoverflow.com/questions/2424458/regular-expression-to-match-cs-multiline-preprocessor-statements
matches = re.findall(r'(?m)^(?:.*\\\r?\n)+.*$', headerFileStr)
is_define = re.compile(r'[ \t\v]*#[Dd][Ee][Ff][Ii][Nn][Ee]')
for m in matches:
#Keep the newlines so that linecount doesn't break
num_newlines = len([a for a in m if a=="\n"])
if is_define.match(m):
new_m = m.replace("\n", "<CppHeaderParser_newline_temp_replacement>\\n")
else:
# Just expression taking up multiple lines, make it take 1 line for easier parsing
new_m = m.replace("\\\n", " ")
if (num_newlines > 0):
new_m += "\n"*(num_newlines)
headerFileStr = headerFileStr.replace(m, new_m)
#Filter out Extern "C" statements. These are order dependent
matches = re.findall(re.compile(r'extern[\t ]+"[Cc]"[\t \n\r]*{', re.DOTALL), headerFileStr)
for m in matches:
#Keep the newlines so that linecount doesn't break
num_newlines = len([a for a in m if a=="\n"])
headerFileStr = headerFileStr.replace(m, "\n" * num_newlines)
headerFileStr = re.sub(r'extern[ ]+"[Cc]"[ ]*', "", headerFileStr)
#Filter out any ignore symbols that end with "()" to account for #define magic functions
for ignore in ignoreSymbols:
if not ignore.endswith("()"): continue
while True:
locStart = headerFileStr.find(ignore[:-1])
if locStart == -1:
break;
locEnd = None
#Now walk till we find the last paren and account for sub parens
parenCount = 1
inQuotes = False
for i in range(locStart + len(ignore) - 1, len(headerFileStr)):
c = headerFileStr[i]
if not inQuotes:
if c == "(":
parenCount += 1
elif c == ")":
parenCount -= 1
elif c == '"':
inQuotes = True
if parenCount == 0:
locEnd = i + 1
break;
else:
if c == '"' and headerFileStr[i-1] != '\\':
inQuotes = False
if locEnd:
#Strip it out but keep the linecount the same so line numbers are right
match_str = headerFileStr[locStart:locEnd]
debug_print("Striping out '%s'"%match_str)
num_newlines = len([a for a in match_str if a=="\n"])
headerFileStr = headerFileStr.replace(headerFileStr[locStart:locEnd], "\n"*num_newlines)
self.braceDepth = 0
lex.lex()
lex.input(headerFileStr)
global curLine
global curChar
curLine = 0
curChar = 0
try:
while True:
tok = lex.token()
if not tok: break
if self.anon_union_counter[0] == self.braceDepth and self.anon_union_counter[1]:
self.anon_union_counter[1] -= 1
tok.value = TagStr(tok.value, lineno=tok.lineno)
#debug_print("TOK: %s"%tok)
if tok.type == 'NAME' and tok.value in self.IGNORE_NAMES: continue
if tok.type != 'TEMPLATE_NAME':
self.stack.append( tok.value )
curLine = tok.lineno
curChar = tok.lexpos
if (tok.type in ('PRECOMP_MACRO', 'PRECOMP_MACRO_CONT')):
debug_print("PRECOMP: %s"%tok)
self._precomp_macro_buf.append(tok.value)
self.stack = []
self.nameStack = []
continue
if tok.type == 'TEMPLATE_NAME':
try:
templateId = int(tok.value.replace("CppHeaderParser_template_",""))
self.curTemplate = self.templateRegistry[templateId]
except: pass
if (tok.type == 'OPEN_BRACE'):
if len(self.nameStack) >= 2 and is_namespace(self.nameStack): # namespace {} with no name used in boost, this sets default?
if self.nameStack[1] == "__IGNORED_NAMESPACE__CppHeaderParser__":#Used in filtering extern "C"
self.nameStack[1] = ""
self.nameSpaces.append(self.nameStack[1])
ns = self.cur_namespace(); self.stack = []
if ns not in self.namespaces: self.namespaces.append( ns )
# Detect special condition of macro magic before class declaration so we
# can filter it out
if 'class' in self.nameStack and self.nameStack[0] != 'class' and self.nameStack[0] != 'enum':
classLocationNS = self.nameStack.index("class")
classLocationS = self.stack.index("class")
if "(" not in self.nameStack[classLocationNS:]:
debug_print("keyword 'class' found in unexpected location in nameStack, must be following #define magic. Process that before moving on")
origNameStack = self.nameStack
origStack = self.stack
#Process first part of stack which is probably #define macro magic and may cause issues
self.nameStack = self.nameStack[:classLocationNS]
self.stack = self.stack[:classLocationS]
try:
self.evaluate_stack()
except:
debug_print("Error processing #define magic... Oh well")
#Process rest of stack
self.nameStack = origNameStack[classLocationNS:]
self.stack = origStack[classLocationS:]
if len(self.nameStack) and not is_enum_namestack(self.nameStack):
self.evaluate_stack()
else:
self.nameStack.append(tok.value)
if self.stack and self.stack[0] == 'class': self.stack = []
self.braceDepth += 1
elif (tok.type == 'CLOSE_BRACE'):
if self.braceDepth == 0:
continue
if (self.braceDepth == len(self.nameSpaces)):
tmp = self.nameSpaces.pop()
self.stack = [] # clear stack when namespace ends?
if len(self.nameStack) and is_enum_namestack(self.nameStack):
self.nameStack.append(tok.value)
elif self.braceDepth < 10:
self.evaluate_stack()
else:
self.nameStack = []
self.braceDepth -= 1
#self.stack = []; print 'BRACE DEPTH', self.braceDepth, 'NS', len(self.nameSpaces)
if self.curClass: debug_print( 'CURBD %s'%self._classes_brace_level[ self.curClass ] )
if (self.braceDepth == 0) or (self.curClass and self._classes_brace_level[self.curClass]==self.braceDepth):
trace_print( 'END OF CLASS DEF' )
if self.accessSpecifierStack:
self.curAccessSpecifier = self.accessSpecifierStack[-1]
self.accessSpecifierStack = self.accessSpecifierStack[:-1]
if self.curClass and self.classes[ self.curClass ]['parent']: self.curClass = self.classes[ self.curClass ]['parent']
else: self.curClass = ""; #self.curStruct = None
self.stack = []
#if self.curStruct: self.curStruct = None
if self.braceDepth == 0 or (self.curStruct and self._structs_brace_level[self.curStruct['type']]==self.braceDepth):
trace_print( 'END OF STRUCT DEF' )
self.curStruct = None
if self._method_body and (self.braceDepth + 1) <= self._method_body:
self._method_body = None; self.stack = []; self.nameStack = []; trace_print( 'FORCE CLEAR METHBODY' )
if (tok.type == 'OPEN_PAREN'):
self.nameStack.append(tok.value)
elif (tok.type == 'CLOSE_PAREN'):
self.nameStack.append(tok.value)
elif (tok.type == 'OPEN_SQUARE_BRACKET'):
self.nameStack.append(tok.value)
elif (tok.type == 'CLOSE_SQUARE_BRACKET'):
self.nameStack.append(tok.value)
elif (tok.type == 'TAB'): pass
elif (tok.type == 'EQUALS'):
self.nameStack.append(tok.value)
elif (tok.type == 'COMMA'):
self.nameStack.append(tok.value)
elif (tok.type == 'BACKSLASH'):
self.nameStack.append(tok.value)
elif (tok.type == 'DIVIDE'):
self.nameStack.append(tok.value)
elif (tok.type == 'PIPE'):
self.nameStack.append(tok.value)
elif (tok.type == 'PERCENT'):
self.nameStack.append(tok.value)
elif (tok.type == 'CARET'):
self.nameStack.append(tok.value)
elif (tok.type == 'EXCLAMATION'):
self.nameStack.append(tok.value)
elif (tok.type == 'SQUOTE'): pass
elif (tok.type == 'NUMBER' or tok.type == 'FLOAT_NUMBER'):
self.nameStack.append(tok.value)
elif (tok.type == 'MINUS'):
self.nameStack.append(tok.value)
elif (tok.type == 'PLUS'):
self.nameStack.append(tok.value)
elif (tok.type == 'STRING_LITERAL'):
self.nameStack.append(tok.value)
elif (tok.type == 'NAME' or tok.type == 'AMPERSTAND' or tok.type == 'ASTERISK' or tok.type == 'CHAR_LITERAL'):
if tok.value in ignoreSymbols:
debug_print("Ignore symbol %s"%tok.value)
elif (tok.value == 'class'):
self.nameStack.append(tok.value)
elif tok.value in supportedAccessSpecifier:
if len(self.nameStack) and self.nameStack[0] in ("class", "struct", "union"):
self.nameStack.append(tok.value)
elif self.braceDepth == len(self.nameSpaces) + 1 or self.braceDepth == (len(self.nameSpaces) + len(self.curClass.split("::"))):
self.curAccessSpecifier = tok.value;
self.accessSpecifierScratch.append(tok.value)
debug_print("curAccessSpecifier updated to %s"%self.curAccessSpecifier)
self.stack = []
else:
self.nameStack.append(tok.value)
if self.anon_union_counter[0] == self.braceDepth:
self.anon_union_counter = [-1, 0]
elif (tok.type == 'COLON'):
#Don't want colon to be first in stack
if len(self.nameStack) == 0:
self.accessSpecifierScratch = []
continue
# Handle situation where access specifiers can be multi words such as "public slots"
jns = " ".join(self.accessSpecifierScratch + self.nameStack)
if jns in supportedAccessSpecifier:
self.curAccessSpecifier = jns;
debug_print("curAccessSpecifier updated to %s"%self.curAccessSpecifier)
self.stack = []
self.nameStack = []
else:
self.nameStack.append(tok.value)
self.accessSpecifierScratch = []
elif (tok.type == 'SEMI_COLON'):
if self.anon_union_counter[0] == self.braceDepth and self.anon_union_counter[1]:
debug_print("Creating anonymous union")
#Force the processing of an anonymous union
saved_namestack = self.nameStack[:]
saved_stack = self.stack[:]
self.nameStack = [""]
self.stack = self.nameStack + [";"]
self.nameStack = self.nameStack[0:1]
debug_print("pre eval anon stack")
self.evaluate_stack( tok.type )
debug_print("post eval anon stack")
self.nameStack = saved_namestack
self.stack = saved_stack
self.anon_union_counter = [-1, 0];
if (self.braceDepth < 10): self.evaluate_stack( tok.type )
self.stack = []
self.nameStack = []
except:
if (debug): raise
raise CppParseError("Not able to parse %s on line %d evaluating \"%s\"\nError around: %s"
% (self.headerFileName, tok.lineno, tok.value, " ".join(self.nameStack)))
self.finalize()
global parseHistory
parseHistory = []
# Delete some temporary variables
for key in ["_precomp_macro_buf", "nameStack", "nameSpaces", "curAccessSpecifier", "accessSpecifierStack",
"accessSpecifierScratch", "nameStackHistory", "anon_struct_counter", "anon_union_counter",
"_classes_brace_level", "_forward_decls", "stack", "mainClass", "curStruct", "_template_typenames",
"_method_body", "braceDepth", "_structs_brace_level", "typedefs_order", "curTemplate", "templateRegistry"]:
del self.__dict__[key] | [
"def",
"__init__",
"(",
"self",
",",
"headerFileName",
",",
"argType",
"=",
"\"file\"",
",",
"*",
"*",
"kwargs",
")",
":",
"## reset global state ##",
"global",
"doxygenCommentCache",
"doxygenCommentCache",
"=",
"\"\"",
"CppVariable",
".",
"Vars",
"=",
"[",
"]",
"CppStruct",
".",
"Structs",
"=",
"[",
"]",
"if",
"(",
"argType",
"==",
"\"file\"",
")",
":",
"self",
".",
"headerFileName",
"=",
"os",
".",
"path",
".",
"expandvars",
"(",
"headerFileName",
")",
"self",
".",
"mainClass",
"=",
"os",
".",
"path",
".",
"split",
"(",
"self",
".",
"headerFileName",
")",
"[",
"1",
"]",
"[",
":",
"-",
"2",
"]",
"headerFileStr",
"=",
"\"\"",
"elif",
"argType",
"==",
"\"string\"",
":",
"self",
".",
"headerFileName",
"=",
"\"\"",
"self",
".",
"mainClass",
"=",
"\"???\"",
"headerFileStr",
"=",
"headerFileName",
"else",
":",
"raise",
"Exception",
"(",
"\"Arg type must be either file or string\"",
")",
"self",
".",
"curClass",
"=",
"\"\"",
"# nested classes have parent::nested, but no extra namespace,",
"# this keeps the API compatible, TODO proper namespace for everything. ",
"Resolver",
".",
"CLASSES",
"=",
"{",
"}",
"self",
".",
"classes",
"=",
"Resolver",
".",
"CLASSES",
"#Functions that are not part of a class",
"self",
".",
"functions",
"=",
"[",
"]",
"self",
".",
"pragmas",
"=",
"[",
"]",
"self",
".",
"defines",
"=",
"[",
"]",
"self",
".",
"includes",
"=",
"[",
"]",
"self",
".",
"_precomp_macro_buf",
"=",
"[",
"]",
"#for internal purposes, will end up filling out pragmras and defines at the end",
"self",
".",
"enums",
"=",
"[",
"]",
"self",
".",
"variables",
"=",
"[",
"]",
"self",
".",
"global_enums",
"=",
"{",
"}",
"self",
".",
"nameStack",
"=",
"[",
"]",
"self",
".",
"nameSpaces",
"=",
"[",
"]",
"self",
".",
"curAccessSpecifier",
"=",
"'private'",
"# private is default",
"self",
".",
"curTemplate",
"=",
"None",
"self",
".",
"accessSpecifierStack",
"=",
"[",
"]",
"self",
".",
"accessSpecifierScratch",
"=",
"[",
"]",
"debug_print",
"(",
"\"curAccessSpecifier changed/defaulted to %s\"",
"%",
"self",
".",
"curAccessSpecifier",
")",
"self",
".",
"initextra",
"(",
")",
"# Old namestacks for a given level",
"self",
".",
"nameStackHistory",
"=",
"[",
"]",
"self",
".",
"anon_struct_counter",
"=",
"0",
"self",
".",
"anon_union_counter",
"=",
"[",
"-",
"1",
",",
"0",
"]",
"self",
".",
"templateRegistry",
"=",
"[",
"]",
"if",
"(",
"len",
"(",
"self",
".",
"headerFileName",
")",
")",
":",
"fd",
"=",
"open",
"(",
"self",
".",
"headerFileName",
")",
"headerFileStr",
"=",
"\"\"",
".",
"join",
"(",
"fd",
".",
"readlines",
"(",
")",
")",
"fd",
".",
"close",
"(",
")",
"# Make sure supportedAccessSpecifier are sane",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"supportedAccessSpecifier",
")",
")",
":",
"if",
"\" \"",
"not",
"in",
"supportedAccessSpecifier",
"[",
"i",
"]",
":",
"continue",
"supportedAccessSpecifier",
"[",
"i",
"]",
"=",
"re",
".",
"sub",
"(",
"\"[ ]+\"",
",",
"\" \"",
",",
"supportedAccessSpecifier",
"[",
"i",
"]",
")",
".",
"strip",
"(",
")",
"# Strip out template declarations",
"templateSectionsToSliceOut",
"=",
"[",
"]",
"try",
":",
"for",
"m",
"in",
"re",
".",
"finditer",
"(",
"\"template[\\t ]*<[^>]*>\"",
",",
"headerFileStr",
")",
":",
"start",
"=",
"m",
".",
"start",
"(",
")",
"# Search for the final '>' which may or may not be caught in the case of nexted <>'s",
"for",
"i",
"in",
"range",
"(",
"start",
",",
"len",
"(",
"headerFileStr",
")",
")",
":",
"if",
"headerFileStr",
"[",
"i",
"]",
"==",
"'<'",
":",
"firstBracket",
"=",
"i",
"break",
"ltgtStackCount",
"=",
"1",
"#Now look for fianl '>'",
"for",
"i",
"in",
"range",
"(",
"firstBracket",
"+",
"1",
",",
"len",
"(",
"headerFileStr",
")",
")",
":",
"if",
"headerFileStr",
"[",
"i",
"]",
"==",
"'<'",
":",
"ltgtStackCount",
"+=",
"1",
"elif",
"headerFileStr",
"[",
"i",
"]",
"==",
"'>'",
":",
"ltgtStackCount",
"-=",
"1",
"if",
"ltgtStackCount",
"==",
"0",
":",
"end",
"=",
"i",
"break",
"templateSectionsToSliceOut",
".",
"append",
"(",
"(",
"start",
",",
"end",
")",
")",
"# Now strip out all instances of the template",
"templateSectionsToSliceOut",
".",
"reverse",
"(",
")",
"for",
"tslice",
"in",
"templateSectionsToSliceOut",
":",
"# Replace the template symbol with a single symbol",
"template_symbol",
"=",
"\"CppHeaderParser_template_%d\"",
"%",
"len",
"(",
"self",
".",
"templateRegistry",
")",
"self",
".",
"templateRegistry",
".",
"append",
"(",
"headerFileStr",
"[",
"tslice",
"[",
"0",
"]",
":",
"tslice",
"[",
"1",
"]",
"+",
"1",
"]",
")",
"newlines",
"=",
"headerFileStr",
"[",
"tslice",
"[",
"0",
"]",
":",
"tslice",
"[",
"1",
"]",
"]",
".",
"count",
"(",
"\"\\n\"",
")",
"*",
"\"\\n\"",
"#Keep line numbers the same",
"headerFileStr",
"=",
"headerFileStr",
"[",
":",
"tslice",
"[",
"0",
"]",
"]",
"+",
"newlines",
"+",
"\" \"",
"+",
"template_symbol",
"+",
"\" \"",
"+",
"headerFileStr",
"[",
"tslice",
"[",
"1",
"]",
"+",
"1",
":",
"]",
"except",
":",
"pass",
"# Change multi line #defines and expressions to single lines maintaining line nubmers",
"# Based from http://stackoverflow.com/questions/2424458/regular-expression-to-match-cs-multiline-preprocessor-statements",
"matches",
"=",
"re",
".",
"findall",
"(",
"r'(?m)^(?:.*\\\\\\r?\\n)+.*$'",
",",
"headerFileStr",
")",
"is_define",
"=",
"re",
".",
"compile",
"(",
"r'[ \\t\\v]*#[Dd][Ee][Ff][Ii][Nn][Ee]'",
")",
"for",
"m",
"in",
"matches",
":",
"#Keep the newlines so that linecount doesnt break",
"num_newlines",
"=",
"len",
"(",
"[",
"a",
"for",
"a",
"in",
"m",
"if",
"a",
"==",
"\"\\n\"",
"]",
")",
"if",
"is_define",
".",
"match",
"(",
"m",
")",
":",
"new_m",
"=",
"m",
".",
"replace",
"(",
"\"\\n\"",
",",
"\"<CppHeaderParser_newline_temp_replacement>\\\\n\"",
")",
"else",
":",
"# Just expression taking up multiple lines, make it take 1 line for easier parsing",
"new_m",
"=",
"m",
".",
"replace",
"(",
"\"\\\\\\n\"",
",",
"\" \"",
")",
"if",
"(",
"num_newlines",
">",
"0",
")",
":",
"new_m",
"+=",
"\"\\n\"",
"*",
"(",
"num_newlines",
")",
"headerFileStr",
"=",
"headerFileStr",
".",
"replace",
"(",
"m",
",",
"new_m",
")",
"#Filter out Extern \"C\" statements. These are order dependent",
"matches",
"=",
"re",
".",
"findall",
"(",
"re",
".",
"compile",
"(",
"r'extern[\\t ]+\"[Cc]\"[\\t \\n\\r]*{'",
",",
"re",
".",
"DOTALL",
")",
",",
"headerFileStr",
")",
"for",
"m",
"in",
"matches",
":",
"#Keep the newlines so that linecount doesnt break",
"num_newlines",
"=",
"len",
"(",
"[",
"a",
"for",
"a",
"in",
"m",
"if",
"a",
"==",
"\"\\n\"",
"]",
")",
"headerFileStr",
"=",
"headerFileStr",
".",
"replace",
"(",
"m",
",",
"\"\\n\"",
"*",
"num_newlines",
")",
"headerFileStr",
"=",
"re",
".",
"sub",
"(",
"r'extern[ ]+\"[Cc]\"[ ]*'",
",",
"\"\"",
",",
"headerFileStr",
")",
"#Filter out any ignore symbols that end with \"()\" to account for #define magic functions",
"for",
"ignore",
"in",
"ignoreSymbols",
":",
"if",
"not",
"ignore",
".",
"endswith",
"(",
"\"()\"",
")",
":",
"continue",
"while",
"True",
":",
"locStart",
"=",
"headerFileStr",
".",
"find",
"(",
"ignore",
"[",
":",
"-",
"1",
"]",
")",
"if",
"locStart",
"==",
"-",
"1",
":",
"break",
"locEnd",
"=",
"None",
"#Now walk till we find the last paren and account for sub parens",
"parenCount",
"=",
"1",
"inQuotes",
"=",
"False",
"for",
"i",
"in",
"range",
"(",
"locStart",
"+",
"len",
"(",
"ignore",
")",
"-",
"1",
",",
"len",
"(",
"headerFileStr",
")",
")",
":",
"c",
"=",
"headerFileStr",
"[",
"i",
"]",
"if",
"not",
"inQuotes",
":",
"if",
"c",
"==",
"\"(\"",
":",
"parenCount",
"+=",
"1",
"elif",
"c",
"==",
"\")\"",
":",
"parenCount",
"-=",
"1",
"elif",
"c",
"==",
"'\"'",
":",
"inQuotes",
"=",
"True",
"if",
"parenCount",
"==",
"0",
":",
"locEnd",
"=",
"i",
"+",
"1",
"break",
"else",
":",
"if",
"c",
"==",
"'\"'",
"and",
"headerFileStr",
"[",
"i",
"-",
"1",
"]",
"!=",
"'\\\\'",
":",
"inQuotes",
"=",
"False",
"if",
"locEnd",
":",
"#Strip it out but keep the linecount the same so line numbers are right",
"match_str",
"=",
"headerFileStr",
"[",
"locStart",
":",
"locEnd",
"]",
"debug_print",
"(",
"\"Striping out '%s'\"",
"%",
"match_str",
")",
"num_newlines",
"=",
"len",
"(",
"[",
"a",
"for",
"a",
"in",
"match_str",
"if",
"a",
"==",
"\"\\n\"",
"]",
")",
"headerFileStr",
"=",
"headerFileStr",
".",
"replace",
"(",
"headerFileStr",
"[",
"locStart",
":",
"locEnd",
"]",
",",
"\"\\n\"",
"*",
"num_newlines",
")",
"self",
".",
"braceDepth",
"=",
"0",
"lex",
".",
"lex",
"(",
")",
"lex",
".",
"input",
"(",
"headerFileStr",
")",
"global",
"curLine",
"global",
"curChar",
"curLine",
"=",
"0",
"curChar",
"=",
"0",
"try",
":",
"while",
"True",
":",
"tok",
"=",
"lex",
".",
"token",
"(",
")",
"if",
"not",
"tok",
":",
"break",
"if",
"self",
".",
"anon_union_counter",
"[",
"0",
"]",
"==",
"self",
".",
"braceDepth",
"and",
"self",
".",
"anon_union_counter",
"[",
"1",
"]",
":",
"self",
".",
"anon_union_counter",
"[",
"1",
"]",
"-=",
"1",
"tok",
".",
"value",
"=",
"TagStr",
"(",
"tok",
".",
"value",
",",
"lineno",
"=",
"tok",
".",
"lineno",
")",
"#debug_print(\"TOK: %s\"%tok)",
"if",
"tok",
".",
"type",
"==",
"'NAME'",
"and",
"tok",
".",
"value",
"in",
"self",
".",
"IGNORE_NAMES",
":",
"continue",
"if",
"tok",
".",
"type",
"!=",
"'TEMPLATE_NAME'",
":",
"self",
".",
"stack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"curLine",
"=",
"tok",
".",
"lineno",
"curChar",
"=",
"tok",
".",
"lexpos",
"if",
"(",
"tok",
".",
"type",
"in",
"(",
"'PRECOMP_MACRO'",
",",
"'PRECOMP_MACRO_CONT'",
")",
")",
":",
"debug_print",
"(",
"\"PRECOMP: %s\"",
"%",
"tok",
")",
"self",
".",
"_precomp_macro_buf",
".",
"append",
"(",
"tok",
".",
"value",
")",
"self",
".",
"stack",
"=",
"[",
"]",
"self",
".",
"nameStack",
"=",
"[",
"]",
"continue",
"if",
"tok",
".",
"type",
"==",
"'TEMPLATE_NAME'",
":",
"try",
":",
"templateId",
"=",
"int",
"(",
"tok",
".",
"value",
".",
"replace",
"(",
"\"CppHeaderParser_template_\"",
",",
"\"\"",
")",
")",
"self",
".",
"curTemplate",
"=",
"self",
".",
"templateRegistry",
"[",
"templateId",
"]",
"except",
":",
"pass",
"if",
"(",
"tok",
".",
"type",
"==",
"'OPEN_BRACE'",
")",
":",
"if",
"len",
"(",
"self",
".",
"nameStack",
")",
">=",
"2",
"and",
"is_namespace",
"(",
"self",
".",
"nameStack",
")",
":",
"# namespace {} with no name used in boost, this sets default?",
"if",
"self",
".",
"nameStack",
"[",
"1",
"]",
"==",
"\"__IGNORED_NAMESPACE__CppHeaderParser__\"",
":",
"#Used in filtering extern \"C\"",
"self",
".",
"nameStack",
"[",
"1",
"]",
"=",
"\"\"",
"self",
".",
"nameSpaces",
".",
"append",
"(",
"self",
".",
"nameStack",
"[",
"1",
"]",
")",
"ns",
"=",
"self",
".",
"cur_namespace",
"(",
")",
"self",
".",
"stack",
"=",
"[",
"]",
"if",
"ns",
"not",
"in",
"self",
".",
"namespaces",
":",
"self",
".",
"namespaces",
".",
"append",
"(",
"ns",
")",
"# Detect special condition of macro magic before class declaration so we",
"# can filter it out",
"if",
"'class'",
"in",
"self",
".",
"nameStack",
"and",
"self",
".",
"nameStack",
"[",
"0",
"]",
"!=",
"'class'",
"and",
"self",
".",
"nameStack",
"[",
"0",
"]",
"!=",
"'enum'",
":",
"classLocationNS",
"=",
"self",
".",
"nameStack",
".",
"index",
"(",
"\"class\"",
")",
"classLocationS",
"=",
"self",
".",
"stack",
".",
"index",
"(",
"\"class\"",
")",
"if",
"\"(\"",
"not",
"in",
"self",
".",
"nameStack",
"[",
"classLocationNS",
":",
"]",
":",
"debug_print",
"(",
"\"keyword 'class' found in unexpected location in nameStack, must be following #define magic. Process that before moving on\"",
")",
"origNameStack",
"=",
"self",
".",
"nameStack",
"origStack",
"=",
"self",
".",
"stack",
"#Process first part of stack which is probably #define macro magic and may cause issues",
"self",
".",
"nameStack",
"=",
"self",
".",
"nameStack",
"[",
":",
"classLocationNS",
"]",
"self",
".",
"stack",
"=",
"self",
".",
"stack",
"[",
":",
"classLocationS",
"]",
"try",
":",
"self",
".",
"evaluate_stack",
"(",
")",
"except",
":",
"debug_print",
"(",
"\"Error processing #define magic... Oh well\"",
")",
"#Process rest of stack",
"self",
".",
"nameStack",
"=",
"origNameStack",
"[",
"classLocationNS",
":",
"]",
"self",
".",
"stack",
"=",
"origStack",
"[",
"classLocationS",
":",
"]",
"if",
"len",
"(",
"self",
".",
"nameStack",
")",
"and",
"not",
"is_enum_namestack",
"(",
"self",
".",
"nameStack",
")",
":",
"self",
".",
"evaluate_stack",
"(",
")",
"else",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"if",
"self",
".",
"stack",
"and",
"self",
".",
"stack",
"[",
"0",
"]",
"==",
"'class'",
":",
"self",
".",
"stack",
"=",
"[",
"]",
"self",
".",
"braceDepth",
"+=",
"1",
"elif",
"(",
"tok",
".",
"type",
"==",
"'CLOSE_BRACE'",
")",
":",
"if",
"self",
".",
"braceDepth",
"==",
"0",
":",
"continue",
"if",
"(",
"self",
".",
"braceDepth",
"==",
"len",
"(",
"self",
".",
"nameSpaces",
")",
")",
":",
"tmp",
"=",
"self",
".",
"nameSpaces",
".",
"pop",
"(",
")",
"self",
".",
"stack",
"=",
"[",
"]",
"# clear stack when namespace ends?",
"if",
"len",
"(",
"self",
".",
"nameStack",
")",
"and",
"is_enum_namestack",
"(",
"self",
".",
"nameStack",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"self",
".",
"braceDepth",
"<",
"10",
":",
"self",
".",
"evaluate_stack",
"(",
")",
"else",
":",
"self",
".",
"nameStack",
"=",
"[",
"]",
"self",
".",
"braceDepth",
"-=",
"1",
"#self.stack = []; print 'BRACE DEPTH', self.braceDepth, 'NS', len(self.nameSpaces)",
"if",
"self",
".",
"curClass",
":",
"debug_print",
"(",
"'CURBD %s'",
"%",
"self",
".",
"_classes_brace_level",
"[",
"self",
".",
"curClass",
"]",
")",
"if",
"(",
"self",
".",
"braceDepth",
"==",
"0",
")",
"or",
"(",
"self",
".",
"curClass",
"and",
"self",
".",
"_classes_brace_level",
"[",
"self",
".",
"curClass",
"]",
"==",
"self",
".",
"braceDepth",
")",
":",
"trace_print",
"(",
"'END OF CLASS DEF'",
")",
"if",
"self",
".",
"accessSpecifierStack",
":",
"self",
".",
"curAccessSpecifier",
"=",
"self",
".",
"accessSpecifierStack",
"[",
"-",
"1",
"]",
"self",
".",
"accessSpecifierStack",
"=",
"self",
".",
"accessSpecifierStack",
"[",
":",
"-",
"1",
"]",
"if",
"self",
".",
"curClass",
"and",
"self",
".",
"classes",
"[",
"self",
".",
"curClass",
"]",
"[",
"'parent'",
"]",
":",
"self",
".",
"curClass",
"=",
"self",
".",
"classes",
"[",
"self",
".",
"curClass",
"]",
"[",
"'parent'",
"]",
"else",
":",
"self",
".",
"curClass",
"=",
"\"\"",
"#self.curStruct = None",
"self",
".",
"stack",
"=",
"[",
"]",
"#if self.curStruct: self.curStruct = None",
"if",
"self",
".",
"braceDepth",
"==",
"0",
"or",
"(",
"self",
".",
"curStruct",
"and",
"self",
".",
"_structs_brace_level",
"[",
"self",
".",
"curStruct",
"[",
"'type'",
"]",
"]",
"==",
"self",
".",
"braceDepth",
")",
":",
"trace_print",
"(",
"'END OF STRUCT DEF'",
")",
"self",
".",
"curStruct",
"=",
"None",
"if",
"self",
".",
"_method_body",
"and",
"(",
"self",
".",
"braceDepth",
"+",
"1",
")",
"<=",
"self",
".",
"_method_body",
":",
"self",
".",
"_method_body",
"=",
"None",
"self",
".",
"stack",
"=",
"[",
"]",
"self",
".",
"nameStack",
"=",
"[",
"]",
"trace_print",
"(",
"'FORCE CLEAR METHBODY'",
")",
"if",
"(",
"tok",
".",
"type",
"==",
"'OPEN_PAREN'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'CLOSE_PAREN'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'OPEN_SQUARE_BRACKET'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'CLOSE_SQUARE_BRACKET'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'TAB'",
")",
":",
"pass",
"elif",
"(",
"tok",
".",
"type",
"==",
"'EQUALS'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'COMMA'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'BACKSLASH'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'DIVIDE'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'PIPE'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'PERCENT'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'CARET'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'EXCLAMATION'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'SQUOTE'",
")",
":",
"pass",
"elif",
"(",
"tok",
".",
"type",
"==",
"'NUMBER'",
"or",
"tok",
".",
"type",
"==",
"'FLOAT_NUMBER'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'MINUS'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'PLUS'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'STRING_LITERAL'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"type",
"==",
"'NAME'",
"or",
"tok",
".",
"type",
"==",
"'AMPERSTAND'",
"or",
"tok",
".",
"type",
"==",
"'ASTERISK'",
"or",
"tok",
".",
"type",
"==",
"'CHAR_LITERAL'",
")",
":",
"if",
"tok",
".",
"value",
"in",
"ignoreSymbols",
":",
"debug_print",
"(",
"\"Ignore symbol %s\"",
"%",
"tok",
".",
"value",
")",
"elif",
"(",
"tok",
".",
"value",
"==",
"'class'",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"tok",
".",
"value",
"in",
"supportedAccessSpecifier",
":",
"if",
"len",
"(",
"self",
".",
"nameStack",
")",
"and",
"self",
".",
"nameStack",
"[",
"0",
"]",
"in",
"(",
"\"class\"",
",",
"\"struct\"",
",",
"\"union\"",
")",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"elif",
"self",
".",
"braceDepth",
"==",
"len",
"(",
"self",
".",
"nameSpaces",
")",
"+",
"1",
"or",
"self",
".",
"braceDepth",
"==",
"(",
"len",
"(",
"self",
".",
"nameSpaces",
")",
"+",
"len",
"(",
"self",
".",
"curClass",
".",
"split",
"(",
"\"::\"",
")",
")",
")",
":",
"self",
".",
"curAccessSpecifier",
"=",
"tok",
".",
"value",
"self",
".",
"accessSpecifierScratch",
".",
"append",
"(",
"tok",
".",
"value",
")",
"debug_print",
"(",
"\"curAccessSpecifier updated to %s\"",
"%",
"self",
".",
"curAccessSpecifier",
")",
"self",
".",
"stack",
"=",
"[",
"]",
"else",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"if",
"self",
".",
"anon_union_counter",
"[",
"0",
"]",
"==",
"self",
".",
"braceDepth",
":",
"self",
".",
"anon_union_counter",
"=",
"[",
"-",
"1",
",",
"0",
"]",
"elif",
"(",
"tok",
".",
"type",
"==",
"'COLON'",
")",
":",
"#Dont want colon to be first in stack",
"if",
"len",
"(",
"self",
".",
"nameStack",
")",
"==",
"0",
":",
"self",
".",
"accessSpecifierScratch",
"=",
"[",
"]",
"continue",
"# Handle situation where access specifiers can be multi words such as \"public slots\"",
"jns",
"=",
"\" \"",
".",
"join",
"(",
"self",
".",
"accessSpecifierScratch",
"+",
"self",
".",
"nameStack",
")",
"if",
"jns",
"in",
"supportedAccessSpecifier",
":",
"self",
".",
"curAccessSpecifier",
"=",
"jns",
"debug_print",
"(",
"\"curAccessSpecifier updated to %s\"",
"%",
"self",
".",
"curAccessSpecifier",
")",
"self",
".",
"stack",
"=",
"[",
"]",
"self",
".",
"nameStack",
"=",
"[",
"]",
"else",
":",
"self",
".",
"nameStack",
".",
"append",
"(",
"tok",
".",
"value",
")",
"self",
".",
"accessSpecifierScratch",
"=",
"[",
"]",
"elif",
"(",
"tok",
".",
"type",
"==",
"'SEMI_COLON'",
")",
":",
"if",
"self",
".",
"anon_union_counter",
"[",
"0",
"]",
"==",
"self",
".",
"braceDepth",
"and",
"self",
".",
"anon_union_counter",
"[",
"1",
"]",
":",
"debug_print",
"(",
"\"Creating anonymous union\"",
")",
"#Force the processing of an anonymous union",
"saved_namestack",
"=",
"self",
".",
"nameStack",
"[",
":",
"]",
"saved_stack",
"=",
"self",
".",
"stack",
"[",
":",
"]",
"self",
".",
"nameStack",
"=",
"[",
"\"\"",
"]",
"self",
".",
"stack",
"=",
"self",
".",
"nameStack",
"+",
"[",
"\";\"",
"]",
"self",
".",
"nameStack",
"=",
"self",
".",
"nameStack",
"[",
"0",
":",
"1",
"]",
"debug_print",
"(",
"\"pre eval anon stack\"",
")",
"self",
".",
"evaluate_stack",
"(",
"tok",
".",
"type",
")",
"debug_print",
"(",
"\"post eval anon stack\"",
")",
"self",
".",
"nameStack",
"=",
"saved_namestack",
"self",
".",
"stack",
"=",
"saved_stack",
"self",
".",
"anon_union_counter",
"=",
"[",
"-",
"1",
",",
"0",
"]",
"if",
"(",
"self",
".",
"braceDepth",
"<",
"10",
")",
":",
"self",
".",
"evaluate_stack",
"(",
"tok",
".",
"type",
")",
"self",
".",
"stack",
"=",
"[",
"]",
"self",
".",
"nameStack",
"=",
"[",
"]",
"except",
":",
"if",
"(",
"debug",
")",
":",
"raise",
"raise",
"CppParseError",
"(",
"\"Not able to parse %s on line %d evaluating \\\"%s\\\"\\nError around: %s\"",
"%",
"(",
"self",
".",
"headerFileName",
",",
"tok",
".",
"lineno",
",",
"tok",
".",
"value",
",",
"\" \"",
".",
"join",
"(",
"self",
".",
"nameStack",
")",
")",
")",
"self",
".",
"finalize",
"(",
")",
"global",
"parseHistory",
"parseHistory",
"=",
"[",
"]",
"# Delete some temporary variables",
"for",
"key",
"in",
"[",
"\"_precomp_macro_buf\"",
",",
"\"nameStack\"",
",",
"\"nameSpaces\"",
",",
"\"curAccessSpecifier\"",
",",
"\"accessSpecifierStack\"",
",",
"\"accessSpecifierScratch\"",
",",
"\"nameStackHistory\"",
",",
"\"anon_struct_counter\"",
",",
"\"anon_union_counter\"",
",",
"\"_classes_brace_level\"",
",",
"\"_forward_decls\"",
",",
"\"stack\"",
",",
"\"mainClass\"",
",",
"\"curStruct\"",
",",
"\"_template_typenames\"",
",",
"\"_method_body\"",
",",
"\"braceDepth\"",
",",
"\"_structs_brace_level\"",
",",
"\"typedefs_order\"",
",",
"\"curTemplate\"",
",",
"\"templateRegistry\"",
"]",
":",
"del",
"self",
".",
"__dict__",
"[",
"key",
"]"
] | https://github.com/BDLDev/bdlauncher/blob/d10fb098852ebcf9fb71afb23052a463ee7b5d0a/scripts/cppheaderparser.py#L2070-L2426 |
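A minimal usage sketch for the `CppHeader.__init__` captured above (editorial addition, not dataset content). The `argType` values `"file"` and `"string"` come from the function itself; the header text and the `classes`/`properties` lookups follow CppHeaderParser's documented API and should be read as an assumption, not something asserted by this row:

```python
import CppHeaderParser

# argType="string" parses the text directly; the default "file" reads from disk.
header = CppHeaderParser.CppHeader(
    "class Point { public: int x; int y; };",
    argType="string",
)
print(list(header.classes.keys()))                                  # ['Point']
print(header.classes["Point"]["properties"]["public"][0]["name"])   # 'x'
```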
||
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/python/ops/control_flow_ops.py | python | GradLoopState.forward_sync | (self) | return self._forward_sync | A control trigger node for synchronization in the forward loop.
One main use is to keep the push ops of a stack executed in the
iteration order. | A control trigger node for synchronization in the forward loop. | [
"A",
"control",
"trigger",
"node",
"for",
"synchronization",
"in",
"the",
"forward",
"loop",
"."
] | def forward_sync(self):
"""A control trigger node for synchronization in the forward loop.
One main use is to keep the push ops of a stack executed in the
iteration order.
"""
if self._forward_sync is None:
with ops.control_dependencies(None):
self._forward_sync = control_trigger(name="f_sync")
self._forward_sync._set_control_flow_context(self._forward_context)
self._forward_index.op._add_control_input(self._forward_sync)
return self._forward_sync | [
"def",
"forward_sync",
"(",
"self",
")",
":",
"if",
"self",
".",
"_forward_sync",
"is",
"None",
":",
"with",
"ops",
".",
"control_dependencies",
"(",
"None",
")",
":",
"self",
".",
"_forward_sync",
"=",
"control_trigger",
"(",
"name",
"=",
"\"f_sync\"",
")",
"self",
".",
"_forward_sync",
".",
"_set_control_flow_context",
"(",
"self",
".",
"_forward_context",
")",
"self",
".",
"_forward_index",
".",
"op",
".",
"_add_control_input",
"(",
"self",
".",
"_forward_sync",
")",
"return",
"self",
".",
"_forward_sync"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/ops/control_flow_ops.py#L762-L773 |
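`forward_sync` above is a lazily created, cached control-trigger node. Stripped of the TensorFlow graph machinery, the memoized-property pattern it relies on looks like this (a dependency-free sketch; `LoopState` and the `object()` stand-in are invented for illustration):

```python
class LoopState:
    def __init__(self):
        self._forward_sync = None  # created on first access, then reused

    @property
    def forward_sync(self):
        if self._forward_sync is None:
            # stands in for control_trigger(name="f_sync") plus the
            # control-flow-context wiring done in the real property
            self._forward_sync = object()
        return self._forward_sync

state = LoopState()
assert state.forward_sync is state.forward_sync  # same node on every access
```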
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/grid.py | python | GridSizeEvent.GetRowOrCol | (*args, **kwargs) | return _grid.GridSizeEvent_GetRowOrCol(*args, **kwargs) | GetRowOrCol(self) -> int | GetRowOrCol(self) -> int | [
"GetRowOrCol",
"(",
"self",
")",
"-",
">",
"int"
] | def GetRowOrCol(*args, **kwargs):
"""GetRowOrCol(self) -> int"""
return _grid.GridSizeEvent_GetRowOrCol(*args, **kwargs) | [
"def",
"GetRowOrCol",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_grid",
".",
"GridSizeEvent_GetRowOrCol",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/grid.py#L2357-L2359 |
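A hedged sketch of where `GetRowOrCol` is typically called, inside a grid size-event handler (the handler and binding are illustrative; `wx.grid.EVT_GRID_COL_SIZE` is the standard wxPython event name, assumed rather than taken from this row):

```python
import wx.grid

def on_col_size(event):
    # For EVT_GRID_COL_SIZE the value is the resized column's index;
    # for EVT_GRID_ROW_SIZE it is the row's index.
    print("resized index:", event.GetRowOrCol())
    event.Skip()

# grid.Bind(wx.grid.EVT_GRID_COL_SIZE, on_col_size)
```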
|
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | scripts/abins/spowdersemiempiricalcalculator.py | python | SPowderSemiEmpiricalCalculator._calculate_s | (self) | Calculate structure factor by dispatching to appropriate 1d or 2d workflow | Calculate structure factor by dispatching to appropriate 1d or 2d workflow | [
"Calculate",
"structure",
"factor",
"by",
"dispatching",
"to",
"appropriate",
"1d",
"or",
"2d",
"workflow"
] | def _calculate_s(self) -> SData:
"""Calculate structure factor by dispatching to appropriate 1d or 2d workflow"""
from abins.constants import ONE_DIMENSIONAL_INSTRUMENTS, TWO_DIMENSIONAL_INSTRUMENTS
# Compute tensors and traces, write to cache for access during atomic s calculations
powder_calculator = abins.PowderCalculator(filename=self._input_filename, abins_data=self._abins_data)
self._powder_data = powder_calculator.get_formatted_data()
# Dispatch to appropriate routine
if self._instrument.get_name() in ONE_DIMENSIONAL_INSTRUMENTS:
return self._calculate_s_powder_1d(
isotropic_fundamentals=abins.parameters.development.get('isotropic_fundamentals', False))
elif self._instrument.get_name() in TWO_DIMENSIONAL_INSTRUMENTS:
return self._calculate_s_powder_2d()
else:
raise ValueError('Instrument "{}" is not recognised, cannot perform semi-empirical '
'powder averaging.'.format(self._instrument.get_name())) | [
"def",
"_calculate_s",
"(",
"self",
")",
"->",
"SData",
":",
"from",
"abins",
".",
"constants",
"import",
"ONE_DIMENSIONAL_INSTRUMENTS",
",",
"TWO_DIMENSIONAL_INSTRUMENTS",
"# Compute tensors and traces, write to cache for access during atomic s calculations",
"powder_calculator",
"=",
"abins",
".",
"PowderCalculator",
"(",
"filename",
"=",
"self",
".",
"_input_filename",
",",
"abins_data",
"=",
"self",
".",
"_abins_data",
")",
"self",
".",
"_powder_data",
"=",
"powder_calculator",
".",
"get_formatted_data",
"(",
")",
"# Dispatch to appropriate routine",
"if",
"self",
".",
"_instrument",
".",
"get_name",
"(",
")",
"in",
"ONE_DIMENSIONAL_INSTRUMENTS",
":",
"return",
"self",
".",
"_calculate_s_powder_1d",
"(",
"isotropic_fundamentals",
"=",
"abins",
".",
"parameters",
".",
"development",
".",
"get",
"(",
"'isotropic_fundamentals'",
",",
"False",
")",
")",
"elif",
"self",
".",
"_instrument",
".",
"get_name",
"(",
")",
"in",
"TWO_DIMENSIONAL_INSTRUMENTS",
":",
"return",
"self",
".",
"_calculate_s_powder_2d",
"(",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Instrument \"{}\" is not recognised, cannot perform semi-empirical '",
"'powder averaging.'",
".",
"format",
"(",
"self",
".",
"_instrument",
".",
"get_name",
"(",
")",
")",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/abins/spowdersemiempiricalcalculator.py#L242-L258 |
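`_calculate_s` above is a pure dispatcher: it looks the instrument name up in two constant collections and routes to a 1-D or 2-D workflow. A dependency-free sketch of that pattern (the instrument names below are invented, not Abins' actual constants):

```python
ONE_DIMENSIONAL_INSTRUMENTS = {"InstrumentA"}  # illustrative stand-ins
TWO_DIMENSIONAL_INSTRUMENTS = {"InstrumentB"}

def calculate_s(name):
    if name in ONE_DIMENSIONAL_INSTRUMENTS:
        return "1d powder workflow"
    elif name in TWO_DIMENSIONAL_INSTRUMENTS:
        return "2d powder workflow"
    raise ValueError('Instrument "{}" is not recognised, cannot perform '
                     'semi-empirical powder averaging.'.format(name))

assert calculate_s("InstrumentA") == "1d powder workflow"
```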
||
apple/turicreate | cce55aa5311300e3ce6af93cb45ba791fd1bdf49 | src/external/coremltools_wrap/coremltools/coremltools/converters/onnx/_error_utils.py | python | ErrorHandling.unsupported_op | (
self, node, # type: Node
) | Either raise an error for an unsupported op type or return custom layer add function | Either raise an error for an unsupported op type or return custom layer add function | [
"Either",
"raise",
"an",
"error",
"for",
"an",
"unsupported",
"op",
"type",
"or",
"return",
"custom",
"layer",
"add",
"function"
] | def unsupported_op(
self, node, # type: Node
):
# type: (...) -> Callable[[Any, Node, Graph, ErrorHandling], None]
"""
Either raise an error for an unsupported op type or return custom layer add function
"""
if self.add_custom_layers:
from ._operators import _convert_custom
return _convert_custom
else:
raise TypeError(
"ONNX node of type {} is not supported. {}\n".format(
node.op_type, self.rerun_suggestion
)
) | [
"def",
"unsupported_op",
"(",
"self",
",",
"node",
",",
"# type: Node",
")",
":",
"# type: (...) -> Callable[[Any, Node, Graph, ErrorHandling], None]",
"if",
"self",
".",
"add_custom_layers",
":",
"from",
".",
"_operators",
"import",
"_convert_custom",
"return",
"_convert_custom",
"else",
":",
"raise",
"TypeError",
"(",
"\"ONNX node of type {} is not supported. {}\\n\"",
".",
"format",
"(",
"node",
".",
"op_type",
",",
"self",
".",
"rerun_suggestion",
")",
")"
] | https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/coremltools_wrap/coremltools/coremltools/converters/onnx/_error_utils.py#L31-L47 |
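`unsupported_op` either hands back a fallback converter or raises, depending on a single flag. A self-contained sketch of that either-return-a-handler-or-raise idiom (all names below are invented; this is not the coremltools API):

```python
def make_unsupported_handler(add_custom_layers, rerun_suggestion=""):
    def unsupported_op(node_op_type):
        if add_custom_layers:
            return lambda *args: None  # stands in for _convert_custom
        raise TypeError("ONNX node of type {} is not supported. {}\n"
                        .format(node_op_type, rerun_suggestion))
    return unsupported_op

handler = make_unsupported_handler(add_custom_layers=True)
assert callable(handler("SomeCustomOp"))  # custom-layer path returns a converter
```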
||
Polidea/SiriusObfuscator | b0e590d8130e97856afe578869b83a209e2b19be | SymbolExtractorAndRenamer/compiler-rt/lib/sanitizer_common/scripts/cpplint.py | python | IsCppString | (line) | return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1 | Does line terminate so, that the next symbol is in string constant.
This function does not consider single-line nor multi-line comments.
Args:
line: is a partial line of code starting from the 0..n.
Returns:
True, if next character appended to 'line' is inside a
string constant. | Does line terminate so, that the next symbol is in string constant. | [
"Does",
"line",
"terminate",
"so",
"that",
"the",
"next",
"symbol",
"is",
"in",
"string",
"constant",
"."
] | def IsCppString(line):
"""Does line terminate so, that the next symbol is in string constant.
This function does not consider single-line nor multi-line comments.
Args:
line: is a partial line of code starting from the 0..n.
Returns:
True, if next character appended to 'line' is inside a
string constant.
"""
line = line.replace(r'\\', 'XX') # after this, \\" does not match to \"
return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1 | [
"def",
"IsCppString",
"(",
"line",
")",
":",
"line",
"=",
"line",
".",
"replace",
"(",
"r'\\\\'",
",",
"'XX'",
")",
"# after this, \\\\\" does not match to \\\"",
"return",
"(",
"(",
"line",
".",
"count",
"(",
"'\"'",
")",
"-",
"line",
".",
"count",
"(",
"r'\\\"'",
")",
"-",
"line",
".",
"count",
"(",
"\"'\\\"'\"",
")",
")",
"&",
"1",
")",
"==",
"1"
] | https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/compiler-rt/lib/sanitizer_common/scripts/cpplint.py#L909-L923 |
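Because `IsCppString` is a pure function of its argument, its behaviour is easy to spot-check; the cases below follow directly from the quote-counting logic shown above (the sample lines are invented):

```python
assert IsCppString('x = "abc')        # unterminated: the next char is inside the string
assert not IsCppString('x = "abc"')   # the string is closed
assert not IsCppString('x = "a\\""')  # an escaped quote does not close the string
assert not IsCppString("c = '\"'")    # '"' here is a char literal, not a string delimiter
```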
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/ast.py | python | iter_fields | (node) | Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields``
that is present on *node*. | Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields``
that is present on *node*. | [
"Yield",
"a",
"tuple",
"of",
"(",
"fieldname",
"value",
")",
"for",
"each",
"field",
"in",
"node",
".",
"_fields",
"that",
"is",
"present",
"on",
"*",
"node",
"*",
"."
] | def iter_fields(node):
"""
Yield a tuple of ``(fieldname, value)`` for each field in ``node._fields``
that is present on *node*.
"""
for field in node._fields:
try:
yield field, getattr(node, field)
except AttributeError:
pass | [
"def",
"iter_fields",
"(",
"node",
")",
":",
"for",
"field",
"in",
"node",
".",
"_fields",
":",
"try",
":",
"yield",
"field",
",",
"getattr",
"(",
"node",
",",
"field",
")",
"except",
"AttributeError",
":",
"pass"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/ast.py#L181-L190 |
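A quick standard-library demonstration of `iter_fields` (runnable as-is; the exact field names come from Python's AST grammar and vary slightly across versions):

```python
import ast

node = ast.parse("x = 1").body[0]  # an ast.Assign node
for name, value in ast.iter_fields(node):
    print(name, type(value).__name__)
# targets list
# value Constant          (Num on Pythons before 3.8)
# type_comment NoneType   (field present on 3.8+)
```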
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/richtext.py | python | RichTextRange.IsWithin | (*args, **kwargs) | return _richtext.RichTextRange_IsWithin(*args, **kwargs) | IsWithin(self, RichTextRange range) -> bool
Returns true if this range is completely within 'range' | IsWithin(self, RichTextRange range) -> bool | [
"IsWithin",
"(",
"self",
"RichTextRange",
"range",
")",
"-",
">",
"bool"
] | def IsWithin(*args, **kwargs):
"""
IsWithin(self, RichTextRange range) -> bool
Returns true if this range is completely within 'range'
"""
return _richtext.RichTextRange_IsWithin(*args, **kwargs) | [
"def",
"IsWithin",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextRange_IsWithin",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/richtext.py#L995-L1001 |
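A hedged illustration of the documented semantics ("true if this range is completely within 'range'"); it assumes a wxPython install and the standard `RichTextRange(start, end)` constructor:

```python
import wx.richtext as rt

inner = rt.RichTextRange(2, 5)
outer = rt.RichTextRange(0, 10)
assert inner.IsWithin(outer)      # [2, 5] lies entirely inside [0, 10]
assert not outer.IsWithin(inner)  # containment is not symmetric
```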
|
kamyu104/LeetCode-Solutions | 77605708a927ea3b85aee5a479db733938c7c211 | Python/poor-pigs.py | python | Solution.poorPigs | (self, buckets, minutesToDie, minutesToTest) | return int(math.ceil(math.log(buckets) / math.log(minutesToTest / minutesToDie + 1))) | :type buckets: int
:type minutesToDie: int
:type minutesToTest: int
:rtype: int | :type buckets: int
:type minutesToDie: int
:type minutesToTest: int
:rtype: int | [
":",
"type",
"buckets",
":",
"int",
":",
"type",
"minutesToDie",
":",
"int",
":",
"type",
"minutesToTest",
":",
"int",
":",
"rtype",
":",
"int"
] | def poorPigs(self, buckets, minutesToDie, minutesToTest):
"""
:type buckets: int
:type minutesToDie: int
:type minutesToTest: int
:rtype: int
"""
return int(math.ceil(math.log(buckets) / math.log(minutesToTest / minutesToDie + 1))) | [
"def",
"poorPigs",
"(",
"self",
",",
"buckets",
",",
"minutesToDie",
",",
"minutesToTest",
")",
":",
"return",
"int",
"(",
"math",
".",
"ceil",
"(",
"math",
".",
"log",
"(",
"buckets",
")",
"/",
"math",
".",
"log",
"(",
"minutesToTest",
"/",
"minutesToDie",
"+",
"1",
")",
")",
")"
] | https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/poor-pigs.py#L8-L15 |
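The formula works because each pig survives `minutesToTest / minutesToDie` test rounds and can therefore end in that many plus one distinguishable states, so `p` pigs jointly distinguish `(tests + 1) ** p` buckets. A worked check on the classic instance (the numbers are illustrative):

```python
import math

buckets, minutes_to_die, minutes_to_test = 1000, 15.0, 60.0
states = minutes_to_test / minutes_to_die + 1  # 5 outcomes per pig
pigs = int(math.ceil(math.log(buckets) / math.log(states)))
assert pigs == 5
assert states ** pigs >= buckets > states ** (pigs - 1)  # 3125 >= 1000 > 625
```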
|
y123456yz/reading-and-annotate-mongodb-3.6 | 93280293672ca7586dc24af18132aa61e4ed7fcf | mongo/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Variables/PathVariable.py | python | _PathVariableClass.PathIsDir | (self, key, val, env) | Validator to check if Path is a directory. | Validator to check if Path is a directory. | [
"Validator",
"to",
"check",
"if",
"Path",
"is",
"a",
"directory",
"."
] | def PathIsDir(self, key, val, env):
"""Validator to check if Path is a directory."""
if not os.path.isdir(val):
if os.path.isfile(val):
m = 'Directory path for option %s is a file: %s'
else:
m = 'Directory path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val)) | [
"def",
"PathIsDir",
"(",
"self",
",",
"key",
",",
"val",
",",
"env",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"val",
")",
":",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"val",
")",
":",
"m",
"=",
"'Directory path for option %s is a file: %s'",
"else",
":",
"m",
"=",
"'Directory path for option %s does not exist: %s'",
"raise",
"SCons",
".",
"Errors",
".",
"UserError",
"(",
"m",
"%",
"(",
"key",
",",
"val",
")",
")"
] | https://github.com/y123456yz/reading-and-annotate-mongodb-3.6/blob/93280293672ca7586dc24af18132aa61e4ed7fcf/mongo/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Variables/PathVariable.py#L86-L93 |
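`PathIsDir` is normally passed to `PathVariable` when declaring a build option; a typical SConstruct snippet (the option name and default path are invented, while `Variables`, `PathVariable`, and `Environment` are the names SCons provides inside an SConstruct):

```python
# Inside an SConstruct file:
vars = Variables()
vars.Add(PathVariable('BUILDDIR', 'where build output goes', '/tmp/build',
                      PathVariable.PathIsDir))  # rejects files and missing paths
env = Environment(variables=vars)
```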
||
NASA-SW-VnV/ikos | 71325dfb94737332542caa708d7537752021522d | analyzer/python/ikos/scan.py | python | check_output | (cmd) | Run the given command and return the standard output, in bytes | Run the given command and return the standard output, in bytes | [
"Run",
"the",
"given",
"command",
"and",
"return",
"the",
"standard",
"output",
"in",
"bytes"
] | def check_output(cmd):
''' Run the given command and return the standard output, in bytes '''
log.debug('Running %s' % command_string(cmd))
try:
return subprocess.check_output(cmd)
except OSError as e:
printf('error: %s: %s\n', cmd[0], e.strerror, file=sys.stderr)
sys.exit(e.errno) | [
"def",
"check_output",
"(",
"cmd",
")",
":",
"log",
".",
"debug",
"(",
"'Running %s'",
"%",
"command_string",
"(",
"cmd",
")",
")",
"try",
":",
"return",
"subprocess",
".",
"check_output",
"(",
"cmd",
")",
"except",
"OSError",
"as",
"e",
":",
"printf",
"(",
"'error: %s: %s\\n'",
",",
"cmd",
"[",
"0",
"]",
",",
"e",
".",
"strerror",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"sys",
".",
"exit",
"(",
"e",
".",
"errno",
")"
] | https://github.com/NASA-SW-VnV/ikos/blob/71325dfb94737332542caa708d7537752021522d/analyzer/python/ikos/scan.py#L436-L444 |
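A usage sketch for `check_output` (it assumes the module's `log`, `printf`, and `command_string` helpers are in scope, as they are in scan.py; the command itself is illustrative):

```python
out = check_output(['echo', 'hello'])  # returns bytes, per the docstring
print(out.decode('utf-8').strip())     # hello
```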