nwo (string, 5-86 chars) | sha (string, 40 chars) | path (string, 4-189 chars) | language (string, 1 distinct value) | identifier (string, 1-94 chars) | parameters (string, 2-4.03k chars) | argument_list (string, 1 distinct value) | return_statement (string, 0-11.5k chars) | docstring (string, 1-33.2k chars) | docstring_summary (string, 0-5.15k chars) | docstring_tokens (list) | function (string, 34-151k chars) | function_tokens (list) | url (string, 90-278 chars)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/_misc.py
|
python
|
Joystick.GetUMax
|
(*args, **kwargs)
|
return _misc_.Joystick_GetUMax(*args, **kwargs)
|
GetUMax(self) -> int
|
GetUMax(self) -> int
|
[
"GetUMax",
"(",
"self",
")",
"-",
">",
"int"
] |
def GetUMax(*args, **kwargs):
"""GetUMax(self) -> int"""
return _misc_.Joystick_GetUMax(*args, **kwargs)
|
[
"def",
"GetUMax",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"Joystick_GetUMax",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L2246-L2248
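The SWIG-generated wrapper above only forwards to the compiled `_misc_` extension module. A minimal sketch of that delegation pattern, with a stubbed stand-in so it runs without wxPython (`_MiscStub` and the 255 return value are placeholders, not wxPython's):

```python
class _MiscStub:
    # Placeholder for the compiled `_misc_` extension; 255 is a made-up value.
    @staticmethod
    def Joystick_GetUMax(*args, **kwargs):
        return 255

_misc_ = _MiscStub()

def GetUMax(*args, **kwargs):
    """GetUMax(self) -> int"""
    return _misc_.Joystick_GetUMax(*args, **kwargs)

print(GetUMax(object()))  # -> 255
```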
|
|
tensorflow/deepmath
|
b5b721f54de1d5d6a02d78f5da5995237f9995f9
|
deepmath/deephol/prover.py
|
python
|
check_task
|
(task: proof_assistant_pb2.ProverTask,
prover_options: deephol_pb2.ProverOptions
)
|
return None
|
Check whether the task is valid or supported.
If the task is not valid and supported, then it returns a ProofLog with the
appropriate error message.
Args:
task: Prover task to be performed.
prover_options: Prover options.
Returns:
None at success or a proof log with error message otherwise.
|
Check whether the task is valid or supported.
|
[
"Check",
"whether",
"the",
"task",
"is",
"valid",
"or",
"supported",
"."
] |
def check_task(task: proof_assistant_pb2.ProverTask,
prover_options: deephol_pb2.ProverOptions
) -> Optional[deephol_pb2.ProofLog]:
"""Check whether the task is valid or supported.
If the task is not valid and supported, then it returns a ProofLog with the
appropriate error message.
Args:
task: Prover task to be performed.
prover_options: Prover options.
Returns:
None at success or a proof log with error message otherwise.
"""
def make_empty_log(error_message: Text):
return deephol_pb2.ProofLog(
error_message=error_message,
num_proofs=0,
prover_options=prover_options)
if not task.goals:
return make_empty_log('Task has no theorems to prove')
elif len(task.goals) > 1:
return make_empty_log('Multiple theorems in one task are not supported '
'yet.')
return None
|
[
"def",
"check_task",
"(",
"task",
":",
"proof_assistant_pb2",
".",
"ProverTask",
",",
"prover_options",
":",
"deephol_pb2",
".",
"ProverOptions",
")",
"->",
"Optional",
"[",
"deephol_pb2",
".",
"ProofLog",
"]",
":",
"def",
"make_empty_log",
"(",
"error_message",
":",
"Text",
")",
":",
"return",
"deephol_pb2",
".",
"ProofLog",
"(",
"error_message",
"=",
"error_message",
",",
"num_proofs",
"=",
"0",
",",
"prover_options",
"=",
"prover_options",
")",
"if",
"not",
"task",
".",
"goals",
":",
"return",
"make_empty_log",
"(",
"'Task has no theorems to prove'",
")",
"elif",
"len",
"(",
"task",
".",
"goals",
")",
">",
"1",
":",
"return",
"make_empty_log",
"(",
"'Multiple theorems in one task are not supported '",
"'yet.'",
")",
"return",
"None"
] |
https://github.com/tensorflow/deepmath/blob/b5b721f54de1d5d6a02d78f5da5995237f9995f9/deepmath/deephol/prover.py#L57-L84
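A dependency-free sketch of the same validate-or-report contract, using a plain string where the real code builds a `deephol_pb2.ProofLog` (the proto types are not assumed installed):

```python
from typing import Optional

def check_task_sketch(goals: list) -> Optional[str]:
    # None on success, an error message otherwise.
    if not goals:
        return 'Task has no theorems to prove'
    if len(goals) > 1:
        return 'Multiple theorems in one task are not supported yet.'
    return None

assert check_task_sketch(['thm']) is None
assert check_task_sketch([]) == 'Task has no theorems to prove'
```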
|
|
y123456yz/reading-and-annotate-mongodb-3.6
|
93280293672ca7586dc24af18132aa61e4ed7fcf
|
mongo/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Node/FS.py
|
python
|
File.remove
|
(self)
|
return None
|
Remove this file.
|
Remove this file.
|
[
"Remove",
"this",
"file",
"."
] |
def remove(self):
"""Remove this file."""
if self.exists() or self.islink():
self.fs.unlink(self.get_internal_path())
return 1
return None
|
[
"def",
"remove",
"(",
"self",
")",
":",
"if",
"self",
".",
"exists",
"(",
")",
"or",
"self",
".",
"islink",
"(",
")",
":",
"self",
".",
"fs",
".",
"unlink",
"(",
"self",
".",
"get_internal_path",
"(",
")",
")",
"return",
"1",
"return",
"None"
] |
https://github.com/y123456yz/reading-and-annotate-mongodb-3.6/blob/93280293672ca7586dc24af18132aa61e4ed7fcf/mongo/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Node/FS.py#L3100-L3105
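The method returns 1 when something was unlinked and None otherwise. A hedged analogue built on plain `os` calls rather than SCons's `self.fs` abstraction:

```python
import os
import tempfile

def remove_if_present(path):
    # Same contract as File.remove above: 1 if something was unlinked,
    # None if nothing existed at the path.
    if os.path.exists(path) or os.path.islink(path):
        os.unlink(path)
        return 1
    return None

fd, tmp = tempfile.mkstemp()
os.close(fd)
print(remove_if_present(tmp))  # -> 1
print(remove_if_present(tmp))  # -> None
```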
|
|
swift/swift
|
12d031cf8177fdec0137f9aa7e2912fa23c4416b
|
3rdParty/SCons/scons-3.0.1/engine/SCons/Executor.py
|
python
|
Executor.get_action_side_effects
|
(self)
|
return result
|
Returns all side effects for all batches of this
Executor used by the underlying Action.
|
Returns all side effects for all batches of this
Executor used by the underlying Action.
|
[
"Returns",
"all",
"side",
"effects",
"for",
"all",
"batches",
"of",
"this",
"Executor",
"used",
"by",
"the",
"underlying",
"Action",
"."
] |
def get_action_side_effects(self):
"""Returns all side effects for all batches of this
Executor used by the underlying Action.
"""
result = SCons.Util.UniqueList([])
for target in self.get_action_targets():
result.extend(target.side_effects)
return result
|
[
"def",
"get_action_side_effects",
"(",
"self",
")",
":",
"result",
"=",
"SCons",
".",
"Util",
".",
"UniqueList",
"(",
"[",
"]",
")",
"for",
"target",
"in",
"self",
".",
"get_action_targets",
"(",
")",
":",
"result",
".",
"extend",
"(",
"target",
".",
"side_effects",
")",
"return",
"result"
] |
https://github.com/swift/swift/blob/12d031cf8177fdec0137f9aa7e2912fa23c4416b/3rdParty/SCons/scons-3.0.1/engine/SCons/Executor.py#L336-L344
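`SCons.Util.UniqueList` keeps extension order while dropping duplicates; a small stand-alone equivalent, assuming simple hashable node names:

```python
def unique_side_effects(batches):
    # Order-preserving de-duplication, standing in for SCons.Util.UniqueList.
    seen, result = set(), []
    for side_effects in batches:
        for node in side_effects:
            if node not in seen:
                seen.add(node)
                result.append(node)
    return result

print(unique_side_effects([['a.log', 'b.log'], ['b.log', 'c.log']]))
# -> ['a.log', 'b.log', 'c.log']
```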
|
|
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/util/loader.py
|
python
|
load_op_library
|
(path)
|
return ret
|
Loads a contrib op library from the given path.
NOTE(mrry): On Windows, we currently assume that some contrib op
libraries are statically linked into the main TensorFlow Python
extension DLL - use dynamically linked ops if the .so is present.
Args:
path: An absolute path to a shared object file.
Returns:
A Python module containing the Python wrappers for Ops defined in the
plugin.
|
Loads a contrib op library from the given path.
|
[
"Loads",
"a",
"contrib",
"op",
"library",
"from",
"the",
"given",
"path",
"."
] |
def load_op_library(path):
"""Loads a contrib op library from the given path.
NOTE(mrry): On Windows, we currently assume that some contrib op
libraries are statically linked into the main TensorFlow Python
extension DLL - use dynamically linked ops if the .so is present.
Args:
path: An absolute path to a shared object file.
Returns:
A Python module containing the Python wrappers for Ops defined in the
plugin.
"""
if os.name == 'nt':
# To avoid making every user_ops aware of windows, re-write
# the file extension from .so to .dll if .so file doesn't exist.
if not os.path.exists(path):
path = re.sub(r'\.so$', '.dll', path)
# Currently we have only some user_ops as dlls on windows - don't try
# to load them if the dll is not found.
# TODO(mrry): Once we have all of them this check should be removed.
if not os.path.exists(path):
return None
path = resource_loader.get_path_to_datafile(path)
ret = load_library.load_op_library(path)
assert ret, 'Could not load %s' % path
return ret
|
[
"def",
"load_op_library",
"(",
"path",
")",
":",
"if",
"os",
".",
"name",
"==",
"'nt'",
":",
"# To avoid making every user_ops aware of windows, re-write",
"# the file extension from .so to .dll if .so file doesn't exist.",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"path",
"=",
"re",
".",
"sub",
"(",
"r'\\.so$'",
",",
"'.dll'",
",",
"path",
")",
"# Currently we have only some user_ops as dlls on windows - don't try",
"# to load them if the dll is not found.",
"# TODO(mrry): Once we have all of them this check should be removed.",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"return",
"None",
"path",
"=",
"resource_loader",
".",
"get_path_to_datafile",
"(",
"path",
")",
"ret",
"=",
"load_library",
".",
"load_op_library",
"(",
"path",
")",
"assert",
"ret",
",",
"'Could not load %s'",
"%",
"path",
"return",
"ret"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/util/loader.py#L30-L58
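The Windows branch rewrites `.so` to `.dll` and bails out when the library is expected to be statically linked. That path logic, isolated (non-Windows platforms pass through untouched):

```python
import os
import re

def platform_library_path(path):
    # On Windows, fall back from .so to .dll; None signals "statically
    # linked, do not try to load". Elsewhere the path is returned as-is.
    if os.name == 'nt' and not os.path.exists(path):
        path = re.sub(r'\.so$', '.dll', path)
        if not os.path.exists(path):
            return None
    return path

print(platform_library_path('my_ops.so'))  # unchanged on POSIX
```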
|
|
Tencent/CMONGO
|
c40380caa14e05509f46993aa8b8da966b09b0b5
|
buildscripts/cpplint.py
|
python
|
CheckOperatorSpacing
|
(filename, clean_lines, linenum, error)
|
Checks for horizontal spacing around operators.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
|
Checks for horizontal spacing around operators.
|
[
"Checks",
"for",
"horizontal",
"spacing",
"around",
"operators",
"."
] |
def CheckOperatorSpacing(filename, clean_lines, linenum, error):
"""Checks for horizontal spacing around operators.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
# Don't try to do spacing checks for operator methods. Do this by
# replacing the troublesome characters with something else,
# preserving column position for all other characters.
#
# The replacement is done repeatedly to avoid false positives from
# operators that call operators.
while True:
match = Match(r'^(.*\boperator\b)(\S+)(\s*\(.*)$', line)
if match:
line = match.group(1) + ('_' * len(match.group(2))) + match.group(3)
else:
break
# We allow no-spaces around = within an if: "if ( (a=Foo()) == 0 )".
# Otherwise not. Note we only check for non-spaces on *both* sides;
# sometimes people put non-spaces on one side when aligning ='s among
# many lines (not that this is behavior that I approve of...)
if Search(r'[\w.]=[\w.]', line) and not Search(r'\b(if|while) ', line):
error(filename, linenum, 'whitespace/operators', 4,
'Missing spaces around =')
# It's ok not to have spaces around binary operators like + - * /, but if
# there's too little whitespace, we get concerned. It's hard to tell,
# though, so we punt on this one for now. TODO.
# You should always have whitespace around binary operators.
#
# Check <= and >= first to avoid false positives with < and >, then
# check non-include lines for spacing around < and >.
#
# If the operator is followed by a comma, assume it's being used in a
# macro context and don't do any checks. This avoids false
# positives.
#
# Note that && is not included here. Those are checked separately
# in CheckRValueReference
match = Search(r'[^<>=!\s](==|!=|<=|>=|\|\|)[^<>=!\s,;\)]', line)
if match:
error(filename, linenum, 'whitespace/operators', 3,
'Missing spaces around %s' % match.group(1))
elif not Match(r'#.*include', line):
# Look for < that is not surrounded by spaces. This is only
# triggered if both sides are missing spaces, even though
# technically should flag if at least one side is missing a
# space. This is done to avoid some false positives with shifts.
match = Match(r'^(.*[^\s<])<[^\s=<,]', line)
if match:
(_, _, end_pos) = CloseExpression(
clean_lines, linenum, len(match.group(1)))
if end_pos <= -1:
error(filename, linenum, 'whitespace/operators', 3,
'Missing spaces around <')
# Look for > that is not surrounded by spaces. Similar to the
# above, we only trigger if both sides are missing spaces to avoid
# false positives with shifts.
match = Match(r'^(.*[^-\s>])>[^\s=>,]', line)
if match:
(_, _, start_pos) = ReverseCloseExpression(
clean_lines, linenum, len(match.group(1)))
if start_pos <= -1:
error(filename, linenum, 'whitespace/operators', 3,
'Missing spaces around >')
# We allow no-spaces around << when used like this: 10<<20, but
# not otherwise (particularly, not when used as streams)
#
# We also allow operators following an opening parenthesis, since
# those tend to be macros that deal with operators.
match = Search(r'(operator|\S)(?:L|UL|ULL|l|ul|ull)?<<([^\s,=])', line)
if (match and match.group(1) != '(' and
not (match.group(1).isdigit() and match.group(2).isdigit()) and
not (match.group(1) == 'operator' and match.group(2) == ';')):
error(filename, linenum, 'whitespace/operators', 3,
'Missing spaces around <<')
# We allow no-spaces around >> for almost anything. This is because
# C++11 allows ">>" to close nested templates, which accounts for
# most cases when ">>" is not followed by a space.
#
# We still warn on ">>" followed by alpha character, because that is
# likely due to ">>" being used for right shifts, e.g.:
# value >> alpha
#
# When ">>" is used to close templates, the alphanumeric letter that
# follows would be part of an identifier, and there should still be
# a space separating the template type and the identifier.
# type<type<type>> alpha
match = Search(r'>>[a-zA-Z_]', line)
if match:
error(filename, linenum, 'whitespace/operators', 3,
'Missing spaces around >>')
# There shouldn't be space around unary operators
match = Search(r'(!\s|~\s|[\s]--[\s;]|[\s]\+\+[\s;])', line)
if match:
error(filename, linenum, 'whitespace/operators', 4,
'Extra space for operator %s' % match.group(1))
|
[
"def",
"CheckOperatorSpacing",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"# Don't try to do spacing checks for operator methods. Do this by",
"# replacing the troublesome characters with something else,",
"# preserving column position for all other characters.",
"#",
"# The replacement is done repeatedly to avoid false positives from",
"# operators that call operators.",
"while",
"True",
":",
"match",
"=",
"Match",
"(",
"r'^(.*\\boperator\\b)(\\S+)(\\s*\\(.*)$'",
",",
"line",
")",
"if",
"match",
":",
"line",
"=",
"match",
".",
"group",
"(",
"1",
")",
"+",
"(",
"'_'",
"*",
"len",
"(",
"match",
".",
"group",
"(",
"2",
")",
")",
")",
"+",
"match",
".",
"group",
"(",
"3",
")",
"else",
":",
"break",
"# We allow no-spaces around = within an if: \"if ( (a=Foo()) == 0 )\".",
"# Otherwise not. Note we only check for non-spaces on *both* sides;",
"# sometimes people put non-spaces on one side when aligning ='s among",
"# many lines (not that this is behavior that I approve of...)",
"if",
"Search",
"(",
"r'[\\w.]=[\\w.]'",
",",
"line",
")",
"and",
"not",
"Search",
"(",
"r'\\b(if|while) '",
",",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/operators'",
",",
"4",
",",
"'Missing spaces around ='",
")",
"# It's ok not to have spaces around binary operators like + - * /, but if",
"# there's too little whitespace, we get concerned. It's hard to tell,",
"# though, so we punt on this one for now. TODO.",
"# You should always have whitespace around binary operators.",
"#",
"# Check <= and >= first to avoid false positives with < and >, then",
"# check non-include lines for spacing around < and >.",
"#",
"# If the operator is followed by a comma, assume it's be used in a",
"# macro context and don't do any checks. This avoids false",
"# positives.",
"#",
"# Note that && is not included here. Those are checked separately",
"# in CheckRValueReference",
"match",
"=",
"Search",
"(",
"r'[^<>=!\\s](==|!=|<=|>=|\\|\\|)[^<>=!\\s,;\\)]'",
",",
"line",
")",
"if",
"match",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/operators'",
",",
"3",
",",
"'Missing spaces around %s'",
"%",
"match",
".",
"group",
"(",
"1",
")",
")",
"elif",
"not",
"Match",
"(",
"r'#.*include'",
",",
"line",
")",
":",
"# Look for < that is not surrounded by spaces. This is only",
"# triggered if both sides are missing spaces, even though",
"# technically should should flag if at least one side is missing a",
"# space. This is done to avoid some false positives with shifts.",
"match",
"=",
"Match",
"(",
"r'^(.*[^\\s<])<[^\\s=<,]'",
",",
"line",
")",
"if",
"match",
":",
"(",
"_",
",",
"_",
",",
"end_pos",
")",
"=",
"CloseExpression",
"(",
"clean_lines",
",",
"linenum",
",",
"len",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
")",
"if",
"end_pos",
"<=",
"-",
"1",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/operators'",
",",
"3",
",",
"'Missing spaces around <'",
")",
"# Look for > that is not surrounded by spaces. Similar to the",
"# above, we only trigger if both sides are missing spaces to avoid",
"# false positives with shifts.",
"match",
"=",
"Match",
"(",
"r'^(.*[^-\\s>])>[^\\s=>,]'",
",",
"line",
")",
"if",
"match",
":",
"(",
"_",
",",
"_",
",",
"start_pos",
")",
"=",
"ReverseCloseExpression",
"(",
"clean_lines",
",",
"linenum",
",",
"len",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
")",
"if",
"start_pos",
"<=",
"-",
"1",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/operators'",
",",
"3",
",",
"'Missing spaces around >'",
")",
"# We allow no-spaces around << when used like this: 10<<20, but",
"# not otherwise (particularly, not when used as streams)",
"#",
"# We also allow operators following an opening parenthesis, since",
"# those tend to be macros that deal with operators.",
"match",
"=",
"Search",
"(",
"r'(operator|\\S)(?:L|UL|ULL|l|ul|ull)?<<([^\\s,=])'",
",",
"line",
")",
"if",
"(",
"match",
"and",
"match",
".",
"group",
"(",
"1",
")",
"!=",
"'('",
"and",
"not",
"(",
"match",
".",
"group",
"(",
"1",
")",
".",
"isdigit",
"(",
")",
"and",
"match",
".",
"group",
"(",
"2",
")",
".",
"isdigit",
"(",
")",
")",
"and",
"not",
"(",
"match",
".",
"group",
"(",
"1",
")",
"==",
"'operator'",
"and",
"match",
".",
"group",
"(",
"2",
")",
"==",
"';'",
")",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/operators'",
",",
"3",
",",
"'Missing spaces around <<'",
")",
"# We allow no-spaces around >> for almost anything. This is because",
"# C++11 allows \">>\" to close nested templates, which accounts for",
"# most cases when \">>\" is not followed by a space.",
"#",
"# We still warn on \">>\" followed by alpha character, because that is",
"# likely due to \">>\" being used for right shifts, e.g.:",
"# value >> alpha",
"#",
"# When \">>\" is used to close templates, the alphanumeric letter that",
"# follows would be part of an identifier, and there should still be",
"# a space separating the template type and the identifier.",
"# type<type<type>> alpha",
"match",
"=",
"Search",
"(",
"r'>>[a-zA-Z_]'",
",",
"line",
")",
"if",
"match",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/operators'",
",",
"3",
",",
"'Missing spaces around >>'",
")",
"# There shouldn't be space around unary operators",
"match",
"=",
"Search",
"(",
"r'(!\\s|~\\s|[\\s]--[\\s;]|[\\s]\\+\\+[\\s;])'",
",",
"line",
")",
"if",
"match",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/operators'",
",",
"4",
",",
"'Extra space for operator %s'",
"%",
"match",
".",
"group",
"(",
"1",
")",
")"
] |
https://github.com/Tencent/CMONGO/blob/c40380caa14e05509f46993aa8b8da966b09b0b5/buildscripts/cpplint.py#L3112-L3220
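cpplint-style check functions report findings through an `error` callback rather than returning them. A minimal collector that could be handed to `CheckOperatorSpacing` (the commented call assumes a `CleansedLines` instance, which is not constructed here):

```python
def collect_errors():
    """Build an error callback plus the list it appends to."""
    findings = []
    def error(filename, linenum, category, confidence, message):
        findings.append((filename, linenum, category, confidence, message))
    return findings, error

findings, error = collect_errors()
# CheckOperatorSpacing('foo.cc', clean_lines, 0, error)  # needs a CleansedLines
print(findings)  # -> [] until a check function reports something
```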
|
||
benoitsteiner/tensorflow-opencl
|
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
|
tensorflow/contrib/tpu/python/tpu/tpu_estimator.py
|
python
|
_ModelFnWrapper._verify_estimator_spec
|
(self, estimator_spec)
|
return estimator_spec
|
Validates the estimator_spec.
|
Validates the estimator_spec.
|
[
"Validates",
"the",
"estimator_spec",
"."
] |
def _verify_estimator_spec(self, estimator_spec):
"""Validates the estimator_spec."""
if isinstance(estimator_spec, TPUEstimatorSpec):
return estimator_spec
err_msg = '{} returned by EstimatorSpec is not supported in TPUEstimator.'
if estimator_spec.training_chief_hooks:
raise ValueError(err_msg.format('training_chief_hooks'))
if estimator_spec.training_hooks:
raise ValueError(err_msg.format('training_hooks'))
if estimator_spec.evaluation_hooks:
raise ValueError(err_msg.format('evaluation_hooks'))
return estimator_spec
|
[
"def",
"_verify_estimator_spec",
"(",
"self",
",",
"estimator_spec",
")",
":",
"if",
"isinstance",
"(",
"estimator_spec",
",",
"TPUEstimatorSpec",
")",
":",
"return",
"estimator_spec",
"err_msg",
"=",
"'{} returned by EstimatorSpec is not supported in TPUEstimator.'",
"if",
"estimator_spec",
".",
"training_chief_hooks",
":",
"raise",
"ValueError",
"(",
"err_msg",
".",
"format",
"(",
"'training_chief_hooks'",
")",
")",
"if",
"estimator_spec",
".",
"training_hooks",
":",
"raise",
"ValueError",
"(",
"err_msg",
".",
"format",
"(",
"'training_hooks'",
")",
")",
"if",
"estimator_spec",
".",
"evaluation_hooks",
":",
"raise",
"ValueError",
"(",
"err_msg",
".",
"format",
"(",
"'evaluation_hooks'",
")",
")",
"return",
"estimator_spec"
] |
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/contrib/tpu/python/tpu/tpu_estimator.py#L1066-L1078
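A trimmed sketch of the same hook-rejection pattern with a stand-in spec class (`_SpecLike` is hypothetical, not a TensorFlow type, and the early `TPUEstimatorSpec` pass-through is omitted):

```python
class _SpecLike:
    # Hypothetical stand-in for an EstimatorSpec carrying hook attributes.
    def __init__(self, **hooks):
        self.training_chief_hooks = hooks.get('training_chief_hooks', [])
        self.training_hooks = hooks.get('training_hooks', [])
        self.evaluation_hooks = hooks.get('evaluation_hooks', [])

def verify(spec):
    err = '{} returned by EstimatorSpec is not supported in TPUEstimator.'
    for name in ('training_chief_hooks', 'training_hooks', 'evaluation_hooks'):
        if getattr(spec, name):
            raise ValueError(err.format(name))
    return spec

verify(_SpecLike())                      # passes
# verify(_SpecLike(training_hooks=[1]))  # would raise ValueError
```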
|
|
strukturag/libheif
|
0082fea96ee70a20c8906a0373bedec0c01777bc
|
scripts/cpplint.py
|
python
|
CheckForNonStandardConstructs
|
(filename, clean_lines, linenum,
nesting_state, error)
|
r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.
Complain about several constructs which gcc-2 accepts, but which are
not standard C++. Warning about these in lint is one way to ease the
transition to new compilers.
- put storage class first (e.g. "static const" instead of "const static").
- "%lld" instead of %qd" in printf-type functions.
- "%1$d" is non-standard in printf-type functions.
- "\%" is an undefined character escape sequence.
- text after #endif is not allowed.
- invalid inner-style forward declaration.
- >? and <? operators, and their >?= and <?= cousins.
Additionally, check for constructor/destructor style violations and reference
members, as it is very convenient to do so while checking for
gcc-2 compliance.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
nesting_state: A NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: A callable to which errors are reported, which takes 4 arguments:
filename, line number, error level, and message
|
r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.
|
[
"r",
"Logs",
"an",
"error",
"if",
"we",
"see",
"certain",
"non",
"-",
"ANSI",
"constructs",
"ignored",
"by",
"gcc",
"-",
"2",
"."
] |
def CheckForNonStandardConstructs(filename, clean_lines, linenum,
nesting_state, error):
r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2.
Complain about several constructs which gcc-2 accepts, but which are
not standard C++. Warning about these in lint is one way to ease the
transition to new compilers.
- put storage class first (e.g. "static const" instead of "const static").
- "%lld" instead of %qd" in printf-type functions.
- "%1$d" is non-standard in printf-type functions.
- "\%" is an undefined character escape sequence.
- text after #endif is not allowed.
- invalid inner-style forward declaration.
- >? and <? operators, and their >?= and <?= cousins.
Additionally, check for constructor/destructor style violations and reference
members, as it is very convenient to do so while checking for
gcc-2 compliance.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
nesting_state: A NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: A callable to which errors are reported, which takes 4 arguments:
filename, line number, error level, and message
"""
# Remove comments from the line, but leave in strings for now.
line = clean_lines.lines[linenum]
if Search(r'printf\s*\(.*".*%[-+ ]?\d*q', line):
error(filename, linenum, 'runtime/printf_format', 3,
'%q in format strings is deprecated. Use %ll instead.')
if Search(r'printf\s*\(.*".*%\d+\$', line):
error(filename, linenum, 'runtime/printf_format', 2,
'%N$ formats are unconventional. Try rewriting to avoid them.')
# Remove escaped backslashes before looking for undefined escapes.
line = line.replace('\\\\', '')
if Search(r'("|\').*\\(%|\[|\(|{)', line):
error(filename, linenum, 'build/printf_format', 3,
'%, [, (, and { are undefined character escapes. Unescape them.')
# For the rest, work with both comments and strings removed.
line = clean_lines.elided[linenum]
if Search(r'\b(const|volatile|void|char|short|int|long'
r'|float|double|signed|unsigned'
r'|schar|u?int8|u?int16|u?int32|u?int64)'
r'\s+(register|static|extern|typedef)\b',
line):
error(filename, linenum, 'build/storage_class', 5,
'Storage-class specifier (static, extern, typedef, etc) should be '
'at the beginning of the declaration.')
if Match(r'\s*#\s*endif\s*[^/\s]+', line):
error(filename, linenum, 'build/endif_comment', 5,
'Uncommented text after #endif is non-standard. Use a comment.')
if Match(r'\s*class\s+(\w+\s*::\s*)+\w+\s*;', line):
error(filename, linenum, 'build/forward_decl', 5,
'Inner-style forward declarations are invalid. Remove this line.')
if Search(r'(\w+|[+-]?\d+(\.\d*)?)\s*(<|>)\?=?\s*(\w+|[+-]?\d+)(\.\d*)?',
line):
error(filename, linenum, 'build/deprecated', 3,
'>? and <? (max and min) operators are non-standard and deprecated.')
if Search(r'^\s*const\s*string\s*&\s*\w+\s*;', line):
# TODO(unknown): Could it be expanded safely to arbitrary references,
# without triggering too many false positives? The first
# attempt triggered 5 warnings for mostly benign code in the regtest, hence
# the restriction.
# Here's the original regexp, for the reference:
# type_name = r'\w+((\s*::\s*\w+)|(\s*<\s*\w+?\s*>))?'
# r'\s*const\s*' + type_name + '\s*&\s*\w+\s*;'
error(filename, linenum, 'runtime/member_string_references', 2,
'const string& members are dangerous. It is much better to use '
'alternatives, such as pointers or simple constants.')
# Everything else in this function operates on class declarations.
# Return early if the top of the nesting stack is not a class, or if
# the class head is not completed yet.
classinfo = nesting_state.InnermostClass()
if not classinfo or not classinfo.seen_open_brace:
return
# The class may have been declared with namespace or classname qualifiers.
# The constructor and destructor will not have those qualifiers.
base_classname = classinfo.name.split('::')[-1]
# Look for single-argument constructors that aren't marked explicit.
# Technically a valid construct, but against style.
explicit_constructor_match = Match(
r'\s+(?:(?:inline|constexpr)\s+)*(explicit\s+)?'
r'(?:(?:inline|constexpr)\s+)*%s\s*'
r'\(((?:[^()]|\([^()]*\))*)\)'
% re.escape(base_classname),
line)
if explicit_constructor_match:
is_marked_explicit = explicit_constructor_match.group(1)
if not explicit_constructor_match.group(2):
constructor_args = []
else:
constructor_args = explicit_constructor_match.group(2).split(',')
# collapse arguments so that commas in template parameter lists and function
# argument parameter lists don't split arguments in two
i = 0
while i < len(constructor_args):
constructor_arg = constructor_args[i]
while (constructor_arg.count('<') > constructor_arg.count('>') or
constructor_arg.count('(') > constructor_arg.count(')')):
constructor_arg += ',' + constructor_args[i + 1]
del constructor_args[i + 1]
constructor_args[i] = constructor_arg
i += 1
defaulted_args = [arg for arg in constructor_args if '=' in arg]
noarg_constructor = (not constructor_args or # empty arg list
# 'void' arg specifier
(len(constructor_args) == 1 and
constructor_args[0].strip() == 'void'))
onearg_constructor = ((len(constructor_args) == 1 and # exactly one arg
not noarg_constructor) or
# all but at most one arg defaulted
(len(constructor_args) >= 1 and
not noarg_constructor and
len(defaulted_args) >= len(constructor_args) - 1))
initializer_list_constructor = bool(
onearg_constructor and
Search(r'\bstd\s*::\s*initializer_list\b', constructor_args[0]))
copy_constructor = bool(
onearg_constructor and
Match(r'(const\s+)?%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&'
% re.escape(base_classname), constructor_args[0].strip()))
if (not is_marked_explicit and
onearg_constructor and
not initializer_list_constructor and
not copy_constructor):
if defaulted_args:
error(filename, linenum, 'runtime/explicit', 5,
'Constructors callable with one argument '
'should be marked explicit.')
else:
error(filename, linenum, 'runtime/explicit', 5,
'Single-parameter constructors should be marked explicit.')
elif is_marked_explicit and not onearg_constructor:
if noarg_constructor:
error(filename, linenum, 'runtime/explicit', 5,
'Zero-parameter constructors should not be marked explicit.')
|
[
"def",
"CheckForNonStandardConstructs",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"nesting_state",
",",
"error",
")",
":",
"# Remove comments from the line, but leave in strings for now.",
"line",
"=",
"clean_lines",
".",
"lines",
"[",
"linenum",
"]",
"if",
"Search",
"(",
"r'printf\\s*\\(.*\".*%[-+ ]?\\d*q'",
",",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'runtime/printf_format'",
",",
"3",
",",
"'%q in format strings is deprecated. Use %ll instead.'",
")",
"if",
"Search",
"(",
"r'printf\\s*\\(.*\".*%\\d+\\$'",
",",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'runtime/printf_format'",
",",
"2",
",",
"'%N$ formats are unconventional. Try rewriting to avoid them.'",
")",
"# Remove escaped backslashes before looking for undefined escapes.",
"line",
"=",
"line",
".",
"replace",
"(",
"'\\\\\\\\'",
",",
"''",
")",
"if",
"Search",
"(",
"r'(\"|\\').*\\\\(%|\\[|\\(|{)'",
",",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/printf_format'",
",",
"3",
",",
"'%, [, (, and { are undefined character escapes. Unescape them.'",
")",
"# For the rest, work with both comments and strings removed.",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"if",
"Search",
"(",
"r'\\b(const|volatile|void|char|short|int|long'",
"r'|float|double|signed|unsigned'",
"r'|schar|u?int8|u?int16|u?int32|u?int64)'",
"r'\\s+(register|static|extern|typedef)\\b'",
",",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/storage_class'",
",",
"5",
",",
"'Storage-class specifier (static, extern, typedef, etc) should be '",
"'at the beginning of the declaration.'",
")",
"if",
"Match",
"(",
"r'\\s*#\\s*endif\\s*[^/\\s]+'",
",",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/endif_comment'",
",",
"5",
",",
"'Uncommented text after #endif is non-standard. Use a comment.'",
")",
"if",
"Match",
"(",
"r'\\s*class\\s+(\\w+\\s*::\\s*)+\\w+\\s*;'",
",",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/forward_decl'",
",",
"5",
",",
"'Inner-style forward declarations are invalid. Remove this line.'",
")",
"if",
"Search",
"(",
"r'(\\w+|[+-]?\\d+(\\.\\d*)?)\\s*(<|>)\\?=?\\s*(\\w+|[+-]?\\d+)(\\.\\d*)?'",
",",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/deprecated'",
",",
"3",
",",
"'>? and <? (max and min) operators are non-standard and deprecated.'",
")",
"if",
"Search",
"(",
"r'^\\s*const\\s*string\\s*&\\s*\\w+\\s*;'",
",",
"line",
")",
":",
"# TODO(unknown): Could it be expanded safely to arbitrary references,",
"# without triggering too many false positives? The first",
"# attempt triggered 5 warnings for mostly benign code in the regtest, hence",
"# the restriction.",
"# Here's the original regexp, for the reference:",
"# type_name = r'\\w+((\\s*::\\s*\\w+)|(\\s*<\\s*\\w+?\\s*>))?'",
"# r'\\s*const\\s*' + type_name + '\\s*&\\s*\\w+\\s*;'",
"error",
"(",
"filename",
",",
"linenum",
",",
"'runtime/member_string_references'",
",",
"2",
",",
"'const string& members are dangerous. It is much better to use '",
"'alternatives, such as pointers or simple constants.'",
")",
"# Everything else in this function operates on class declarations.",
"# Return early if the top of the nesting stack is not a class, or if",
"# the class head is not completed yet.",
"classinfo",
"=",
"nesting_state",
".",
"InnermostClass",
"(",
")",
"if",
"not",
"classinfo",
"or",
"not",
"classinfo",
".",
"seen_open_brace",
":",
"return",
"# The class may have been declared with namespace or classname qualifiers.",
"# The constructor and destructor will not have those qualifiers.",
"base_classname",
"=",
"classinfo",
".",
"name",
".",
"split",
"(",
"'::'",
")",
"[",
"-",
"1",
"]",
"# Look for single-argument constructors that aren't marked explicit.",
"# Technically a valid construct, but against style.",
"explicit_constructor_match",
"=",
"Match",
"(",
"r'\\s+(?:(?:inline|constexpr)\\s+)*(explicit\\s+)?'",
"r'(?:(?:inline|constexpr)\\s+)*%s\\s*'",
"r'\\(((?:[^()]|\\([^()]*\\))*)\\)'",
"%",
"re",
".",
"escape",
"(",
"base_classname",
")",
",",
"line",
")",
"if",
"explicit_constructor_match",
":",
"is_marked_explicit",
"=",
"explicit_constructor_match",
".",
"group",
"(",
"1",
")",
"if",
"not",
"explicit_constructor_match",
".",
"group",
"(",
"2",
")",
":",
"constructor_args",
"=",
"[",
"]",
"else",
":",
"constructor_args",
"=",
"explicit_constructor_match",
".",
"group",
"(",
"2",
")",
".",
"split",
"(",
"','",
")",
"# collapse arguments so that commas in template parameter lists and function",
"# argument parameter lists don't split arguments in two",
"i",
"=",
"0",
"while",
"i",
"<",
"len",
"(",
"constructor_args",
")",
":",
"constructor_arg",
"=",
"constructor_args",
"[",
"i",
"]",
"while",
"(",
"constructor_arg",
".",
"count",
"(",
"'<'",
")",
">",
"constructor_arg",
".",
"count",
"(",
"'>'",
")",
"or",
"constructor_arg",
".",
"count",
"(",
"'('",
")",
">",
"constructor_arg",
".",
"count",
"(",
"')'",
")",
")",
":",
"constructor_arg",
"+=",
"','",
"+",
"constructor_args",
"[",
"i",
"+",
"1",
"]",
"del",
"constructor_args",
"[",
"i",
"+",
"1",
"]",
"constructor_args",
"[",
"i",
"]",
"=",
"constructor_arg",
"i",
"+=",
"1",
"defaulted_args",
"=",
"[",
"arg",
"for",
"arg",
"in",
"constructor_args",
"if",
"'='",
"in",
"arg",
"]",
"noarg_constructor",
"=",
"(",
"not",
"constructor_args",
"or",
"# empty arg list",
"# 'void' arg specifier",
"(",
"len",
"(",
"constructor_args",
")",
"==",
"1",
"and",
"constructor_args",
"[",
"0",
"]",
".",
"strip",
"(",
")",
"==",
"'void'",
")",
")",
"onearg_constructor",
"=",
"(",
"(",
"len",
"(",
"constructor_args",
")",
"==",
"1",
"and",
"# exactly one arg",
"not",
"noarg_constructor",
")",
"or",
"# all but at most one arg defaulted",
"(",
"len",
"(",
"constructor_args",
")",
">=",
"1",
"and",
"not",
"noarg_constructor",
"and",
"len",
"(",
"defaulted_args",
")",
">=",
"len",
"(",
"constructor_args",
")",
"-",
"1",
")",
")",
"initializer_list_constructor",
"=",
"bool",
"(",
"onearg_constructor",
"and",
"Search",
"(",
"r'\\bstd\\s*::\\s*initializer_list\\b'",
",",
"constructor_args",
"[",
"0",
"]",
")",
")",
"copy_constructor",
"=",
"bool",
"(",
"onearg_constructor",
"and",
"Match",
"(",
"r'(const\\s+)?%s(\\s*<[^>]*>)?(\\s+const)?\\s*(?:<\\w+>\\s*)?&'",
"%",
"re",
".",
"escape",
"(",
"base_classname",
")",
",",
"constructor_args",
"[",
"0",
"]",
".",
"strip",
"(",
")",
")",
")",
"if",
"(",
"not",
"is_marked_explicit",
"and",
"onearg_constructor",
"and",
"not",
"initializer_list_constructor",
"and",
"not",
"copy_constructor",
")",
":",
"if",
"defaulted_args",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'runtime/explicit'",
",",
"5",
",",
"'Constructors callable with one argument '",
"'should be marked explicit.'",
")",
"else",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'runtime/explicit'",
",",
"5",
",",
"'Single-parameter constructors should be marked explicit.'",
")",
"elif",
"is_marked_explicit",
"and",
"not",
"onearg_constructor",
":",
"if",
"noarg_constructor",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'runtime/explicit'",
",",
"5",
",",
"'Zero-parameter constructors should not be marked explicit.'",
")"
] |
https://github.com/strukturag/libheif/blob/0082fea96ee70a20c8906a0373bedec0c01777bc/scripts/cpplint.py#L2659-L2816
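The explicit-constructor regex above can be exercised on its own; a small demo, assuming a hypothetical class name `Foo`:

```python
import re

base_classname = 'Foo'  # hypothetical class under inspection
ctor_re = (r'\s+(?:(?:inline|constexpr)\s+)*(explicit\s+)?'
           r'(?:(?:inline|constexpr)\s+)*%s\s*'
           r'\(((?:[^()]|\([^()]*\))*)\)' % re.escape(base_classname))

m = re.match(ctor_re, '  Foo(int x)')
print(bool(m), m.group(1), m.group(2))
# -> True None int x: a one-argument constructor not marked explicit
```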
|
||
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/tkFileDialog.py
|
python
|
askopenfilenames
|
(**options)
|
return Open(**options).show()
|
Ask for multiple filenames to open
Returns a list of filenames or empty list if
cancel button selected
|
Ask for multiple filenames to open
|
[
"Ask",
"for",
"multiple",
"filenames",
"to",
"open"
] |
def askopenfilenames(**options):
"""Ask for multiple filenames to open
Returns a list of filenames or empty list if
cancel button selected
"""
options["multiple"]=1
return Open(**options).show()
|
[
"def",
"askopenfilenames",
"(",
"*",
"*",
"options",
")",
":",
"options",
"[",
"\"multiple\"",
"]",
"=",
"1",
"return",
"Open",
"(",
"*",
"*",
"options",
")",
".",
"show",
"(",
")"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/tkFileDialog.py#L132-L139
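For reference, the same call through Python 3's renamed module (assuming a Tk-enabled interpreter; actually showing the dialog needs a display):

```python
from tkinter import filedialog

def choose_python_files():
    # Returns a tuple of selected paths, or an empty tuple on cancel.
    return filedialog.askopenfilenames(filetypes=[('Python files', '*.py')])
```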
|
|
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/pip/vendor/distlib/_backport/tarfile.py
|
python
|
TarIter.__iter__
|
(self)
|
return self
|
Return iterator object.
|
Return iterator object.
|
[
"Return",
"iterator",
"object",
"."
] |
def __iter__(self):
"""Return iterator object.
"""
return self
|
[
"def",
"__iter__",
"(",
"self",
")",
":",
"return",
"self"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/pip/vendor/distlib/_backport/tarfile.py#L2565-L2568
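`__iter__` returning `self` is the standard iterator protocol; a self-contained illustration with a `__next__` to complete the pair:

```python
class CountDown:
    # Same protocol as TarIter: __iter__ hands back self, __next__ yields items.
    def __init__(self, n):
        self.n = n

    def __iter__(self):
        return self

    def __next__(self):
        if self.n <= 0:
            raise StopIteration
        self.n -= 1
        return self.n

print(list(CountDown(3)))  # -> [2, 1, 0]
```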
|
|
CRYTEK/CRYENGINE
|
232227c59a220cbbd311576f0fbeba7bb53b2a8c
|
Editor/Python/windows/Lib/site-packages/pkg_resources/_vendor/pyparsing.py
|
python
|
ParserElement.__or__
|
(self, other )
|
return MatchFirst( [ self, other ] )
|
Implementation of | operator - returns C{L{MatchFirst}}
|
Implementation of | operator - returns C{L{MatchFirst}}
|
[
"Implementation",
"of",
"|",
"operator",
"-",
"returns",
"C",
"{",
"L",
"{",
"MatchFirst",
"}}"
] |
def __or__(self, other ):
"""
Implementation of | operator - returns C{L{MatchFirst}}
"""
if isinstance( other, basestring ):
other = ParserElement._literalStringClass( other )
if not isinstance( other, ParserElement ):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return MatchFirst( [ self, other ] )
|
[
"def",
"__or__",
"(",
"self",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"basestring",
")",
":",
"other",
"=",
"ParserElement",
".",
"_literalStringClass",
"(",
"other",
")",
"if",
"not",
"isinstance",
"(",
"other",
",",
"ParserElement",
")",
":",
"warnings",
".",
"warn",
"(",
"\"Cannot combine element of type %s with ParserElement\"",
"%",
"type",
"(",
"other",
")",
",",
"SyntaxWarning",
",",
"stacklevel",
"=",
"2",
")",
"return",
"None",
"return",
"MatchFirst",
"(",
"[",
"self",
",",
"other",
"]",
")"
] |
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/pkg_resources/_vendor/pyparsing.py#L1948-L1958
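Typical use of this overload, assuming an installed pyparsing (the modern package exposes the same `|` -> `MatchFirst` behavior as this vendored Python 2 copy):

```python
from pyparsing import Word, alphas, nums

number = Word(nums)
name = Word(alphas)
value = number | name            # __or__ builds MatchFirst([number, name])
print(value.parseString('42'))   # -> ['42']
print(value.parseString('abc'))  # -> ['abc']
```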
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_core.py
|
python
|
Window.GetScrollRange
|
(*args, **kwargs)
|
return _core_.Window_GetScrollRange(*args, **kwargs)
|
GetScrollRange(self, int orientation) -> int
Returns the built-in scrollbar range.
|
GetScrollRange(self, int orientation) -> int
|
[
"GetScrollRange",
"(",
"self",
"int",
"orientation",
")",
"-",
">",
"int"
] |
def GetScrollRange(*args, **kwargs):
"""
GetScrollRange(self, int orientation) -> int
Returns the built-in scrollbar range.
"""
return _core_.Window_GetScrollRange(*args, **kwargs)
|
[
"def",
"GetScrollRange",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Window_GetScrollRange",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L11252-L11258
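Hedged usage sketch; it requires wxPython plus a running `wx.App` and a realized window, none of which are created here:

```python
import wx  # assumed installed; no App or window is constructed in this sketch

def vertical_scroll_range(window):
    # `window` must be a realized wx.Window; wx.HORIZONTAL also works here.
    return window.GetScrollRange(wx.VERTICAL)
```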
|
|
h0x91b/redis-v8
|
ac8b9d49701d75bcee3719892a2a6a50b437e47a
|
redis/deps/v8/tools/grokdump.py
|
python
|
InspectionShell.do_s
|
(self, word)
|
Search for a given word in available memory regions. The given word
is expanded to full pointer size and searched at aligned as well as
un-aligned memory locations. Use 'sa' to search aligned locations
only.
|
Search for a given word in available memory regions. The given word
is expanded to full pointer size and searched at aligned as well as
un-aligned memory locations. Use 'sa' to search aligned locations
only.
|
[
"Search",
"for",
"a",
"given",
"word",
"in",
"available",
"memory",
"regions",
".",
"The",
"given",
"word",
"is",
"expanded",
"to",
"full",
"pointer",
"size",
"and",
"searched",
"at",
"aligned",
"as",
"well",
"as",
"un",
"-",
"aligned",
"memory",
"locations",
".",
"Use",
"sa",
"to",
"search",
"aligned",
"locations",
"only",
"."
] |
def do_s(self, word):
"""
Search for a given word in available memory regions. The given word
is expanded to full pointer size and searched at aligned as well as
un-aligned memory locations. Use 'sa' to search aligned locations
only.
"""
try:
word = int(word, 0)
except ValueError:
print "Malformed word, prefix with '0x' to use hexadecimal format."
return
print "Searching for word %d/0x%s:" % (word, self.reader.FormatIntPtr(word))
self.reader.FindWord(word)
|
[
"def",
"do_s",
"(",
"self",
",",
"word",
")",
":",
"try",
":",
"word",
"=",
"int",
"(",
"word",
",",
"0",
")",
"except",
"ValueError",
":",
"print",
"\"Malformed word, prefix with '0x' to use hexadecimal format.\"",
"return",
"print",
"\"Searching for word %d/0x%s:\"",
"%",
"(",
"word",
",",
"self",
".",
"reader",
".",
"FormatIntPtr",
"(",
"word",
")",
")",
"self",
".",
"reader",
".",
"FindWord",
"(",
"word",
")"
] |
https://github.com/h0x91b/redis-v8/blob/ac8b9d49701d75bcee3719892a2a6a50b437e47a/redis/deps/v8/tools/grokdump.py#L1806-L1819
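`int(word, 0)` is what lets the command accept decimal and prefixed literals alike (Python 3 shown; the source above is Python 2):

```python
for text in ('42', '0x2a', '0o52', '0b101010'):
    print(text, '->', int(text, 0))  # every line prints 42
```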
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/pandas/py3/pandas/core/nanops.py
|
python
|
nanstd
|
(values, *, axis=None, skipna=True, ddof=1, mask=None)
|
return _wrap_results(result, orig_dtype)
|
Compute the standard deviation along given axis while ignoring NaNs
Parameters
----------
values : ndarray
axis : int, optional
skipna : bool, default True
ddof : int, default 1
Delta Degrees of Freedom. The divisor used in calculations is N - ddof,
where N represents the number of elements.
mask : ndarray[bool], optional
nan-mask if known
Returns
-------
result : float
Unless input is a float array, in which case use the same
precision as the input array.
Examples
--------
>>> import pandas.core.nanops as nanops
>>> s = pd.Series([1, np.nan, 2, 3])
>>> nanops.nanstd(s)
1.0
|
Compute the standard deviation along given axis while ignoring NaNs
|
[
"Compute",
"the",
"standard",
"deviation",
"along",
"given",
"axis",
"while",
"ignoring",
"NaNs"
] |
def nanstd(values, *, axis=None, skipna=True, ddof=1, mask=None):
"""
Compute the standard deviation along given axis while ignoring NaNs
Parameters
----------
values : ndarray
axis : int, optional
skipna : bool, default True
ddof : int, default 1
Delta Degrees of Freedom. The divisor used in calculations is N - ddof,
where N represents the number of elements.
mask : ndarray[bool], optional
nan-mask if known
Returns
-------
result : float
Unless input is a float array, in which case use the same
precision as the input array.
Examples
--------
>>> import pandas.core.nanops as nanops
>>> s = pd.Series([1, np.nan, 2, 3])
>>> nanops.nanstd(s)
1.0
"""
if values.dtype == "M8[ns]":
values = values.view("m8[ns]")
orig_dtype = values.dtype
values, mask, _, _, _ = _get_values(values, skipna, mask=mask)
result = np.sqrt(nanvar(values, axis=axis, skipna=skipna, ddof=ddof, mask=mask))
return _wrap_results(result, orig_dtype)
|
[
"def",
"nanstd",
"(",
"values",
",",
"*",
",",
"axis",
"=",
"None",
",",
"skipna",
"=",
"True",
",",
"ddof",
"=",
"1",
",",
"mask",
"=",
"None",
")",
":",
"if",
"values",
".",
"dtype",
"==",
"\"M8[ns]\"",
":",
"values",
"=",
"values",
".",
"view",
"(",
"\"m8[ns]\"",
")",
"orig_dtype",
"=",
"values",
".",
"dtype",
"values",
",",
"mask",
",",
"_",
",",
"_",
",",
"_",
"=",
"_get_values",
"(",
"values",
",",
"skipna",
",",
"mask",
"=",
"mask",
")",
"result",
"=",
"np",
".",
"sqrt",
"(",
"nanvar",
"(",
"values",
",",
"axis",
"=",
"axis",
",",
"skipna",
"=",
"skipna",
",",
"ddof",
"=",
"ddof",
",",
"mask",
"=",
"mask",
")",
")",
"return",
"_wrap_results",
"(",
"result",
",",
"orig_dtype",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/core/nanops.py#L839-L874
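The docstring example, runnable as-is against an installed pandas:

```python
import numpy as np
import pandas as pd
import pandas.core.nanops as nanops

s = pd.Series([1, np.nan, 2, 3])
print(nanops.nanstd(s))  # 1.0 -- std of [1, 2, 3] with ddof=1, NaN skipped
```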
|
|
SFTtech/openage
|
d6a08c53c48dc1e157807471df92197f6ca9e04d
|
openage/util/observer.py
|
python
|
Observable.notify_observers
|
(self, message=None)
|
Notify the observers if the object has changed. Include
an optional message.
:param message: An optional message of any type.
|
Notify the observers if the object has changed. Include
an optional message.
|
[
"Notify",
"the",
"observers",
"if",
"the",
"object",
"has",
"changed",
".",
"Include",
"an",
"optional",
"message",
"."
] |
def notify_observers(self, message=None):
"""
Notify the observers if the object has changed. Include
an optional message.
:param message: An optional message of any type.
"""
if self.changed:
for observer in self.observers:
if observer() is not None:
observer().update(self, message=message)
else:
self.delete_observer(observer)
|
[
"def",
"notify_observers",
"(",
"self",
",",
"message",
"=",
"None",
")",
":",
"if",
"self",
".",
"changed",
":",
"for",
"observer",
"in",
"self",
".",
"observers",
":",
"if",
"observer",
"(",
")",
"is",
"not",
"None",
":",
"observer",
"(",
")",
".",
"update",
"(",
"self",
",",
"message",
"=",
"message",
")",
"else",
":",
"self",
".",
"delete_observer",
"(",
"observer",
")"
] |
https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/util/observer.py#L94-L107
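The `observer()` calls indicate the observers are stored as weak references that must be dereferenced before use; a minimal illustration with `weakref`:

```python
import weakref

class Watcher:
    def update(self, observable, message=None):
        print('received:', message)

w = Watcher()
ref = weakref.ref(w)   # observers above are weak refs, hence observer()
if ref() is not None:
    ref().update(None, message='hello')  # -> received: hello
```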
|
||
mindspore-ai/mindspore
|
fb8fd3338605bb34fa5cea054e535a8b1d753fab
|
mindspore/python/mindspore/_extends/graph_kernel/model/op_infer.py
|
python
|
_Elemwise.defaultformat_to_nz
|
(default_shape)
|
return more_two_d_shape + shape
|
default format shape to fractal_Nz format shape
|
default format shape to fractal_Nz format shape
|
[
"default",
"format",
"shape",
"to",
"fractal_Nz",
"format",
"shape"
] |
def defaultformat_to_nz(default_shape):
"""default format shape to fractal_Nz format shape"""
# As shape (1,) can broadcast to any shape, it can be regarded as a special FractalNZ shape
if len(default_shape) == 1 and default_shape[0] == 1:
return default_shape
more_two_d_shape, two_d_shape = default_shape[:-2], default_shape[-2:]
# (32) or (1, 32) -> (2, 1, 1, 16)
if len(two_d_shape) == 1 or (len(two_d_shape) == 2 and two_d_shape[0] == 1):
shape = [two_d_shape[-1] // 16, 1, 1, 16]
if two_d_shape[-1] % 16 != 0:
raise GKException("should be multiplies of 16")
return more_two_d_shape + shape
# (32, 1) -> (1, 2, 16, 1)
if len(two_d_shape) == 2 and two_d_shape[1] == 1:
shape = [1, two_d_shape[0] // 16, 16, 1]
if two_d_shape[0] % 16 != 0:
raise GKException("should be multiples of 16")
return more_two_d_shape + shape
# (32, 48) -> (3, 2, 16, 16)
shape = [two_d_shape[1] // 16, two_d_shape[0] // 16, 16, 16]
if two_d_shape[0] % 16 != 0 or two_d_shape[1] % 16 != 0:
raise GKException("should be multiples of 16")
return more_two_d_shape + shape
|
[
"def",
"defaultformat_to_nz",
"(",
"default_shape",
")",
":",
"# As shape (1,) can broadcast to any shape, it can be regarded as a special FractalNZ shape",
"if",
"len",
"(",
"default_shape",
")",
"==",
"1",
"and",
"default_shape",
"[",
"0",
"]",
"==",
"1",
":",
"return",
"default_shape",
"more_two_d_shape",
",",
"two_d_shape",
"=",
"default_shape",
"[",
":",
"-",
"2",
"]",
",",
"default_shape",
"[",
"-",
"2",
":",
"]",
"# (32) or (1, 32) -> (2, 1, 1, 16)",
"if",
"len",
"(",
"two_d_shape",
")",
"==",
"1",
"or",
"(",
"len",
"(",
"two_d_shape",
")",
"==",
"2",
"and",
"two_d_shape",
"[",
"0",
"]",
"==",
"1",
")",
":",
"shape",
"=",
"[",
"two_d_shape",
"[",
"-",
"1",
"]",
"//",
"16",
",",
"1",
",",
"1",
",",
"16",
"]",
"if",
"two_d_shape",
"[",
"-",
"1",
"]",
"%",
"16",
"!=",
"0",
":",
"raise",
"GKException",
"(",
"\"should be multiplies of 16\"",
")",
"return",
"more_two_d_shape",
"+",
"shape",
"# (32, 1) -> (1, 2, 16, 1)",
"if",
"len",
"(",
"two_d_shape",
")",
"==",
"2",
"and",
"two_d_shape",
"[",
"1",
"]",
"==",
"1",
":",
"shape",
"=",
"[",
"1",
",",
"two_d_shape",
"[",
"0",
"]",
"//",
"16",
",",
"16",
",",
"1",
"]",
"if",
"two_d_shape",
"[",
"0",
"]",
"%",
"16",
"!=",
"0",
":",
"raise",
"GKException",
"(",
"\"should be multiples of 16\"",
")",
"return",
"more_two_d_shape",
"+",
"shape",
"# (32, 48) -> (3, 2, 16, 16)",
"shape",
"=",
"[",
"two_d_shape",
"[",
"1",
"]",
"//",
"16",
",",
"two_d_shape",
"[",
"0",
"]",
"//",
"16",
",",
"16",
",",
"16",
"]",
"if",
"two_d_shape",
"[",
"0",
"]",
"%",
"16",
"!=",
"0",
"or",
"two_d_shape",
"[",
"1",
"]",
"%",
"16",
"!=",
"0",
":",
"raise",
"GKException",
"(",
"\"should be multiples of 16\"",
")",
"return",
"more_two_d_shape",
"+",
"shape"
] |
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/_extends/graph_kernel/model/op_infer.py#L122-L144
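A trimmed, dependency-free transcription of the shape mapping (the divisibility-by-16 checks are elided, and `GKException` is not assumed available):

```python
def default_to_nz(shape):
    if list(shape) == [1]:                    # (1,) broadcasts to anything
        return list(shape)
    head, tail = list(shape[:-2]), list(shape[-2:])
    if len(tail) == 1 or tail[0] == 1:        # (32,) or (1, 32)
        return head + [tail[-1] // 16, 1, 1, 16]
    if tail[1] == 1:                          # (32, 1)
        return head + [1, tail[0] // 16, 16, 1]
    return head + [tail[1] // 16, tail[0] // 16, 16, 16]

print(default_to_nz((32, 48)))  # -> [3, 2, 16, 16]
```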
|
|
jeog/TDAmeritradeAPI
|
91c738afd7d57b54f6231170bd64c2550fafd34d
|
python/tdma_api/stream.py
|
python
|
StreamingSession.get_qos
|
(self)
|
return clib.get_val(self._abi("GetQOS"), c_int, self._obj)
|
Returns the quality-of-service.
|
Returns the quality-of-service.
|
[
"Returns",
"the",
"quality",
"-",
"of",
"-",
"service",
"."
] |
def get_qos(self):
"""Returns the quality-of-service."""
return clib.get_val(self._abi("GetQOS"), c_int, self._obj)
|
[
"def",
"get_qos",
"(",
"self",
")",
":",
"return",
"clib",
".",
"get_val",
"(",
"self",
".",
"_abi",
"(",
"\"GetQOS\"",
")",
",",
"c_int",
",",
"self",
".",
"_obj",
")"
] |
https://github.com/jeog/TDAmeritradeAPI/blob/91c738afd7d57b54f6231170bd64c2550fafd34d/python/tdma_api/stream.py#L281-L283
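A sketch of what a `clib.get_val`-style helper plausibly does with ctypes; the out-parameter signature here is an assumption, not the library's actual API:

```python
import ctypes

def get_val_sketch(cfunc, ctype, obj):
    # Hypothetical shape of clib.get_val: call cfunc(obj, &out) and
    # unwrap the C out-parameter.
    out = ctype(0)
    cfunc(obj, ctypes.byref(out))
    return out.value
```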
|
|
bigartm/bigartm
|
47e37f982de87aa67bfd475ff1f39da696b181b3
|
3rdparty/protobuf-3.0.0/python/google/protobuf/proto_builder.py
|
python
|
_MakeFileDescriptorProto
|
(proto_file_name, full_name, field_items)
|
return file_proto
|
Populate FileDescriptorProto for MessageFactory's DescriptorPool.
|
Populate FileDescriptorProto for MessageFactory's DescriptorPool.
|
[
"Populate",
"FileDescriptorProto",
"for",
"MessageFactory",
"s",
"DescriptorPool",
"."
] |
def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
"""Populate FileDescriptorProto for MessageFactory's DescriptorPool."""
package, name = full_name.rsplit('.', 1)
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
file_proto.package = package
desc_proto = file_proto.message_type.add()
desc_proto.name = name
for f_number, (f_name, f_type) in enumerate(field_items, 1):
field_proto = desc_proto.field.add()
field_proto.name = f_name
field_proto.number = f_number
field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
field_proto.type = f_type
return file_proto
|
[
"def",
"_MakeFileDescriptorProto",
"(",
"proto_file_name",
",",
"full_name",
",",
"field_items",
")",
":",
"package",
",",
"name",
"=",
"full_name",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"file_proto",
"=",
"descriptor_pb2",
".",
"FileDescriptorProto",
"(",
")",
"file_proto",
".",
"name",
"=",
"os",
".",
"path",
".",
"join",
"(",
"package",
".",
"replace",
"(",
"'.'",
",",
"'/'",
")",
",",
"proto_file_name",
")",
"file_proto",
".",
"package",
"=",
"package",
"desc_proto",
"=",
"file_proto",
".",
"message_type",
".",
"add",
"(",
")",
"desc_proto",
".",
"name",
"=",
"name",
"for",
"f_number",
",",
"(",
"f_name",
",",
"f_type",
")",
"in",
"enumerate",
"(",
"field_items",
",",
"1",
")",
":",
"field_proto",
"=",
"desc_proto",
".",
"field",
".",
"add",
"(",
")",
"field_proto",
".",
"name",
"=",
"f_name",
"field_proto",
".",
"number",
"=",
"f_number",
"field_proto",
".",
"label",
"=",
"descriptor_pb2",
".",
"FieldDescriptorProto",
".",
"LABEL_OPTIONAL",
"field_proto",
".",
"type",
"=",
"f_type",
"return",
"file_proto"
] |
https://github.com/bigartm/bigartm/blob/47e37f982de87aa67bfd475ff1f39da696b181b3/3rdparty/protobuf-3.0.0/python/google/protobuf/proto_builder.py#L116-L130
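This helper backs protobuf's public `proto_builder` entry point; a usage example assuming the `protobuf` package is installed:

```python
from google.protobuf import descriptor_pb2, proto_builder

RowMessage = proto_builder.MakeSimpleProtoClass(
    {'name': descriptor_pb2.FieldDescriptorProto.TYPE_STRING,
     'id': descriptor_pb2.FieldDescriptorProto.TYPE_INT64},
    full_name='demo.Row')

msg = RowMessage(name='a', id=1)
print(msg)
```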
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/numpy/py2/numpy/polynomial/_polybase.py
|
python
|
ABCPolyBase.convert
|
(self, domain=None, kind=None, window=None)
|
return self(kind.identity(domain, window=window))
|
Convert series to a different kind and/or domain and/or window.
Parameters
----------
domain : array_like, optional
The domain of the converted series. If the value is None,
the default domain of `kind` is used.
kind : class, optional
The polynomial series type class to which the current instance
should be converted. If kind is None, then the class of the
current instance is used.
window : array_like, optional
The window of the converted series. If the value is None,
the default window of `kind` is used.
Returns
-------
new_series : series
The returned class can be of different type than the current
instance and/or have a different domain and/or different
window.
Notes
-----
Conversion between domains and class types can result in
numerically ill defined series.
Examples
--------
|
Convert series to a different kind and/or domain and/or window.
|
[
"Convert",
"series",
"to",
"a",
"different",
"kind",
"and",
"/",
"or",
"domain",
"and",
"/",
"or",
"window",
"."
] |
def convert(self, domain=None, kind=None, window=None):
"""Convert series to a different kind and/or domain and/or window.
Parameters
----------
domain : array_like, optional
The domain of the converted series. If the value is None,
the default domain of `kind` is used.
kind : class, optional
The polynomial series type class to which the current instance
should be converted. If kind is None, then the class of the
current instance is used.
window : array_like, optional
The window of the converted series. If the value is None,
the default window of `kind` is used.
Returns
-------
new_series : series
The returned class can be of different type than the current
instance and/or have a different domain and/or different
window.
Notes
-----
Conversion between domains and class types can result in
numerically ill defined series.
Examples
--------
"""
if kind is None:
kind = self.__class__
if domain is None:
domain = kind.domain
if window is None:
window = kind.window
return self(kind.identity(domain, window=window))
|
[
"def",
"convert",
"(",
"self",
",",
"domain",
"=",
"None",
",",
"kind",
"=",
"None",
",",
"window",
"=",
"None",
")",
":",
"if",
"kind",
"is",
"None",
":",
"kind",
"=",
"self",
".",
"__class__",
"if",
"domain",
"is",
"None",
":",
"domain",
"=",
"kind",
".",
"domain",
"if",
"window",
"is",
"None",
":",
"window",
"=",
"kind",
".",
"window",
"return",
"self",
"(",
"kind",
".",
"identity",
"(",
"domain",
",",
"window",
"=",
"window",
")",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py2/numpy/polynomial/_polybase.py#L624-L662
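Usage against the public numpy API (the same method exists on any installed numpy):

```python
import numpy.polynomial as npoly

p = npoly.Polynomial([1, 2, 3])       # 1 + 2x + 3x**2 in the power basis
c = p.convert(kind=npoly.Chebyshev)   # same function, Chebyshev basis
print(c.coef)                         # -> [2.5 2.  1.5]
```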
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemDynamicContent/AWS/resource-manager-code/dynamic_content_migrator.py
|
python
|
command_migrate_table_entries
|
(context: object, args: dict)
|
Migrate existing staging settings information when content versioning is enabled or suspended
Arguments
context -- context to use
args -- arguments from the CLI command
|
Migrate existing staging settings information when content versioning is enabled or suspended
|
[
"Migrate",
"existing",
"staging",
"settings",
"information",
"when",
"content",
"versioning",
"is",
"enabled",
"or",
"suspended"
] |
def command_migrate_table_entries(context: object, args: dict):
"""
Migrate existing staging settings information when content versioning is enabled or suspended
Arguments
context -- context to use
args -- arguments from the CLI command
"""
deployment_name = args.deployment_name if args.deployment_name else context.config.default_deployment
export_current_table_entries(context, deployment_name)
import_table_entries_from_backup(context, deployment_name)
|
[
"def",
"command_migrate_table_entries",
"(",
"context",
":",
"object",
",",
"args",
":",
"dict",
")",
":",
"deployment_name",
"=",
"args",
".",
"deployment_name",
"if",
"args",
".",
"deployment_name",
"else",
"context",
".",
"config",
".",
"default_deployment",
"export_current_table_entries",
"(",
"context",
",",
"deployment_name",
")",
"import_table_entries_from_backup",
"(",
"context",
",",
"deployment_name",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDynamicContent/AWS/resource-manager-code/dynamic_content_migrator.py#L25-L36
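A dependency-free sketch of the same orchestration; the two helpers below are hypothetical placeholders for the project's own `export_current_table_entries` and `import_table_entries_from_backup`:

```python
def export_current_table_entries(context, name):      # hypothetical stand-in
    print('exporting staging settings for', name)

def import_table_entries_from_backup(context, name):  # hypothetical stand-in
    print('importing staging settings for', name)

def migrate(context, deployment_name=None, default_deployment='dev'):
    # Same CLI-argument fallback as above: explicit name wins, else default.
    name = deployment_name if deployment_name else default_deployment
    export_current_table_entries(context, name)
    import_table_entries_from_backup(context, name)

migrate(None)  # -> exports then imports for 'dev'
```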
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/numpy/py3/numpy/lib/histograms.py
|
python
|
_hist_bin_rice
|
(x, range)
|
return _ptp(x) / (2.0 * x.size ** (1.0 / 3))
|
Rice histogram bin estimator.
Another simple estimator with no normality assumption. It has better
performance for large data than Sturges, but tends to overestimate
the number of bins. The number of bins is proportional to the cube
root of data size (asymptotically optimal). The estimate depends
only on size of the data.
Parameters
----------
x : array_like
Input data that is to be histogrammed, trimmed to range. May not
be empty.
Returns
-------
h : An estimate of the optimal bin width for the given data.
|
Rice histogram bin estimator.
|
[
"Rice",
"histogram",
"bin",
"estimator",
"."
] |
def _hist_bin_rice(x, range):
"""
Rice histogram bin estimator.
Another simple estimator with no normality assumption. It has better
performance for large data than Sturges, but tends to overestimate
the number of bins. The number of bins is proportional to the cube
root of data size (asymptotically optimal). The estimate depends
only on size of the data.
Parameters
----------
x : array_like
Input data that is to be histogrammed, trimmed to range. May not
be empty.
Returns
-------
h : An estimate of the optimal bin width for the given data.
"""
del range # unused
return _ptp(x) / (2.0 * x.size ** (1.0 / 3))
|
[
"def",
"_hist_bin_rice",
"(",
"x",
",",
"range",
")",
":",
"del",
"range",
"# unused",
"return",
"_ptp",
"(",
"x",
")",
"/",
"(",
"2.0",
"*",
"x",
".",
"size",
"**",
"(",
"1.0",
"/",
"3",
")",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py3/numpy/lib/histograms.py#L76-L97
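A hedged sketch of how the Rice rule is reached in practice; _hist_bin_rice is private, so the public entry point is the bins='rice' string (assumes numpy >= 1.17 for default_rng):
import numpy as np
x = np.random.default_rng(0).normal(size=1000)
counts, edges = np.histogram(x, bins='rice')   # roughly 2 * n**(1/3) bins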
|
|
usdot-fhwa-stol/carma-platform
|
d9d9b93f9689b2c7dd607cf5432d5296fc1000f5
|
guidance_plugin_validator/src/guidance_plugin_validator/guidance_plugin_components.py
|
python
|
RequiredControlComponents.__init__
|
(self, plugin_capability, plan_trajectory_topic)
|
Constructor for RequiredControlComponents
|
Constructor for RequiredControlComponents
|
[
"Constructor",
"for",
"RequiredControlComponents"
] |
def __init__(self, plugin_capability, plan_trajectory_topic):
"""Constructor for RequiredControlComponents"""
# Validation result indicating whether control plugin's node successfully launches
self.has_node = False
# Validation result indicating whether control plugin's node subscribes to required topics
self.plan_trajectory_topic = plan_trajectory_topic
self.has_plan_trajectory_sub = False
# Validation result indicating whether control plugin's node publishes to required topics
self.plugin_discovery_topic = "/guidance/plugin_discovery"
self.has_plugin_discovery_pub = False
self.final_waypoints_topic = "/guidance/carma_final_waypoints"
self.has_final_waypoints_pub = False
# Validation results indicating whether control plugin's node publishes required information to the plugin_discovery topic
self.correct_plugin_discovery_type = Plugin.CONTROL
self.has_correct_plugin_discovery_type = False
self.correct_plugin_discovery_capability = plugin_capability
self.has_correct_plugin_discovery_capability = False
|
[
"def",
"__init__",
"(",
"self",
",",
"plugin_capability",
",",
"plan_trajectory_topic",
")",
":",
"# Validation result indicating whether control plugin's node successfully launches",
"self",
".",
"has_node",
"=",
"False",
"# Validation result indicating whether control plugin's node subscribes to required topics",
"self",
".",
"plan_trajectory_topic",
"=",
"plan_trajectory_topic",
"self",
".",
"has_plan_trajectory_sub",
"=",
"False",
"# Validation result indicating whether control plugin's node publishes to required topics",
"self",
".",
"plugin_discovery_topic",
"=",
"\"/guidance/plugin_discovery\"",
"self",
".",
"has_plugin_discovery_pub",
"=",
"False",
"self",
".",
"final_waypoints_topic",
"=",
"\"/guidance/carma_final_waypoints\"",
"self",
".",
"has_final_waypoints_pub",
"=",
"False",
"# Validation results indicating whether control plugin's node publishes required information to the plugin_discovery topic",
"self",
".",
"correct_plugin_discovery_type",
"=",
"Plugin",
".",
"CONTROL",
"self",
".",
"has_correct_plugin_discovery_type",
"=",
"False",
"self",
".",
"correct_plugin_discovery_capability",
"=",
"plugin_capability",
"self",
".",
"has_correct_plugin_discovery_capability",
"=",
"False"
] |
https://github.com/usdot-fhwa-stol/carma-platform/blob/d9d9b93f9689b2c7dd607cf5432d5296fc1000f5/guidance_plugin_validator/src/guidance_plugin_validator/guidance_plugin_components.py#L368-L390
|
||
Tencent/Pebble
|
68315f176d9e328a233ace29b7579a829f89879f
|
tools/blade/src/blade/java_targets.py
|
python
|
JavaTarget.__init__
|
(self,
name,
type,
srcs,
deps,
prebuilt,
blade,
kwargs)
|
Init method.
Init the java jar target.
|
Init method.
|
[
"Init",
"method",
"."
] |
def __init__(self,
name,
type,
srcs,
deps,
prebuilt,
blade,
kwargs):
"""Init method.
Init the java jar target.
"""
srcs = var_to_list(srcs)
deps = var_to_list(deps)
Target.__init__(self,
name,
type,
srcs,
deps,
blade,
kwargs)
|
[
"def",
"__init__",
"(",
"self",
",",
"name",
",",
"type",
",",
"srcs",
",",
"deps",
",",
"prebuilt",
",",
"blade",
",",
"kwargs",
")",
":",
"srcs",
"=",
"var_to_list",
"(",
"srcs",
")",
"deps",
"=",
"var_to_list",
"(",
"deps",
")",
"Target",
".",
"__init__",
"(",
"self",
",",
"name",
",",
"type",
",",
"srcs",
",",
"deps",
",",
"blade",
",",
"kwargs",
")"
] |
https://github.com/Tencent/Pebble/blob/68315f176d9e328a233ace29b7579a829f89879f/tools/blade/src/blade/java_targets.py#L22-L44
|
||
mysql/mysql-router
|
cc0179f982bb9739a834eb6fd205a56224616133
|
ext/gmock/scripts/upload.py
|
python
|
GetRpcServer
|
(options)
|
return rpc_server_class(options.server, GetUserCredentials,
host_override=options.host,
save_cookies=options.save_cookies)
|
Returns an instance of an AbstractRpcServer.
Returns:
A new AbstractRpcServer, on which RPC calls can be made.
|
Returns an instance of an AbstractRpcServer.
|
[
"Returns",
"an",
"instance",
"of",
"an",
"AbstractRpcServer",
"."
] |
def GetRpcServer(options):
"""Returns an instance of an AbstractRpcServer.
Returns:
A new AbstractRpcServer, on which RPC calls can be made.
"""
rpc_server_class = HttpRpcServer
def GetUserCredentials():
"""Prompts the user for a username and password."""
email = options.email
if email is None:
email = GetEmail("Email (login for uploading to %s)" % options.server)
password = getpass.getpass("Password for %s: " % email)
return (email, password)
# If this is the dev_appserver, use fake authentication.
host = (options.host or options.server).lower()
if host == "localhost" or host.startswith("localhost:"):
email = options.email
if email is None:
email = "[email protected]"
logging.info("Using debug user %s. Override with --email" % email)
server = rpc_server_class(
options.server,
lambda: (email, "password"),
host_override=options.host,
extra_headers={"Cookie":
'dev_appserver_login="%s:False"' % email},
save_cookies=options.save_cookies)
# Don't try to talk to ClientLogin.
server.authenticated = True
return server
return rpc_server_class(options.server, GetUserCredentials,
host_override=options.host,
save_cookies=options.save_cookies)
|
[
"def",
"GetRpcServer",
"(",
"options",
")",
":",
"rpc_server_class",
"=",
"HttpRpcServer",
"def",
"GetUserCredentials",
"(",
")",
":",
"\"\"\"Prompts the user for a username and password.\"\"\"",
"email",
"=",
"options",
".",
"email",
"if",
"email",
"is",
"None",
":",
"email",
"=",
"GetEmail",
"(",
"\"Email (login for uploading to %s)\"",
"%",
"options",
".",
"server",
")",
"password",
"=",
"getpass",
".",
"getpass",
"(",
"\"Password for %s: \"",
"%",
"email",
")",
"return",
"(",
"email",
",",
"password",
")",
"# If this is the dev_appserver, use fake authentication.",
"host",
"=",
"(",
"options",
".",
"host",
"or",
"options",
".",
"server",
")",
".",
"lower",
"(",
")",
"if",
"host",
"==",
"\"localhost\"",
"or",
"host",
".",
"startswith",
"(",
"\"localhost:\"",
")",
":",
"email",
"=",
"options",
".",
"email",
"if",
"email",
"is",
"None",
":",
"email",
"=",
"\"[email protected]\"",
"logging",
".",
"info",
"(",
"\"Using debug user %s. Override with --email\"",
"%",
"email",
")",
"server",
"=",
"rpc_server_class",
"(",
"options",
".",
"server",
",",
"lambda",
":",
"(",
"email",
",",
"\"password\"",
")",
",",
"host_override",
"=",
"options",
".",
"host",
",",
"extra_headers",
"=",
"{",
"\"Cookie\"",
":",
"'dev_appserver_login=\"%s:False\"'",
"%",
"email",
"}",
",",
"save_cookies",
"=",
"options",
".",
"save_cookies",
")",
"# Don't try to talk to ClientLogin.",
"server",
".",
"authenticated",
"=",
"True",
"return",
"server",
"return",
"rpc_server_class",
"(",
"options",
".",
"server",
",",
"GetUserCredentials",
",",
"host_override",
"=",
"options",
".",
"host",
",",
"save_cookies",
"=",
"options",
".",
"save_cookies",
")"
] |
https://github.com/mysql/mysql-router/blob/cc0179f982bb9739a834eb6fd205a56224616133/ext/gmock/scripts/upload.py#L458-L495
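A hedged usage sketch; FakeOptions is a hypothetical stand-in for the option object the surrounding script builds with its own parser, exposing only the attributes GetRpcServer reads:
class FakeOptions(object):
    server = "localhost:8080"   # a localhost server triggers the fake-auth branch
    host = None
    email = None
    save_cookies = False
server = GetRpcServer(FakeOptions())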
|
|
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/locale.py
|
python
|
format_string
|
(f, val, grouping=False)
|
return new_f % val
|
Formats a string in the same way % formatting would,
but takes the current locale into account.
Grouping is applied if the third parameter is true.
|
Formats a string in the same way % formatting would,
but takes the current locale into account.
Grouping is applied if the third parameter is true.
|
[
"Formats",
"a",
"string",
"in",
"the",
"same",
"way",
"that",
"the",
"%",
"formatting",
"would",
"use",
"but",
"takes",
"the",
"current",
"locale",
"into",
"account",
".",
"Grouping",
"is",
"applied",
"if",
"the",
"third",
"parameter",
"is",
"true",
"."
] |
def format_string(f, val, grouping=False):
"""Formats a string in the same way that the % formatting would use,
but takes the current locale into account.
Grouping is applied if the third parameter is true."""
percents = list(_percent_re.finditer(f))
new_f = _percent_re.sub('%s', f)
if operator.isMappingType(val):
new_val = []
for perc in percents:
if perc.group()[-1]=='%':
new_val.append('%')
else:
new_val.append(format(perc.group(), val, grouping))
else:
if not isinstance(val, tuple):
val = (val,)
new_val = []
i = 0
for perc in percents:
if perc.group()[-1]=='%':
new_val.append('%')
else:
starcount = perc.group('modifiers').count('*')
new_val.append(_format(perc.group(),
val[i],
grouping,
False,
*val[i+1:i+1+starcount]))
i += (1 + starcount)
val = tuple(new_val)
return new_f % val
|
[
"def",
"format_string",
"(",
"f",
",",
"val",
",",
"grouping",
"=",
"False",
")",
":",
"percents",
"=",
"list",
"(",
"_percent_re",
".",
"finditer",
"(",
"f",
")",
")",
"new_f",
"=",
"_percent_re",
".",
"sub",
"(",
"'%s'",
",",
"f",
")",
"if",
"operator",
".",
"isMappingType",
"(",
"val",
")",
":",
"new_val",
"=",
"[",
"]",
"for",
"perc",
"in",
"percents",
":",
"if",
"perc",
".",
"group",
"(",
")",
"[",
"-",
"1",
"]",
"==",
"'%'",
":",
"new_val",
".",
"append",
"(",
"'%'",
")",
"else",
":",
"new_val",
".",
"append",
"(",
"format",
"(",
"perc",
".",
"group",
"(",
")",
",",
"val",
",",
"grouping",
")",
")",
"else",
":",
"if",
"not",
"isinstance",
"(",
"val",
",",
"tuple",
")",
":",
"val",
"=",
"(",
"val",
",",
")",
"new_val",
"=",
"[",
"]",
"i",
"=",
"0",
"for",
"perc",
"in",
"percents",
":",
"if",
"perc",
".",
"group",
"(",
")",
"[",
"-",
"1",
"]",
"==",
"'%'",
":",
"new_val",
".",
"append",
"(",
"'%'",
")",
"else",
":",
"starcount",
"=",
"perc",
".",
"group",
"(",
"'modifiers'",
")",
".",
"count",
"(",
"'*'",
")",
"new_val",
".",
"append",
"(",
"_format",
"(",
"perc",
".",
"group",
"(",
")",
",",
"val",
"[",
"i",
"]",
",",
"grouping",
",",
"False",
",",
"*",
"val",
"[",
"i",
"+",
"1",
":",
"i",
"+",
"1",
"+",
"starcount",
"]",
")",
")",
"i",
"+=",
"(",
"1",
"+",
"starcount",
")",
"val",
"=",
"tuple",
"(",
"new_val",
")",
"return",
"new_f",
"%",
"val"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/locale.py#L222-L254
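A short sketch of the locale-aware formatting this enables; it assumes a locale with thousands grouping is available on the host:
import locale
locale.setlocale(locale.LC_ALL, '')   # adopt the user's default locale
s = locale.format_string("%d bytes", 1234567, grouping=True)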
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python3/src/Lib/pickletools.py
|
python
|
read_bytes4
|
(f)
|
r"""
>>> import io
>>> read_bytes4(io.BytesIO(b"\x00\x00\x00\x00abc"))
b''
>>> read_bytes4(io.BytesIO(b"\x03\x00\x00\x00abcdef"))
b'abc'
>>> read_bytes4(io.BytesIO(b"\x00\x00\x00\x03abcdef"))
Traceback (most recent call last):
...
ValueError: expected 50331648 bytes in a bytes4, but only 6 remain
|
r"""
>>> import io
>>> read_bytes4(io.BytesIO(b"\x00\x00\x00\x00abc"))
b''
>>> read_bytes4(io.BytesIO(b"\x03\x00\x00\x00abcdef"))
b'abc'
>>> read_bytes4(io.BytesIO(b"\x00\x00\x00\x03abcdef"))
Traceback (most recent call last):
...
ValueError: expected 50331648 bytes in a bytes4, but only 6 remain
|
[
"r",
">>>",
"import",
"io",
">>>",
"read_bytes4",
"(",
"io",
".",
"BytesIO",
"(",
"b",
"\\",
"x00",
"\\",
"x00",
"\\",
"x00",
"\\",
"x00abc",
"))",
"b",
">>>",
"read_bytes4",
"(",
"io",
".",
"BytesIO",
"(",
"b",
"\\",
"x03",
"\\",
"x00",
"\\",
"x00",
"\\",
"x00abcdef",
"))",
"b",
"abc",
">>>",
"read_bytes4",
"(",
"io",
".",
"BytesIO",
"(",
"b",
"\\",
"x00",
"\\",
"x00",
"\\",
"x00",
"\\",
"x03abcdef",
"))",
"Traceback",
"(",
"most",
"recent",
"call",
"last",
")",
":",
"...",
"ValueError",
":",
"expected",
"50331648",
"bytes",
"in",
"a",
"bytes4",
"but",
"only",
"6",
"remain"
] |
def read_bytes4(f):
r"""
>>> import io
>>> read_bytes4(io.BytesIO(b"\x00\x00\x00\x00abc"))
b''
>>> read_bytes4(io.BytesIO(b"\x03\x00\x00\x00abcdef"))
b'abc'
>>> read_bytes4(io.BytesIO(b"\x00\x00\x00\x03abcdef"))
Traceback (most recent call last):
...
ValueError: expected 50331648 bytes in a bytes4, but only 6 remain
"""
n = read_uint4(f)
assert n >= 0
if n > sys.maxsize:
raise ValueError("bytes4 byte count > sys.maxsize: %d" % n)
data = f.read(n)
if len(data) == n:
return data
raise ValueError("expected %d bytes in a bytes4, but only %d remain" %
(n, len(data)))
|
[
"def",
"read_bytes4",
"(",
"f",
")",
":",
"n",
"=",
"read_uint4",
"(",
"f",
")",
"assert",
"n",
">=",
"0",
"if",
"n",
">",
"sys",
".",
"maxsize",
":",
"raise",
"ValueError",
"(",
"\"bytes4 byte count > sys.maxsize: %d\"",
"%",
"n",
")",
"data",
"=",
"f",
".",
"read",
"(",
"n",
")",
"if",
"len",
"(",
"data",
")",
"==",
"n",
":",
"return",
"data",
"raise",
"ValueError",
"(",
"\"expected %d bytes in a bytes4, but only %d remain\"",
"%",
"(",
"n",
",",
"len",
"(",
"data",
")",
")",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/pickletools.py#L500-L521
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/ipython/py3/IPython/utils/ipstruct.py
|
python
|
Struct.__iadd__
|
(self, other)
|
return self
|
s += s2 is a shorthand for s.merge(s2).
Examples
--------
>>> s = Struct(a=10,b=30)
>>> s2 = Struct(a=20,c=40)
>>> s += s2
>>> sorted(s.keys())
['a', 'b', 'c']
|
s += s2 is a shorthand for s.merge(s2).
|
[
"s",
"+",
"=",
"s2",
"is",
"a",
"shorthand",
"for",
"s",
".",
"merge",
"(",
"s2",
")",
"."
] |
def __iadd__(self, other):
"""s += s2 is a shorthand for s.merge(s2).
Examples
--------
>>> s = Struct(a=10,b=30)
>>> s2 = Struct(a=20,c=40)
>>> s += s2
>>> sorted(s.keys())
['a', 'b', 'c']
"""
self.merge(other)
return self
|
[
"def",
"__iadd__",
"(",
"self",
",",
"other",
")",
":",
"self",
".",
"merge",
"(",
"other",
")",
"return",
"self"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py3/IPython/utils/ipstruct.py#L153-L166
|
|
apache/arrow
|
af33dd1157eb8d7d9bfac25ebf61445b793b7943
|
cpp/build-support/cpplint.py
|
python
|
UpdateIncludeState
|
(filename, include_dict, io=codecs)
|
return True
|
Fill up the include_dict with new includes found from the file.
Args:
filename: the name of the header to read.
include_dict: a dictionary in which the headers are inserted.
io: The io factory to use to read the file. Provided for testability.
Returns:
True if a header was successfully added. False otherwise.
|
Fill up the include_dict with new includes found from the file.
|
[
"Fill",
"up",
"the",
"include_dict",
"with",
"new",
"includes",
"found",
"from",
"the",
"file",
"."
] |
def UpdateIncludeState(filename, include_dict, io=codecs):
"""Fill up the include_dict with new includes found from the file.
Args:
filename: the name of the header to read.
include_dict: a dictionary in which the headers are inserted.
io: The io factory to use to read the file. Provided for testability.
Returns:
True if a header was successfully added. False otherwise.
"""
headerfile = None
try:
headerfile = io.open(filename, 'r', 'utf8', 'replace')
except IOError:
return False
linenum = 0
for line in headerfile:
linenum += 1
clean_line = CleanseComments(line)
match = _RE_PATTERN_INCLUDE.search(clean_line)
if match:
include = match.group(2)
include_dict.setdefault(include, linenum)
return True
|
[
"def",
"UpdateIncludeState",
"(",
"filename",
",",
"include_dict",
",",
"io",
"=",
"codecs",
")",
":",
"headerfile",
"=",
"None",
"try",
":",
"headerfile",
"=",
"io",
".",
"open",
"(",
"filename",
",",
"'r'",
",",
"'utf8'",
",",
"'replace'",
")",
"except",
"IOError",
":",
"return",
"False",
"linenum",
"=",
"0",
"for",
"line",
"in",
"headerfile",
":",
"linenum",
"+=",
"1",
"clean_line",
"=",
"CleanseComments",
"(",
"line",
")",
"match",
"=",
"_RE_PATTERN_INCLUDE",
".",
"search",
"(",
"clean_line",
")",
"if",
"match",
":",
"include",
"=",
"match",
".",
"group",
"(",
"2",
")",
"include_dict",
".",
"setdefault",
"(",
"include",
",",
"linenum",
")",
"return",
"True"
] |
https://github.com/apache/arrow/blob/af33dd1157eb8d7d9bfac25ebf61445b793b7943/cpp/build-support/cpplint.py#L5631-L5655
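A hedged usage sketch; 'src/foo.h' is a hypothetical path, and the dict maps each included header to the first line where it appears:
include_state = {}
if UpdateIncludeState('src/foo.h', include_state):
    for header, linenum in include_state.items():
        print(header, 'first included at line', linenum)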
|
|
apache/incubator-mxnet
|
f03fb23f1d103fec9541b5ae59ee06b1734a51d9
|
python/mxnet/symbol/symbol.py
|
python
|
Symbol.attr_dict
|
(self)
|
return ret
|
Recursively gets all attributes from the symbol and its children.
Example
-------
>>> a = mx.sym.Variable('a', attr={'a1':'a2'})
>>> b = mx.sym.Variable('b', attr={'b1':'b2'})
>>> c = a+b
>>> c.attr_dict()
{'a': {'a1': 'a2'}, 'b': {'b1': 'b2'}}
Returns
-------
ret : Dict of str to dict
There is a key in the returned dict for every child with a non-empty attribute set.
For each symbol, the name of the symbol is its key in the dict
and the corresponding value is that symbol's attribute list (itself a dictionary).
|
Recursively gets all attributes from the symbol and its children.
|
[
"Recursively",
"gets",
"all",
"attributes",
"from",
"the",
"symbol",
"and",
"its",
"children",
"."
] |
def attr_dict(self):
"""Recursively gets all attributes from the symbol and its children.
Example
-------
>>> a = mx.sym.Variable('a', attr={'a1':'a2'})
>>> b = mx.sym.Variable('b', attr={'b1':'b2'})
>>> c = a+b
>>> c.attr_dict()
{'a': {'a1': 'a2'}, 'b': {'b1': 'b2'}}
Returns
-------
ret : Dict of str to dict
            There is a key in the returned dict for every child with a non-empty attribute set.
            For each symbol, the name of the symbol is its key in the dict
            and the corresponding value is that symbol's attribute list (itself a dictionary).
"""
size = mx_uint()
pairs = ctypes.POINTER(ctypes.c_char_p)()
f_handle = _LIB.MXSymbolListAttr
check_call(f_handle(self.handle, ctypes.byref(size), ctypes.byref(pairs)))
ret = {}
for i in range(size.value):
name, key = py_str(pairs[i * 2]).split('$')
val = py_str(pairs[i * 2 + 1])
if name not in ret:
ret[name] = {}
ret[name][key] = val
return ret
|
[
"def",
"attr_dict",
"(",
"self",
")",
":",
"size",
"=",
"mx_uint",
"(",
")",
"pairs",
"=",
"ctypes",
".",
"POINTER",
"(",
"ctypes",
".",
"c_char_p",
")",
"(",
")",
"f_handle",
"=",
"_LIB",
".",
"MXSymbolListAttr",
"check_call",
"(",
"f_handle",
"(",
"self",
".",
"handle",
",",
"ctypes",
".",
"byref",
"(",
"size",
")",
",",
"ctypes",
".",
"byref",
"(",
"pairs",
")",
")",
")",
"ret",
"=",
"{",
"}",
"for",
"i",
"in",
"range",
"(",
"size",
".",
"value",
")",
":",
"name",
",",
"key",
"=",
"py_str",
"(",
"pairs",
"[",
"i",
"*",
"2",
"]",
")",
".",
"split",
"(",
"'$'",
")",
"val",
"=",
"py_str",
"(",
"pairs",
"[",
"i",
"*",
"2",
"+",
"1",
"]",
")",
"if",
"name",
"not",
"in",
"ret",
":",
"ret",
"[",
"name",
"]",
"=",
"{",
"}",
"ret",
"[",
"name",
"]",
"[",
"key",
"]",
"=",
"val",
"return",
"ret"
] |
https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/symbol/symbol.py#L634-L663
|
|
ApolloAuto/apollo-platform
|
86d9dc6743b496ead18d597748ebabd34a513289
|
ros/dynamic_reconfigure/src/dynamic_reconfigure/client.py
|
python
|
Client.update_groups
|
(self, changes)
|
return descr
|
Changes the server's group configuration
@param changes: dictionary of key value pairs for the parameters that are changing
@type changes: {str: value}
|
Changes the server's group configuration
|
[
"Changes",
"the",
"servers",
"group",
"configuration"
] |
def update_groups(self, changes):
"""
        Changes the server's group configuration
@param changes: dictionary of key value pairs for the parameters that are changing
@type changes: {str: value}
"""
descr = self.get_group_descriptions()
groups = []
def update_state(group, description):
for p,g in description['groups'].items():
if g['name'] == group:
description['groups'][p]['state'] = changes[group]
else:
update_state(group, g)
return description
for change in changes:
descr = update_state(change, descr)
return descr
|
[
"def",
"update_groups",
"(",
"self",
",",
"changes",
")",
":",
"descr",
"=",
"self",
".",
"get_group_descriptions",
"(",
")",
"groups",
"=",
"[",
"]",
"def",
"update_state",
"(",
"group",
",",
"description",
")",
":",
"for",
"p",
",",
"g",
"in",
"description",
"[",
"'groups'",
"]",
".",
"items",
"(",
")",
":",
"if",
"g",
"[",
"'name'",
"]",
"==",
"group",
":",
"description",
"[",
"'groups'",
"]",
"[",
"p",
"]",
"[",
"'state'",
"]",
"=",
"changes",
"[",
"group",
"]",
"else",
":",
"update_state",
"(",
"group",
",",
"g",
")",
"return",
"description",
"for",
"change",
"in",
"changes",
":",
"descr",
"=",
"update_state",
"(",
"change",
",",
"descr",
")",
"return",
"descr"
] |
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/dynamic_reconfigure/src/dynamic_reconfigure/client.py#L233-L255
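A hedged usage sketch; 'my_node' and 'my_group' are hypothetical and must match a running dynamic_reconfigure server and one of its parameter groups:
client = Client('my_node')
descr = client.update_groups({'my_group': False})   # disable that group's state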
|
|
gabyx/ApproxMVBB
|
838f3ff7690a938f1e4199a5f41b6feefc32a603
|
example/kdTreeFiltering/python/Tools/Transformations/Transformations.py
|
python
|
reflection_from_matrix
|
(matrix)
|
return point, normal
|
Return mirror plane point and normal vector from reflection matrix.
>>> v0 = numpy.random.random(3) - 0.5
>>> v1 = numpy.random.random(3) - 0.5
>>> M0 = reflection_matrix(v0, v1)
>>> point, normal = reflection_from_matrix(M0)
>>> M1 = reflection_matrix(point, normal)
>>> is_same_transform(M0, M1)
True
|
Return mirror plane point and normal vector from reflection matrix.
|
[
"Return",
"mirror",
"plane",
"point",
"and",
"normal",
"vector",
"from",
"reflection",
"matrix",
"."
] |
def reflection_from_matrix(matrix):
"""Return mirror plane point and normal vector from reflection matrix.
>>> v0 = numpy.random.random(3) - 0.5
>>> v1 = numpy.random.random(3) - 0.5
>>> M0 = reflection_matrix(v0, v1)
>>> point, normal = reflection_from_matrix(M0)
>>> M1 = reflection_matrix(point, normal)
>>> is_same_transform(M0, M1)
True
"""
M = numpy.array(matrix, dtype=numpy.float64, copy=False)
# normal: unit eigenvector corresponding to eigenvalue -1
w, V = numpy.linalg.eig(M[:3, :3])
i = numpy.where(abs(numpy.real(w) + 1.0) < 1e-8)[0]
if not len(i):
raise ValueError("no unit eigenvector corresponding to eigenvalue -1")
normal = numpy.real(V[:, i[0]]).squeeze()
# point: any unit eigenvector corresponding to eigenvalue 1
w, V = numpy.linalg.eig(M)
i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
if not len(i):
raise ValueError("no unit eigenvector corresponding to eigenvalue 1")
point = numpy.real(V[:, i[-1]]).squeeze()
point /= point[3]
return point, normal
|
[
"def",
"reflection_from_matrix",
"(",
"matrix",
")",
":",
"M",
"=",
"numpy",
".",
"array",
"(",
"matrix",
",",
"dtype",
"=",
"numpy",
".",
"float64",
",",
"copy",
"=",
"False",
")",
"# normal: unit eigenvector corresponding to eigenvalue -1",
"w",
",",
"V",
"=",
"numpy",
".",
"linalg",
".",
"eig",
"(",
"M",
"[",
":",
"3",
",",
":",
"3",
"]",
")",
"i",
"=",
"numpy",
".",
"where",
"(",
"abs",
"(",
"numpy",
".",
"real",
"(",
"w",
")",
"+",
"1.0",
")",
"<",
"1e-8",
")",
"[",
"0",
"]",
"if",
"not",
"len",
"(",
"i",
")",
":",
"raise",
"ValueError",
"(",
"\"no unit eigenvector corresponding to eigenvalue -1\"",
")",
"normal",
"=",
"numpy",
".",
"real",
"(",
"V",
"[",
":",
",",
"i",
"[",
"0",
"]",
"]",
")",
".",
"squeeze",
"(",
")",
"# point: any unit eigenvector corresponding to eigenvalue 1",
"w",
",",
"V",
"=",
"numpy",
".",
"linalg",
".",
"eig",
"(",
"M",
")",
"i",
"=",
"numpy",
".",
"where",
"(",
"abs",
"(",
"numpy",
".",
"real",
"(",
"w",
")",
"-",
"1.0",
")",
"<",
"1e-8",
")",
"[",
"0",
"]",
"if",
"not",
"len",
"(",
"i",
")",
":",
"raise",
"ValueError",
"(",
"\"no unit eigenvector corresponding to eigenvalue 1\"",
")",
"point",
"=",
"numpy",
".",
"real",
"(",
"V",
"[",
":",
",",
"i",
"[",
"-",
"1",
"]",
"]",
")",
".",
"squeeze",
"(",
")",
"point",
"/=",
"point",
"[",
"3",
"]",
"return",
"point",
",",
"normal"
] |
https://github.com/gabyx/ApproxMVBB/blob/838f3ff7690a938f1e4199a5f41b6feefc32a603/example/kdTreeFiltering/python/Tools/Transformations/Transformations.py#L268-L294
|
|
cms-sw/cmssw
|
fd9de012d503d3405420bcbeec0ec879baa57cf2
|
PhysicsTools/HeppyCore/python/framework/eventstfile.py
|
python
|
Events.to
|
(self, iEv)
|
return self.tree
|
navigate to event iEv.
|
navigate to event iEv.
|
[
"navigate",
"to",
"event",
"iEv",
"."
] |
def to(self, iEv):
'''navigate to event iEv.'''
self.tree.GetEntry(iEv)
return self.tree
|
[
"def",
"to",
"(",
"self",
",",
"iEv",
")",
":",
"self",
".",
"tree",
".",
"GetEntry",
"(",
"iEv",
")",
"return",
"self",
".",
"tree"
] |
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/PhysicsTools/HeppyCore/python/framework/eventstfile.py#L23-L26
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/build/waf-1.7.13/waflib/extras/msvs.py
|
python
|
msvs_generator.init
|
(self)
|
Some data that needs to be present
|
Some data that needs to be present
|
[
"Some",
"data",
"that",
"needs",
"to",
"be",
"present"
] |
def init(self):
"""
Some data that needs to be present
"""
if not getattr(self, 'configurations', None):
self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
if not getattr(self, 'platforms', None):
self.platforms = ['Win32']
if not getattr(self, 'all_projects', None):
self.all_projects = []
if not getattr(self, 'project_extension', None):
self.project_extension = '.vcxproj'
if not getattr(self, 'projects_dir', None):
self.projects_dir = self.srcnode.make_node('.depproj')
self.projects_dir.mkdir()
# bind the classes to the object, so that subclass can provide custom generators
if not getattr(self, 'vsnode_vsdir', None):
self.vsnode_vsdir = vsnode_vsdir
if not getattr(self, 'vsnode_target', None):
self.vsnode_target = vsnode_target
if not getattr(self, 'vsnode_build_all', None):
self.vsnode_build_all = vsnode_build_all
if not getattr(self, 'vsnode_install_all', None):
self.vsnode_install_all = vsnode_install_all
if not getattr(self, 'vsnode_project_view', None):
self.vsnode_project_view = vsnode_project_view
self.numver = '12.00'
self.vsver = '2013'
|
[
"def",
"init",
"(",
"self",
")",
":",
"if",
"not",
"getattr",
"(",
"self",
",",
"'configurations'",
",",
"None",
")",
":",
"self",
".",
"configurations",
"=",
"[",
"'Release'",
"]",
"# LocalRelease, RemoteDebug, etc",
"if",
"not",
"getattr",
"(",
"self",
",",
"'platforms'",
",",
"None",
")",
":",
"self",
".",
"platforms",
"=",
"[",
"'Win32'",
"]",
"if",
"not",
"getattr",
"(",
"self",
",",
"'all_projects'",
",",
"None",
")",
":",
"self",
".",
"all_projects",
"=",
"[",
"]",
"if",
"not",
"getattr",
"(",
"self",
",",
"'project_extension'",
",",
"None",
")",
":",
"self",
".",
"project_extension",
"=",
"'.vcxproj'",
"if",
"not",
"getattr",
"(",
"self",
",",
"'projects_dir'",
",",
"None",
")",
":",
"self",
".",
"projects_dir",
"=",
"self",
".",
"srcnode",
".",
"make_node",
"(",
"'.depproj'",
")",
"self",
".",
"projects_dir",
".",
"mkdir",
"(",
")",
"# bind the classes to the object, so that subclass can provide custom generators",
"if",
"not",
"getattr",
"(",
"self",
",",
"'vsnode_vsdir'",
",",
"None",
")",
":",
"self",
".",
"vsnode_vsdir",
"=",
"vsnode_vsdir",
"if",
"not",
"getattr",
"(",
"self",
",",
"'vsnode_target'",
",",
"None",
")",
":",
"self",
".",
"vsnode_target",
"=",
"vsnode_target",
"if",
"not",
"getattr",
"(",
"self",
",",
"'vsnode_build_all'",
",",
"None",
")",
":",
"self",
".",
"vsnode_build_all",
"=",
"vsnode_build_all",
"if",
"not",
"getattr",
"(",
"self",
",",
"'vsnode_install_all'",
",",
"None",
")",
":",
"self",
".",
"vsnode_install_all",
"=",
"vsnode_install_all",
"if",
"not",
"getattr",
"(",
"self",
",",
"'vsnode_project_view'",
",",
"None",
")",
":",
"self",
".",
"vsnode_project_view",
"=",
"vsnode_project_view",
"self",
".",
"numver",
"=",
"'12.00'",
"self",
".",
"vsver",
"=",
"'2013'"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/build/waf-1.7.13/waflib/extras/msvs.py#L696-L725
|
||
google-ar/WebARonTango
|
e86965d2cbc652156b480e0fcf77c716745578cd
|
chromium/src/gpu/command_buffer/build_gles2_cmd_buffer.py
|
python
|
Function.IsES3
|
(self)
|
return self.GetInfo('es3', False)
|
Returns whether the function requires an ES3 context or not.
|
Returns whether the function requires an ES3 context or not.
|
[
"Returns",
"whether",
"the",
"function",
"requires",
"an",
"ES3",
"context",
"or",
"not",
"."
] |
def IsES3(self):
"""Returns whether the function requires an ES3 context or not."""
return self.GetInfo('es3', False)
|
[
"def",
"IsES3",
"(",
"self",
")",
":",
"return",
"self",
".",
"GetInfo",
"(",
"'es3'",
",",
"False",
")"
] |
https://github.com/google-ar/WebARonTango/blob/e86965d2cbc652156b480e0fcf77c716745578cd/chromium/src/gpu/command_buffer/build_gles2_cmd_buffer.py#L9302-L9304
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/_core.py
|
python
|
Window.SetThemeEnabled
|
(*args, **kwargs)
|
return _core_.Window_SetThemeEnabled(*args, **kwargs)
|
SetThemeEnabled(self, bool enableTheme)
This function tells a window if it should use the system's "theme"
code to draw the windows' background instead of its own background
drawing code. This will only have an effect on platforms that support
the notion of themes in user defined windows. One such platform is
GTK+ where windows can have (very colourful) backgrounds defined by a
user's selected theme.
Dialogs, notebook pages and the status bar have this flag set to true
by default so that the default look and feel is simulated best.
|
SetThemeEnabled(self, bool enableTheme)
|
[
"SetThemeEnabled",
"(",
"self",
"bool",
"enableTheme",
")"
] |
def SetThemeEnabled(*args, **kwargs):
"""
SetThemeEnabled(self, bool enableTheme)
This function tells a window if it should use the system's "theme"
    code to draw the windows' background instead of its own background
drawing code. This will only have an effect on platforms that support
the notion of themes in user defined windows. One such platform is
GTK+ where windows can have (very colourful) backgrounds defined by a
user's selected theme.
Dialogs, notebook pages and the status bar have this flag set to true
by default so that the default look and feel is simulated best.
"""
return _core_.Window_SetThemeEnabled(*args, **kwargs)
|
[
"def",
"SetThemeEnabled",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Window_SetThemeEnabled",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L10099-L10113
|
|
miyosuda/TensorFlowAndroidDemo
|
35903e0221aa5f109ea2dbef27f20b52e317f42d
|
jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/dnn.py
|
python
|
DNNRegressor.__init__
|
(self,
hidden_units,
feature_columns=None,
model_dir=None,
weight_column_name=None,
optimizer=None,
activation_fn=nn.relu,
dropout=None,
gradient_clip_norm=None,
enable_centered_bias=True,
config=None)
|
Initializes a `DNNRegressor` instance.
Args:
hidden_units: List of hidden units per layer. All layers are fully
connected. Ex. `[64, 32]` means first layer has 64 nodes and second one
has 32.
feature_columns: An iterable containing all the feature columns used by
the model. All items in the set should be instances of classes derived
from `FeatureColumn`.
model_dir: Directory to save model parameters, graph, etc. This can also
be used to load checkpoints from the directory into an estimator to continue
training a previously saved model.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
optimizer: An instance of `tf.Optimizer` used to train the model. If
`None`, will use an Adagrad optimizer.
activation_fn: Activation function applied to each layer. If `None`, will
use `tf.nn.relu`.
dropout: When not `None`, the probability we will drop out a given
coordinate.
gradient_clip_norm: A `float` > 0. If provided, gradients are clipped
to their global norm with this clipping ratio. See
`tf.clip_by_global_norm` for more details.
enable_centered_bias: A bool. If True, estimator will learn a centered
bias variable for each class. Rest of the model structure learns the
residual after centered bias.
config: `RunConfig` object to configure the runtime settings.
Returns:
A `DNNRegressor` estimator.
|
Initializes a `DNNRegressor` instance.
|
[
"Initializes",
"a",
"DNNRegressor",
"instance",
"."
] |
def __init__(self,
hidden_units,
feature_columns=None,
model_dir=None,
weight_column_name=None,
optimizer=None,
activation_fn=nn.relu,
dropout=None,
gradient_clip_norm=None,
enable_centered_bias=True,
config=None):
"""Initializes a `DNNRegressor` instance.
Args:
hidden_units: List of hidden units per layer. All layers are fully
connected. Ex. `[64, 32]` means first layer has 64 nodes and second one
has 32.
feature_columns: An iterable containing all the feature columns used by
the model. All items in the set should be instances of classes derived
from `FeatureColumn`.
      model_dir: Directory to save model parameters, graph, etc. This can also
        be used to load checkpoints from the directory into an estimator to continue
        training a previously saved model.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
optimizer: An instance of `tf.Optimizer` used to train the model. If
`None`, will use an Adagrad optimizer.
activation_fn: Activation function applied to each layer. If `None`, will
use `tf.nn.relu`.
dropout: When not `None`, the probability we will drop out a given
coordinate.
gradient_clip_norm: A `float` > 0. If provided, gradients are clipped
to their global norm with this clipping ratio. See
`tf.clip_by_global_norm` for more details.
enable_centered_bias: A bool. If True, estimator will learn a centered
bias variable for each class. Rest of the model structure learns the
residual after centered bias.
config: `RunConfig` object to configure the runtime settings.
Returns:
A `DNNRegressor` estimator.
"""
_changing(feature_columns)
super(DNNRegressor, self).__init__(
model_dir=model_dir,
weight_column_name=weight_column_name,
dnn_feature_columns=feature_columns,
dnn_optimizer=optimizer,
dnn_hidden_units=hidden_units,
dnn_activation_fn=activation_fn,
dnn_dropout=dropout,
gradient_clip_norm=gradient_clip_norm,
enable_centered_bias=enable_centered_bias,
config=config)
self.feature_columns = feature_columns
self.optimizer = optimizer
self.activation_fn = activation_fn
self.dropout = dropout
self.hidden_units = hidden_units
self._feature_columns_inferred = False
|
[
"def",
"__init__",
"(",
"self",
",",
"hidden_units",
",",
"feature_columns",
"=",
"None",
",",
"model_dir",
"=",
"None",
",",
"weight_column_name",
"=",
"None",
",",
"optimizer",
"=",
"None",
",",
"activation_fn",
"=",
"nn",
".",
"relu",
",",
"dropout",
"=",
"None",
",",
"gradient_clip_norm",
"=",
"None",
",",
"enable_centered_bias",
"=",
"True",
",",
"config",
"=",
"None",
")",
":",
"_changing",
"(",
"feature_columns",
")",
"super",
"(",
"DNNRegressor",
",",
"self",
")",
".",
"__init__",
"(",
"model_dir",
"=",
"model_dir",
",",
"weight_column_name",
"=",
"weight_column_name",
",",
"dnn_feature_columns",
"=",
"feature_columns",
",",
"dnn_optimizer",
"=",
"optimizer",
",",
"dnn_hidden_units",
"=",
"hidden_units",
",",
"dnn_activation_fn",
"=",
"activation_fn",
",",
"dnn_dropout",
"=",
"dropout",
",",
"gradient_clip_norm",
"=",
"gradient_clip_norm",
",",
"enable_centered_bias",
"=",
"enable_centered_bias",
",",
"config",
"=",
"config",
")",
"self",
".",
"feature_columns",
"=",
"feature_columns",
"self",
".",
"optimizer",
"=",
"optimizer",
"self",
".",
"activation_fn",
"=",
"activation_fn",
"self",
".",
"dropout",
"=",
"dropout",
"self",
".",
"hidden_units",
"=",
"hidden_units",
"self",
".",
"_feature_columns_inferred",
"=",
"False"
] |
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/dnn.py#L262-L322
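A hedged construction sketch; real_valued_column is from tf.contrib.layers of the same era, and 'age' is a hypothetical feature name:
from tensorflow.contrib import layers
age = layers.real_valued_column('age')
regressor = DNNRegressor(hidden_units=[64, 32], feature_columns=[age])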
|
||
krishauser/Klampt
|
972cc83ea5befac3f653c1ba20f80155768ad519
|
Python/python2_version/klampt/robotsim.py
|
python
|
RobotModelLink.getPointAcceleration
|
(self, plocal, ddq)
|
return _robotsim.RobotModelLink_getPointAcceleration(self, plocal, ddq)
|
getPointAcceleration(RobotModelLink self, double const [3] plocal, doubleVector ddq)
Returns the acceleration of the point given the robot's current joint
configuration and velocities, and the joint accelerations ddq.
Returns:
(list of 3 floats): the acceleration of the point, in
world coordinates.
|
getPointAcceleration(RobotModelLink self, double const [3] plocal, doubleVector ddq)
|
[
"getPointAcceleration",
"(",
"RobotModelLink",
"self",
"double",
"const",
"[",
"3",
"]",
"plocal",
"doubleVector",
"ddq",
")"
] |
def getPointAcceleration(self, plocal, ddq):
"""
getPointAcceleration(RobotModelLink self, double const [3] plocal, doubleVector ddq)
Returns the acceleration of the point given the robot's current joint
configuration and velocities, and the joint accelerations ddq.
Returns:
(list of 3 floats): the acceleration of the point, in
world coordinates.
"""
return _robotsim.RobotModelLink_getPointAcceleration(self, plocal, ddq)
|
[
"def",
"getPointAcceleration",
"(",
"self",
",",
"plocal",
",",
"ddq",
")",
":",
"return",
"_robotsim",
".",
"RobotModelLink_getPointAcceleration",
"(",
"self",
",",
"plocal",
",",
"ddq",
")"
] |
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/robotsim.py#L4148-L4163
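A hedged usage sketch; `robot` stands for an already-loaded Klampt RobotModel, and with ddq all zero the result reduces to the velocity-product term:
link = robot.link(0)
ddq = [0.0] * robot.numLinks()
a_world = link.getPointAcceleration([0.0, 0.0, 0.0], ddq)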
|
|
google/or-tools
|
2cb85b4eead4c38e1c54b48044f92087cf165bce
|
ortools/constraint_solver/samples/vrp_time_windows_per_vehicles.py
|
python
|
create_data_model
|
()
|
return data
|
Stores the data for the problem.
|
Stores the data for the problem.
|
[
"Stores",
"the",
"data",
"for",
"the",
"problem",
"."
] |
def create_data_model():
"""Stores the data for the problem."""
data = {}
data['time_matrix'] = [
[0, 6, 9, 8, 7, 3, 6, 2, 3, 2, 6, 6, 4, 4, 5, 9, 7],
[6, 0, 8, 3, 2, 6, 8, 4, 8, 8, 13, 7, 5, 8, 12, 10, 14],
[9, 8, 0, 11, 10, 6, 3, 9, 5, 8, 4, 15, 14, 13, 9, 18, 9],
[8, 3, 11, 0, 1, 7, 10, 6, 10, 10, 14, 6, 7, 9, 14, 6, 16],
[7, 2, 10, 1, 0, 6, 9, 4, 8, 9, 13, 4, 6, 8, 12, 8, 14],
[3, 6, 6, 7, 6, 0, 2, 3, 2, 2, 7, 9, 7, 7, 6, 12, 8],
[6, 8, 3, 10, 9, 2, 0, 6, 2, 5, 4, 12, 10, 10, 6, 15, 5],
[2, 4, 9, 6, 4, 3, 6, 0, 4, 4, 8, 5, 4, 3, 7, 8, 10],
[3, 8, 5, 10, 8, 2, 2, 4, 0, 3, 4, 9, 8, 7, 3, 13, 6],
[2, 8, 8, 10, 9, 2, 5, 4, 3, 0, 4, 6, 5, 4, 3, 9, 5],
[6, 13, 4, 14, 13, 7, 4, 8, 4, 4, 0, 10, 9, 8, 4, 13, 4],
[6, 7, 15, 6, 4, 9, 12, 5, 9, 6, 10, 0, 1, 3, 7, 3, 10],
[4, 5, 14, 7, 6, 7, 10, 4, 8, 5, 9, 1, 0, 2, 6, 4, 8],
[4, 8, 13, 9, 8, 7, 10, 3, 7, 4, 8, 3, 2, 0, 4, 5, 6],
[5, 12, 9, 14, 12, 6, 6, 7, 3, 3, 4, 7, 6, 4, 0, 9, 2],
[9, 10, 18, 6, 8, 12, 15, 8, 13, 9, 13, 3, 4, 5, 9, 0, 9],
[7, 14, 9, 16, 14, 8, 5, 10, 6, 5, 4, 10, 8, 6, 2, 9, 0],
]
data['num_vehicles'] = 4
data['depot'] = 0
return data
|
[
"def",
"create_data_model",
"(",
")",
":",
"data",
"=",
"{",
"}",
"data",
"[",
"'time_matrix'",
"]",
"=",
"[",
"[",
"0",
",",
"6",
",",
"9",
",",
"8",
",",
"7",
",",
"3",
",",
"6",
",",
"2",
",",
"3",
",",
"2",
",",
"6",
",",
"6",
",",
"4",
",",
"4",
",",
"5",
",",
"9",
",",
"7",
"]",
",",
"[",
"6",
",",
"0",
",",
"8",
",",
"3",
",",
"2",
",",
"6",
",",
"8",
",",
"4",
",",
"8",
",",
"8",
",",
"13",
",",
"7",
",",
"5",
",",
"8",
",",
"12",
",",
"10",
",",
"14",
"]",
",",
"[",
"9",
",",
"8",
",",
"0",
",",
"11",
",",
"10",
",",
"6",
",",
"3",
",",
"9",
",",
"5",
",",
"8",
",",
"4",
",",
"15",
",",
"14",
",",
"13",
",",
"9",
",",
"18",
",",
"9",
"]",
",",
"[",
"8",
",",
"3",
",",
"11",
",",
"0",
",",
"1",
",",
"7",
",",
"10",
",",
"6",
",",
"10",
",",
"10",
",",
"14",
",",
"6",
",",
"7",
",",
"9",
",",
"14",
",",
"6",
",",
"16",
"]",
",",
"[",
"7",
",",
"2",
",",
"10",
",",
"1",
",",
"0",
",",
"6",
",",
"9",
",",
"4",
",",
"8",
",",
"9",
",",
"13",
",",
"4",
",",
"6",
",",
"8",
",",
"12",
",",
"8",
",",
"14",
"]",
",",
"[",
"3",
",",
"6",
",",
"6",
",",
"7",
",",
"6",
",",
"0",
",",
"2",
",",
"3",
",",
"2",
",",
"2",
",",
"7",
",",
"9",
",",
"7",
",",
"7",
",",
"6",
",",
"12",
",",
"8",
"]",
",",
"[",
"6",
",",
"8",
",",
"3",
",",
"10",
",",
"9",
",",
"2",
",",
"0",
",",
"6",
",",
"2",
",",
"5",
",",
"4",
",",
"12",
",",
"10",
",",
"10",
",",
"6",
",",
"15",
",",
"5",
"]",
",",
"[",
"2",
",",
"4",
",",
"9",
",",
"6",
",",
"4",
",",
"3",
",",
"6",
",",
"0",
",",
"4",
",",
"4",
",",
"8",
",",
"5",
",",
"4",
",",
"3",
",",
"7",
",",
"8",
",",
"10",
"]",
",",
"[",
"3",
",",
"8",
",",
"5",
",",
"10",
",",
"8",
",",
"2",
",",
"2",
",",
"4",
",",
"0",
",",
"3",
",",
"4",
",",
"9",
",",
"8",
",",
"7",
",",
"3",
",",
"13",
",",
"6",
"]",
",",
"[",
"2",
",",
"8",
",",
"8",
",",
"10",
",",
"9",
",",
"2",
",",
"5",
",",
"4",
",",
"3",
",",
"0",
",",
"4",
",",
"6",
",",
"5",
",",
"4",
",",
"3",
",",
"9",
",",
"5",
"]",
",",
"[",
"6",
",",
"13",
",",
"4",
",",
"14",
",",
"13",
",",
"7",
",",
"4",
",",
"8",
",",
"4",
",",
"4",
",",
"0",
",",
"10",
",",
"9",
",",
"8",
",",
"4",
",",
"13",
",",
"4",
"]",
",",
"[",
"6",
",",
"7",
",",
"15",
",",
"6",
",",
"4",
",",
"9",
",",
"12",
",",
"5",
",",
"9",
",",
"6",
",",
"10",
",",
"0",
",",
"1",
",",
"3",
",",
"7",
",",
"3",
",",
"10",
"]",
",",
"[",
"4",
",",
"5",
",",
"14",
",",
"7",
",",
"6",
",",
"7",
",",
"10",
",",
"4",
",",
"8",
",",
"5",
",",
"9",
",",
"1",
",",
"0",
",",
"2",
",",
"6",
",",
"4",
",",
"8",
"]",
",",
"[",
"4",
",",
"8",
",",
"13",
",",
"9",
",",
"8",
",",
"7",
",",
"10",
",",
"3",
",",
"7",
",",
"4",
",",
"8",
",",
"3",
",",
"2",
",",
"0",
",",
"4",
",",
"5",
",",
"6",
"]",
",",
"[",
"5",
",",
"12",
",",
"9",
",",
"14",
",",
"12",
",",
"6",
",",
"6",
",",
"7",
",",
"3",
",",
"3",
",",
"4",
",",
"7",
",",
"6",
",",
"4",
",",
"0",
",",
"9",
",",
"2",
"]",
",",
"[",
"9",
",",
"10",
",",
"18",
",",
"6",
",",
"8",
",",
"12",
",",
"15",
",",
"8",
",",
"13",
",",
"9",
",",
"13",
",",
"3",
",",
"4",
",",
"5",
",",
"9",
",",
"0",
",",
"9",
"]",
",",
"[",
"7",
",",
"14",
",",
"9",
",",
"16",
",",
"14",
",",
"8",
",",
"5",
",",
"10",
",",
"6",
",",
"5",
",",
"4",
",",
"10",
",",
"8",
",",
"6",
",",
"2",
",",
"9",
",",
"0",
"]",
",",
"]",
"data",
"[",
"'num_vehicles'",
"]",
"=",
"4",
"data",
"[",
"'depot'",
"]",
"=",
"0",
"return",
"data"
] |
https://github.com/google/or-tools/blob/2cb85b4eead4c38e1c54b48044f92087cf165bce/ortools/constraint_solver/samples/vrp_time_windows_per_vehicles.py#L39-L63
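A short sketch of how the returned dict is consumed; RoutingIndexManager is the real OR-Tools class this sample family feeds:
from ortools.constraint_solver import pywrapcp
data = create_data_model()
manager = pywrapcp.RoutingIndexManager(
    len(data['time_matrix']), data['num_vehicles'], data['depot'])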
|
|
FreeCAD/FreeCAD
|
ba42231b9c6889b89e064d6d563448ed81e376ec
|
src/Mod/Path/PathScripts/PathDressupAxisMap.py
|
python
|
ObjectDressup._linear2angular
|
(self, radius, length)
|
return 360 * (float(length) / circum)
|
returns an angular distance in degrees to achieve a linear move of a given length
|
returns an angular distance in degrees to achieve a linear move of a given length
|
[
"returns",
"an",
"angular",
"distance",
"in",
"degrees",
"to",
"achieve",
"a",
"linear",
"move",
"of",
"a",
"given",
"length"
] |
def _linear2angular(self, radius, length):
"""returns an angular distance in degrees to achieve a linear move of a given length"""
circum = 2 * math.pi * float(radius)
return 360 * (float(length) / circum)
|
[
"def",
"_linear2angular",
"(",
"self",
",",
"radius",
",",
"length",
")",
":",
"circum",
"=",
"2",
"*",
"math",
".",
"pi",
"*",
"float",
"(",
"radius",
")",
"return",
"360",
"*",
"(",
"float",
"(",
"length",
")",
"/",
"circum",
")"
] |
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Path/PathScripts/PathDressupAxisMap.py#L81-L84
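The underlying arithmetic, worked once: a 10 mm move on a 50 mm radius wrap corresponds to 360 * 10 / (2 * pi * 50), about 11.46 degrees:
import math
angle = 360 * (10.0 / (2 * math.pi * 50.0))   # ~11.459 degrees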
|
|
eventql/eventql
|
7ca0dbb2e683b525620ea30dc40540a22d5eb227
|
deps/3rdparty/spidermonkey/mozjs/python/requests/requests/packages/urllib3/packages/ordered_dict.py
|
python
|
OrderedDict.__eq__
|
(self, other)
|
return dict.__eq__(self, other)
|
od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
|
od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
|
[
"od",
".",
"__eq__",
"(",
"y",
")",
"<",
"==",
">",
"od",
"==",
"y",
".",
"Comparison",
"to",
"another",
"OD",
"is",
"order",
"-",
"sensitive",
"while",
"comparison",
"to",
"a",
"regular",
"mapping",
"is",
"order",
"-",
"insensitive",
"."
] |
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
|
[
"def",
"__eq__",
"(",
"self",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"OrderedDict",
")",
":",
"return",
"len",
"(",
"self",
")",
"==",
"len",
"(",
"other",
")",
"and",
"self",
".",
"items",
"(",
")",
"==",
"other",
".",
"items",
"(",
")",
"return",
"dict",
".",
"__eq__",
"(",
"self",
",",
"other",
")"
] |
https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/requests/requests/packages/urllib3/packages/ordered_dict.py#L235-L242
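A short sketch of the order-sensitivity described above; both asserts hold under this __eq__:
a = OrderedDict([(1, 'x'), (2, 'y')])
b = OrderedDict([(2, 'y'), (1, 'x')])
assert a != b                    # same pairs, different order
assert a == {1: 'x', 2: 'y'}     # plain dicts compare order-insensitively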
|
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/threading.py
|
python
|
setprofile
|
(func)
|
Set a profile function for all threads started from the threading module.
The func will be passed to sys.setprofile() for each thread, before its
run() method is called.
|
Set a profile function for all threads started from the threading module.
|
[
"Set",
"a",
"profile",
"function",
"for",
"all",
"threads",
"started",
"from",
"the",
"threading",
"module",
"."
] |
def setprofile(func):
"""Set a profile function for all threads started from the threading module.
The func will be passed to sys.setprofile() for each thread, before its
run() method is called.
"""
global _profile_hook
_profile_hook = func
|
[
"def",
"setprofile",
"(",
"func",
")",
":",
"global",
"_profile_hook",
"_profile_hook",
"=",
"func"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/threading.py#L89-L97
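A minimal usage sketch; the hook below is a deliberate no-op and must be installed before the threads it should observe are started:
import threading
def profiler(frame, event, arg):
    return None
threading.setprofile(profiler)   # handed to sys.setprofile in each new thread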
|
||
BogdanDIA/gr-dvbt
|
79e1c54ef3bd66906c2d94dbc383c35c17b3f088
|
python/ofdm_sync_pn.py
|
python
|
ofdm_sync_pn.__init__
|
(self, fft_length, cp_length, logging=False)
|
OFDM synchronization using PN Correlation:
T. M. Schmidl and D. C. Cox, "Robust Frequency and Timing
Synchronization for OFDM," IEEE Trans. Communications, vol. 45,
no. 12, 1997.
|
OFDM synchronization using PN Correlation:
T. M. Schmidl and D. C. Cox, "Robust Frequency and Timing
Synchronization for OFDM," IEEE Trans. Communications, vol. 45,
no. 12, 1997.
|
[
"OFDM",
"synchronization",
"using",
"PN",
"Correlation",
":",
"T",
".",
"M",
".",
"Schmidl",
"and",
"D",
".",
"C",
".",
"Cox",
"Robust",
"Frequency",
"and",
"Timing",
"Synchonization",
"for",
"OFDM",
"IEEE",
"Trans",
".",
"Communications",
"vol",
".",
"45",
"no",
".",
"12",
"1997",
"."
] |
def __init__(self, fft_length, cp_length, logging=False):
"""
OFDM synchronization using PN Correlation:
T. M. Schmidl and D. C. Cox, "Robust Frequency and Timing
    Synchronization for OFDM," IEEE Trans. Communications, vol. 45,
no. 12, 1997.
"""
gr.hier_block2.__init__(self, "ofdm_sync_pn",
gr.io_signature(1, 1, gr.sizeof_gr_complex), # Input signature
gr.io_signature2(2, 2, gr.sizeof_float, gr.sizeof_char)) # Output signature
self.input = gr.add_const_cc(0)
# PN Sync
# Create a delay line
self.delay = gr.delay(gr.sizeof_gr_complex, fft_length/2)
# Correlation from ML Sync
self.conjg = gr.conjugate_cc();
self.corr = gr.multiply_cc();
# Create a moving sum filter for the corr output
if 1:
moving_sum_taps = [1.0 for i in range(fft_length//2)]
self.moving_sum_filter = gr.fir_filter_ccf(1,moving_sum_taps)
else:
moving_sum_taps = [complex(1.0,0.0) for i in range(fft_length//2)]
self.moving_sum_filter = gr.fft_filter_ccc(1,moving_sum_taps)
# Create a moving sum filter for the input
self.inputmag2 = gr.complex_to_mag_squared()
movingsum2_taps = [1.0 for i in range(fft_length//2)]
if 1:
self.inputmovingsum = gr.fir_filter_fff(1,movingsum2_taps)
else:
self.inputmovingsum = gr.fft_filter_fff(1,movingsum2_taps)
self.square = gr.multiply_ff()
self.normalize = gr.divide_ff()
# Get magnitude (peaks) and angle (phase/freq error)
self.c2mag = gr.complex_to_mag_squared()
self.angle = gr.complex_to_arg()
self.sample_and_hold = gr.sample_and_hold_ff()
#ML measurements input to sampler block and detect
#self.sub1 = gr.add_const_ff(-1)
self.sub1 = gr.add_const_ff(0)
self.pk_detect = gr.peak_detector_fb(0.20, 0.20, 30, 0.001)
#self.pk_detect = gr.peak_detector2_fb(9)
self.connect(self, self.input)
# Calculate the frequency offset from the correlation of the preamble
self.connect(self.input, self.delay)
self.connect(self.input, (self.corr,0))
self.connect(self.delay, self.conjg)
self.connect(self.conjg, (self.corr,1))
self.connect(self.corr, self.moving_sum_filter)
self.connect(self.moving_sum_filter, self.c2mag)
self.connect(self.moving_sum_filter, self.angle)
self.connect(self.angle, (self.sample_and_hold,0))
# Get the power of the input signal to normalize the output of the correlation
self.connect(self.input, self.inputmag2, self.inputmovingsum)
self.connect(self.inputmovingsum, (self.square,0))
self.connect(self.inputmovingsum, (self.square,1))
self.connect(self.square, (self.normalize,1))
self.connect(self.c2mag, (self.normalize,0))
# Create a moving sum filter for the corr output
matched_filter_taps = [1.0/cp_length for i in range(cp_length)]
self.matched_filter = gr.fir_filter_fff(1,matched_filter_taps)
self.connect(self.normalize, self.matched_filter)
self.connect(self.matched_filter, self.sub1, self.pk_detect)
#self.connect(self.matched_filter, self.pk_detect)
self.connect(self.pk_detect, (self.sample_and_hold,1))
# Set output signals
# Output 0: fine frequency correction value
# Output 1: timing signal
self.connect(self.sample_and_hold, (self,0))
self.connect(self.pk_detect, (self,1))
if logging:
self.connect(self.matched_filter, gr.file_sink(gr.sizeof_float, "ofdm_sync_pn-mf_f.dat"))
self.connect(self.normalize, gr.file_sink(gr.sizeof_float, "ofdm_sync_pn-theta_f.dat"))
self.connect(self.angle, gr.file_sink(gr.sizeof_float, "ofdm_sync_pn-epsilon_f.dat"))
self.connect(self.pk_detect, gr.file_sink(gr.sizeof_char, "ofdm_sync_pn-peaks_b.dat"))
self.connect(self.sample_and_hold, gr.file_sink(gr.sizeof_float, "ofdm_sync_pn-sample_and_hold_f.dat"))
self.connect(self.input, gr.file_sink(gr.sizeof_gr_complex, "ofdm_sync_pn-input_c.dat"))
|
[
"def",
"__init__",
"(",
"self",
",",
"fft_length",
",",
"cp_length",
",",
"logging",
"=",
"False",
")",
":",
"gr",
".",
"hier_block2",
".",
"__init__",
"(",
"self",
",",
"\"ofdm_sync_pn\"",
",",
"gr",
".",
"io_signature",
"(",
"1",
",",
"1",
",",
"gr",
".",
"sizeof_gr_complex",
")",
",",
"# Input signature",
"gr",
".",
"io_signature2",
"(",
"2",
",",
"2",
",",
"gr",
".",
"sizeof_float",
",",
"gr",
".",
"sizeof_char",
")",
")",
"# Output signature",
"self",
".",
"input",
"=",
"gr",
".",
"add_const_cc",
"(",
"0",
")",
"# PN Sync",
"# Create a delay line",
"self",
".",
"delay",
"=",
"gr",
".",
"delay",
"(",
"gr",
".",
"sizeof_gr_complex",
",",
"fft_length",
"/",
"2",
")",
"# Correlation from ML Sync",
"self",
".",
"conjg",
"=",
"gr",
".",
"conjugate_cc",
"(",
")",
"self",
".",
"corr",
"=",
"gr",
".",
"multiply_cc",
"(",
")",
"# Create a moving sum filter for the corr output",
"if",
"1",
":",
"moving_sum_taps",
"=",
"[",
"1.0",
"for",
"i",
"in",
"range",
"(",
"fft_length",
"//",
"2",
")",
"]",
"self",
".",
"moving_sum_filter",
"=",
"gr",
".",
"fir_filter_ccf",
"(",
"1",
",",
"moving_sum_taps",
")",
"else",
":",
"moving_sum_taps",
"=",
"[",
"complex",
"(",
"1.0",
",",
"0.0",
")",
"for",
"i",
"in",
"range",
"(",
"fft_length",
"//",
"2",
")",
"]",
"self",
".",
"moving_sum_filter",
"=",
"gr",
".",
"fft_filter_ccc",
"(",
"1",
",",
"moving_sum_taps",
")",
"# Create a moving sum filter for the input",
"self",
".",
"inputmag2",
"=",
"gr",
".",
"complex_to_mag_squared",
"(",
")",
"movingsum2_taps",
"=",
"[",
"1.0",
"for",
"i",
"in",
"range",
"(",
"fft_length",
"//",
"2",
")",
"]",
"if",
"1",
":",
"self",
".",
"inputmovingsum",
"=",
"gr",
".",
"fir_filter_fff",
"(",
"1",
",",
"movingsum2_taps",
")",
"else",
":",
"self",
".",
"inputmovingsum",
"=",
"gr",
".",
"fft_filter_fff",
"(",
"1",
",",
"movingsum2_taps",
")",
"self",
".",
"square",
"=",
"gr",
".",
"multiply_ff",
"(",
")",
"self",
".",
"normalize",
"=",
"gr",
".",
"divide_ff",
"(",
")",
"# Get magnitude (peaks) and angle (phase/freq error)",
"self",
".",
"c2mag",
"=",
"gr",
".",
"complex_to_mag_squared",
"(",
")",
"self",
".",
"angle",
"=",
"gr",
".",
"complex_to_arg",
"(",
")",
"self",
".",
"sample_and_hold",
"=",
"gr",
".",
"sample_and_hold_ff",
"(",
")",
"#ML measurements input to sampler block and detect",
"#self.sub1 = gr.add_const_ff(-1)",
"self",
".",
"sub1",
"=",
"gr",
".",
"add_const_ff",
"(",
"0",
")",
"self",
".",
"pk_detect",
"=",
"gr",
".",
"peak_detector_fb",
"(",
"0.20",
",",
"0.20",
",",
"30",
",",
"0.001",
")",
"#self.pk_detect = gr.peak_detector2_fb(9)",
"self",
".",
"connect",
"(",
"self",
",",
"self",
".",
"input",
")",
"# Calculate the frequency offset from the correlation of the preamble",
"self",
".",
"connect",
"(",
"self",
".",
"input",
",",
"self",
".",
"delay",
")",
"self",
".",
"connect",
"(",
"self",
".",
"input",
",",
"(",
"self",
".",
"corr",
",",
"0",
")",
")",
"self",
".",
"connect",
"(",
"self",
".",
"delay",
",",
"self",
".",
"conjg",
")",
"self",
".",
"connect",
"(",
"self",
".",
"conjg",
",",
"(",
"self",
".",
"corr",
",",
"1",
")",
")",
"self",
".",
"connect",
"(",
"self",
".",
"corr",
",",
"self",
".",
"moving_sum_filter",
")",
"self",
".",
"connect",
"(",
"self",
".",
"moving_sum_filter",
",",
"self",
".",
"c2mag",
")",
"self",
".",
"connect",
"(",
"self",
".",
"moving_sum_filter",
",",
"self",
".",
"angle",
")",
"self",
".",
"connect",
"(",
"self",
".",
"angle",
",",
"(",
"self",
".",
"sample_and_hold",
",",
"0",
")",
")",
"# Get the power of the input signal to normalize the output of the correlation",
"self",
".",
"connect",
"(",
"self",
".",
"input",
",",
"self",
".",
"inputmag2",
",",
"self",
".",
"inputmovingsum",
")",
"self",
".",
"connect",
"(",
"self",
".",
"inputmovingsum",
",",
"(",
"self",
".",
"square",
",",
"0",
")",
")",
"self",
".",
"connect",
"(",
"self",
".",
"inputmovingsum",
",",
"(",
"self",
".",
"square",
",",
"1",
")",
")",
"self",
".",
"connect",
"(",
"self",
".",
"square",
",",
"(",
"self",
".",
"normalize",
",",
"1",
")",
")",
"self",
".",
"connect",
"(",
"self",
".",
"c2mag",
",",
"(",
"self",
".",
"normalize",
",",
"0",
")",
")",
"# Create a moving sum filter for the corr output",
"matched_filter_taps",
"=",
"[",
"1.0",
"/",
"cp_length",
"for",
"i",
"in",
"range",
"(",
"cp_length",
")",
"]",
"self",
".",
"matched_filter",
"=",
"gr",
".",
"fir_filter_fff",
"(",
"1",
",",
"matched_filter_taps",
")",
"self",
".",
"connect",
"(",
"self",
".",
"normalize",
",",
"self",
".",
"matched_filter",
")",
"self",
".",
"connect",
"(",
"self",
".",
"matched_filter",
",",
"self",
".",
"sub1",
",",
"self",
".",
"pk_detect",
")",
"#self.connect(self.matched_filter, self.pk_detect)",
"self",
".",
"connect",
"(",
"self",
".",
"pk_detect",
",",
"(",
"self",
".",
"sample_and_hold",
",",
"1",
")",
")",
"# Set output signals",
"# Output 0: fine frequency correction value",
"# Output 1: timing signal",
"self",
".",
"connect",
"(",
"self",
".",
"sample_and_hold",
",",
"(",
"self",
",",
"0",
")",
")",
"self",
".",
"connect",
"(",
"self",
".",
"pk_detect",
",",
"(",
"self",
",",
"1",
")",
")",
"if",
"logging",
":",
"self",
".",
"connect",
"(",
"self",
".",
"matched_filter",
",",
"gr",
".",
"file_sink",
"(",
"gr",
".",
"sizeof_float",
",",
"\"ofdm_sync_pn-mf_f.dat\"",
")",
")",
"self",
".",
"connect",
"(",
"self",
".",
"normalize",
",",
"gr",
".",
"file_sink",
"(",
"gr",
".",
"sizeof_float",
",",
"\"ofdm_sync_pn-theta_f.dat\"",
")",
")",
"self",
".",
"connect",
"(",
"self",
".",
"angle",
",",
"gr",
".",
"file_sink",
"(",
"gr",
".",
"sizeof_float",
",",
"\"ofdm_sync_pn-epsilon_f.dat\"",
")",
")",
"self",
".",
"connect",
"(",
"self",
".",
"pk_detect",
",",
"gr",
".",
"file_sink",
"(",
"gr",
".",
"sizeof_char",
",",
"\"ofdm_sync_pn-peaks_b.dat\"",
")",
")",
"self",
".",
"connect",
"(",
"self",
".",
"sample_and_hold",
",",
"gr",
".",
"file_sink",
"(",
"gr",
".",
"sizeof_float",
",",
"\"ofdm_sync_pn-sample_and_hold_f.dat\"",
")",
")",
"self",
".",
"connect",
"(",
"self",
".",
"input",
",",
"gr",
".",
"file_sink",
"(",
"gr",
".",
"sizeof_gr_complex",
",",
"\"ofdm_sync_pn-input_c.dat\"",
")",
")"
] |
https://github.com/BogdanDIA/gr-dvbt/blob/79e1c54ef3bd66906c2d94dbc383c35c17b3f088/python/ofdm_sync_pn.py#L28-L123
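The flowgraph above implements a normalized delay-and-correlate timing metric. As a hedged sketch of the same computation in array form (NumPy; the exact window placement is an assumption, and the real block wires gr.* filter blocks together rather than computing arrays):

import numpy as np

def timing_metric(x, fft_length, cp_length):
    # Delay-and-multiply over half an FFT length, as in self.corr above.
    L = fft_length // 2
    corr = x[L:] * np.conj(x[:-L])
    window = np.ones(L)                            # moving_sum_taps analogue
    num = np.abs(np.convolve(corr, window, 'valid')) ** 2
    # Windowed input power, as in inputmag2 -> inputmovingsum -> square.
    power = np.convolve(np.abs(x) ** 2, window, 'valid')[:len(num)]
    metric = num / np.maximum(power ** 2, 1e-12)   # the normalize block
    mf = np.ones(cp_length) / cp_length            # matched_filter_taps
    return np.convolve(metric, mf, 'valid')

x = np.exp(2j * np.pi * 0.01 * np.arange(256))     # toy input signal
print(timing_metric(x, fft_length=64, cp_length=16).shape)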
|
||
trilinos/Trilinos
|
6168be6dd51e35e1cd681e9c4b24433e709df140
|
packages/seacas/scripts/exomerge2.py
|
python
|
ExodusModel.get_side_set_field_values
|
(self,
side_set_field_name,
side_set_id='auto',
timestep='last')
|
return fields[side_set_field_name][timestep_index]
|
Return the list of side set field values.
The actual list of values is returned, so any modifications to it will
be stored in the model.
Examples:
>>> model.get_side_set_field_values('contact_pressure', side_set_id=1)
>>> model.get_side_set_field_values('contact_pressure', timestep=2.0)
>>> model.get_side_set_field_values('contact_pressure',
... side_set_id=5,
... timestep='last')
|
Return the list of side set field values.
|
[
"Return",
"the",
"list",
"of",
"side",
"set",
"field",
"values",
"."
] |
def get_side_set_field_values(self,
side_set_field_name,
side_set_id='auto',
timestep='last'):
"""
Return the list of side set field values.
The actual list of values is returned, so any modifications to it will
be stored in the model.
Examples:
>>> model.get_side_set_field_values('contact_pressure', side_set_id=1)
>>> model.get_side_set_field_values('contact_pressure', timestep=2.0)
>>> model.get_side_set_field_values('contact_pressure',
... side_set_id=5,
... timestep='last')
"""
[side_set_id] = self._format_side_set_id_list(
[side_set_id],
single=True)
[side_set_field_name] = self._format_id_list(
[side_set_field_name],
self.get_side_set_field_names(),
'side set field',
single=True)
[timestep] = self._format_id_list(
[timestep],
self.get_timesteps(),
'timestep',
single=True)
timestep_index = self._get_internal_timestep_index(timestep)
if not self.side_set_field_exists(side_set_field_name,
side_set_id):
self._missing_on_entity_error(
side_set_field_name,
'side set field',
side_set_id,
'side set')
fields = self._get_side_set_fields(side_set_id)
return fields[side_set_field_name][timestep_index]
|
[
"def",
"get_side_set_field_values",
"(",
"self",
",",
"side_set_field_name",
",",
"side_set_id",
"=",
"'auto'",
",",
"timestep",
"=",
"'last'",
")",
":",
"[",
"side_set_id",
"]",
"=",
"self",
".",
"_format_side_set_id_list",
"(",
"[",
"side_set_id",
"]",
",",
"single",
"=",
"True",
")",
"[",
"side_set_field_name",
"]",
"=",
"self",
".",
"_format_id_list",
"(",
"[",
"side_set_field_name",
"]",
",",
"self",
".",
"get_side_set_field_names",
"(",
")",
",",
"'side set field'",
",",
"single",
"=",
"True",
")",
"[",
"timestep",
"]",
"=",
"self",
".",
"_format_id_list",
"(",
"[",
"timestep",
"]",
",",
"self",
".",
"get_timesteps",
"(",
")",
",",
"'timestep'",
",",
"single",
"=",
"True",
")",
"timestep_index",
"=",
"self",
".",
"_get_internal_timestep_index",
"(",
"timestep",
")",
"if",
"not",
"self",
".",
"side_set_field_exists",
"(",
"side_set_field_name",
",",
"side_set_id",
")",
":",
"self",
".",
"_missing_on_entity_error",
"(",
"side_set_field_name",
",",
"'side set field'",
",",
"side_set_id",
",",
"'side set'",
")",
"fields",
"=",
"self",
".",
"_get_side_set_fields",
"(",
"side_set_id",
")",
"return",
"fields",
"[",
"side_set_field_name",
"]",
"[",
"timestep_index",
"]"
] |
https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/scripts/exomerge2.py#L3162-L3202
|
|
GoSSIP-SJTU/Armariris
|
ad5d868482956b2194a77b39c8d543c7c2318200
|
tools/clang/bindings/python/clang/cindex.py
|
python
|
Type.translation_unit
|
(self)
|
return self._tu
|
The TranslationUnit to which this Type is associated.
|
The TranslationUnit to which this Type is associated.
|
[
"The",
"TranslationUnit",
"to",
"which",
"this",
"Type",
"is",
"associated",
"."
] |
def translation_unit(self):
"""The TranslationUnit to which this Type is associated."""
# If this triggers an AttributeError, the instance was not properly
# instantiated.
return self._tu
|
[
"def",
"translation_unit",
"(",
"self",
")",
":",
"# If this triggers an AttributeError, the instance was not properly",
"# instantiated.",
"return",
"self",
".",
"_tu"
] |
https://github.com/GoSSIP-SJTU/Armariris/blob/ad5d868482956b2194a77b39c8d543c7c2318200/tools/clang/bindings/python/clang/cindex.py#L1850-L1854
|
|
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Source/ThirdParty/CEF3/cef_source/tools/cef_parser.py
|
python
|
obj_typedef.get_value
|
(self)
|
return self.value
|
Return an analysis of the value based on the class or header file
definition context.
|
Return an analysis of the value based on the class or header file
definition context.
|
[
"Return",
"an",
"analysis",
"of",
"the",
"value",
"based",
"on",
"the",
"class",
"or",
"header",
"file",
"definition",
"context",
"."
] |
def get_value(self):
""" Return an analysis of the value based on the class or header file
definition context.
"""
return self.value
|
[
"def",
"get_value",
"(",
"self",
")",
":",
"return",
"self",
".",
"value"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Source/ThirdParty/CEF3/cef_source/tools/cef_parser.py#L1020-L1024
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/_windows.py
|
python
|
PrintPreview.GetPrintDialogData
|
(*args, **kwargs)
|
return _windows_.PrintPreview_GetPrintDialogData(*args, **kwargs)
|
GetPrintDialogData(self) -> PrintDialogData
|
GetPrintDialogData(self) -> PrintDialogData
|
[
"GetPrintDialogData",
"(",
"self",
")",
"-",
">",
"PrintDialogData"
] |
def GetPrintDialogData(*args, **kwargs):
"""GetPrintDialogData(self) -> PrintDialogData"""
return _windows_.PrintPreview_GetPrintDialogData(*args, **kwargs)
|
[
"def",
"GetPrintDialogData",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"PrintPreview_GetPrintDialogData",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_windows.py#L5621-L5623
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/_pydecimal.py
|
python
|
Decimal.__repr__
|
(self)
|
return "Decimal('%s')" % str(self)
|
Represents the number as an instance of Decimal.
|
Represents the number as an instance of Decimal.
|
[
"Represents",
"the",
"number",
"as",
"an",
"instance",
"of",
"Decimal",
"."
] |
def __repr__(self):
"""Represents the number as an instance of Decimal."""
# Invariant: eval(repr(d)) == d
return "Decimal('%s')" % str(self)
|
[
"def",
"__repr__",
"(",
"self",
")",
":",
"# Invariant: eval(repr(d)) == d",
"return",
"\"Decimal('%s')\"",
"%",
"str",
"(",
"self",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/_pydecimal.py#L1026-L1029
|
|
eric612/Caffe-YOLOv3-Windows
|
6736ca6e16781789b828cc64218ff77cc3454e5d
|
scripts/cpp_lint.py
|
python
|
CheckMakePairUsesDeduction
|
(filename, clean_lines, linenum, error)
|
Check that make_pair's template arguments are deduced.
G++ 4.6 in C++0x mode fails badly if make_pair's template arguments are
specified explicitly, and such use isn't intended in any case.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
|
Check that make_pair's template arguments are deduced.
|
[
"Check",
"that",
"make_pair",
"s",
"template",
"arguments",
"are",
"deduced",
"."
] |
def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error):
"""Check that make_pair's template arguments are deduced.
G++ 4.6 in C++0x mode fails badly if make_pair's template arguments are
specified explicitly, and such use isn't intended in any case.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
match = _RE_PATTERN_EXPLICIT_MAKEPAIR.search(line)
if match:
error(filename, linenum, 'build/explicit_make_pair',
4, # 4 = high confidence
'For C++11-compatibility, omit template arguments from make_pair'
' OR use pair directly OR if appropriate, construct a pair directly')
|
[
"def",
"CheckMakePairUsesDeduction",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"match",
"=",
"_RE_PATTERN_EXPLICIT_MAKEPAIR",
".",
"search",
"(",
"line",
")",
"if",
"match",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/explicit_make_pair'",
",",
"4",
",",
"# 4 = high confidence",
"'For C++11-compatibility, omit template arguments from make_pair'",
"' OR use pair directly OR if appropriate, construct a pair directly'",
")"
] |
https://github.com/eric612/Caffe-YOLOv3-Windows/blob/6736ca6e16781789b828cc64218ff77cc3454e5d/scripts/cpp_lint.py#L4583-L4601
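A hedged sketch of exercising this check in isolation. FakeCleansedLines is a hypothetical stand-in exposing only the .elided attribute the function reads, and the regex merely approximates cpplint's _RE_PATTERN_EXPLICIT_MAKEPAIR, which is defined elsewhere in cpp_lint.py:

import re

# Approximation of _RE_PATTERN_EXPLICIT_MAKEPAIR (assumption): flags
# make_pair<...> with explicit template arguments.
_EXPLICIT_MAKEPAIR = re.compile(r'\bmake_pair\s*<')

class FakeCleansedLines:
    """Minimal stand-in exposing only the .elided list the check reads."""
    def __init__(self, lines):
        self.elided = lines

def collect_error(filename, linenum, category, confidence, message):
    print('%s:%d: %s [%s] [%d]' % (filename, linenum, message,
                                   category, confidence))

clean_lines = FakeCleansedLines(['auto p = std::make_pair<int, int>(1, 2);'])
if _EXPLICIT_MAKEPAIR.search(clean_lines.elided[0]):
    collect_error('demo.cc', 0, 'build/explicit_make_pair', 4,
                  'omit template arguments from make_pair')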
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/_misc.py
|
python
|
ToolTip.SetAutoPop
|
(*args, **kwargs)
|
return _misc_.ToolTip_SetAutoPop(*args, **kwargs)
|
SetAutoPop(long milliseconds)
|
SetAutoPop(long milliseconds)
|
[
"SetAutoPop",
"(",
"long",
"milliseconds",
")"
] |
def SetAutoPop(*args, **kwargs):
"""SetAutoPop(long milliseconds)"""
return _misc_.ToolTip_SetAutoPop(*args, **kwargs)
|
[
"def",
"SetAutoPop",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"ToolTip_SetAutoPop",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_misc.py#L689-L691
|
|
apache/arrow
|
af33dd1157eb8d7d9bfac25ebf61445b793b7943
|
cpp/build-support/cpplint.py
|
python
|
ProcessConfigOverrides
|
(filename)
|
return True
|
Loads the configuration files and processes the config overrides.
Args:
filename: The name of the file being processed by the linter.
Returns:
False if the current |filename| should not be processed further.
|
Loads the configuration files and processes the config overrides.
|
[
"Loads",
"the",
"configuration",
"files",
"and",
"processes",
"the",
"config",
"overrides",
"."
] |
def ProcessConfigOverrides(filename):
""" Loads the configuration files and processes the config overrides.
Args:
filename: The name of the file being processed by the linter.
Returns:
False if the current |filename| should not be processed further.
"""
abs_filename = os.path.abspath(filename)
cfg_filters = []
keep_looking = True
while keep_looking:
abs_path, base_name = os.path.split(abs_filename)
if not base_name:
break # Reached the root directory.
cfg_file = os.path.join(abs_path, "CPPLINT.cfg")
abs_filename = abs_path
if not os.path.isfile(cfg_file):
continue
try:
with open(cfg_file) as file_handle:
for line in file_handle:
line, _, _ = line.partition('#') # Remove comments.
if not line.strip():
continue
name, _, val = line.partition('=')
name = name.strip()
val = val.strip()
if name == 'set noparent':
keep_looking = False
elif name == 'filter':
cfg_filters.append(val)
elif name == 'exclude_files':
# When matching exclude_files pattern, use the base_name of
# the current file name or the directory name we are processing.
# For example, if we are checking for lint errors in /foo/bar/baz.cc
# and we found the .cfg file at /foo/CPPLINT.cfg, then the config
# file's "exclude_files" filter is meant to be checked against "bar"
# and not "baz" nor "bar/baz.cc".
if base_name:
pattern = re.compile(val)
if pattern.match(base_name):
_cpplint_state.PrintInfo('Ignoring "%s": file excluded by '
'"%s". File path component "%s" matches pattern "%s"\n' %
(filename, cfg_file, base_name, val))
return False
elif name == 'linelength':
global _line_length
try:
_line_length = int(val)
except ValueError:
_cpplint_state.PrintError('Line length must be numeric.')
elif name == 'extensions':
global _valid_extensions
try:
extensions = [ext.strip() for ext in val.split(',')]
_valid_extensions = set(extensions)
except ValueError:
sys.stderr.write('Extensions should be a comma-separated list of values;'
'for example: extensions=hpp,cpp\n'
'This could not be parsed: "%s"' % (val,))
elif name == 'headers':
global _header_extensions
try:
extensions = [ext.strip() for ext in val.split(',')]
_header_extensions = set(extensions)
except ValueError:
sys.stderr.write('Extensions should be a comma-separated list of values;'
'for example: extensions=hpp,cpp\n'
'This could not be parsed: "%s"' % (val,))
elif name == 'root':
global _root
_root = val
else:
_cpplint_state.PrintError(
'Invalid configuration option (%s) in file %s\n' %
(name, cfg_file))
except IOError:
_cpplint_state.PrintError(
"Skipping config file '%s': Can't open for reading\n" % cfg_file)
keep_looking = False
# Apply all the accumulated filters in reverse order (top-level directory
# config options having the least priority).
for cfg_filter in reversed(cfg_filters):
_AddFilters(cfg_filter)
return True
|
[
"def",
"ProcessConfigOverrides",
"(",
"filename",
")",
":",
"abs_filename",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"filename",
")",
"cfg_filters",
"=",
"[",
"]",
"keep_looking",
"=",
"True",
"while",
"keep_looking",
":",
"abs_path",
",",
"base_name",
"=",
"os",
".",
"path",
".",
"split",
"(",
"abs_filename",
")",
"if",
"not",
"base_name",
":",
"break",
"# Reached the root directory.",
"cfg_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"abs_path",
",",
"\"CPPLINT.cfg\"",
")",
"abs_filename",
"=",
"abs_path",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"cfg_file",
")",
":",
"continue",
"try",
":",
"with",
"open",
"(",
"cfg_file",
")",
"as",
"file_handle",
":",
"for",
"line",
"in",
"file_handle",
":",
"line",
",",
"_",
",",
"_",
"=",
"line",
".",
"partition",
"(",
"'#'",
")",
"# Remove comments.",
"if",
"not",
"line",
".",
"strip",
"(",
")",
":",
"continue",
"name",
",",
"_",
",",
"val",
"=",
"line",
".",
"partition",
"(",
"'='",
")",
"name",
"=",
"name",
".",
"strip",
"(",
")",
"val",
"=",
"val",
".",
"strip",
"(",
")",
"if",
"name",
"==",
"'set noparent'",
":",
"keep_looking",
"=",
"False",
"elif",
"name",
"==",
"'filter'",
":",
"cfg_filters",
".",
"append",
"(",
"val",
")",
"elif",
"name",
"==",
"'exclude_files'",
":",
"# When matching exclude_files pattern, use the base_name of",
"# the current file name or the directory name we are processing.",
"# For example, if we are checking for lint errors in /foo/bar/baz.cc",
"# and we found the .cfg file at /foo/CPPLINT.cfg, then the config",
"# file's \"exclude_files\" filter is meant to be checked against \"bar\"",
"# and not \"baz\" nor \"bar/baz.cc\".",
"if",
"base_name",
":",
"pattern",
"=",
"re",
".",
"compile",
"(",
"val",
")",
"if",
"pattern",
".",
"match",
"(",
"base_name",
")",
":",
"_cpplint_state",
".",
"PrintInfo",
"(",
"'Ignoring \"%s\": file excluded by '",
"'\"%s\". File path component \"%s\" matches pattern \"%s\"\\n'",
"%",
"(",
"filename",
",",
"cfg_file",
",",
"base_name",
",",
"val",
")",
")",
"return",
"False",
"elif",
"name",
"==",
"'linelength'",
":",
"global",
"_line_length",
"try",
":",
"_line_length",
"=",
"int",
"(",
"val",
")",
"except",
"ValueError",
":",
"_cpplint_state",
".",
"PrintError",
"(",
"'Line length must be numeric.'",
")",
"elif",
"name",
"==",
"'extensions'",
":",
"global",
"_valid_extensions",
"try",
":",
"extensions",
"=",
"[",
"ext",
".",
"strip",
"(",
")",
"for",
"ext",
"in",
"val",
".",
"split",
"(",
"','",
")",
"]",
"_valid_extensions",
"=",
"set",
"(",
"extensions",
")",
"except",
"ValueError",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Extensions should be a comma-separated list of values;'",
"'for example: extensions=hpp,cpp\\n'",
"'This could not be parsed: \"%s\"'",
"%",
"(",
"val",
",",
")",
")",
"elif",
"name",
"==",
"'headers'",
":",
"global",
"_header_extensions",
"try",
":",
"extensions",
"=",
"[",
"ext",
".",
"strip",
"(",
")",
"for",
"ext",
"in",
"val",
".",
"split",
"(",
"','",
")",
"]",
"_header_extensions",
"=",
"set",
"(",
"extensions",
")",
"except",
"ValueError",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Extensions should be a comma-separated list of values;'",
"'for example: extensions=hpp,cpp\\n'",
"'This could not be parsed: \"%s\"'",
"%",
"(",
"val",
",",
")",
")",
"elif",
"name",
"==",
"'root'",
":",
"global",
"_root",
"_root",
"=",
"val",
"else",
":",
"_cpplint_state",
".",
"PrintError",
"(",
"'Invalid configuration option (%s) in file %s\\n'",
"%",
"(",
"name",
",",
"cfg_file",
")",
")",
"except",
"IOError",
":",
"_cpplint_state",
".",
"PrintError",
"(",
"\"Skipping config file '%s': Can't open for reading\\n\"",
"%",
"cfg_file",
")",
"keep_looking",
"=",
"False",
"# Apply all the accumulated filters in reverse order (top-level directory",
"# config options having the least priority).",
"for",
"cfg_filter",
"in",
"reversed",
"(",
"cfg_filters",
")",
":",
"_AddFilters",
"(",
"cfg_filter",
")",
"return",
"True"
] |
https://github.com/apache/arrow/blob/af33dd1157eb8d7d9bfac25ebf61445b793b7943/cpp/build-support/cpplint.py#L6107-L6200
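For reference, a CPPLINT.cfg exercising every option the parser above handles; the values are illustrative only:

# Write a sample CPPLINT.cfg next to the sources being linted.
cfg = '''\
set noparent
filter=-build/include_order,+build/include_alpha
exclude_files=third_party/.*
linelength=100
extensions=h,hpp,cc,cpp
headers=h,hpp
root=src
'''
with open('CPPLINT.cfg', 'w') as f:
    f.write(cfg)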
|
|
mindspore-ai/mindspore
|
fb8fd3338605bb34fa5cea054e535a8b1d753fab
|
mindspore/python/mindspore/dataset/audio/validators.py
|
python
|
check_time_stretch
|
(method)
|
return new_method
|
Wrapper method to check the parameters of TimeStretch.
|
Wrapper method to check the parameters of TimeStretch.
|
[
"Wrapper",
"method",
"to",
"check",
"the",
"parameters",
"of",
"TimeStretch",
"."
] |
def check_time_stretch(method):
"""Wrapper method to check the parameters of TimeStretch."""
@wraps(method)
def new_method(self, *args, **kwargs):
[hop_length, n_freq, fixed_rate], _ = parse_user_args(method, *args, **kwargs)
if hop_length is not None:
type_check(hop_length, (int,), "hop_length")
check_pos_int32(hop_length, "hop_length")
type_check(n_freq, (int,), "n_freq")
check_pos_int32(n_freq, "n_freq")
if fixed_rate is not None:
type_check(fixed_rate, (int, float), "fixed_rate")
check_pos_float32(fixed_rate, "fixed_rate")
return method(self, *args, **kwargs)
return new_method
|
[
"def",
"check_time_stretch",
"(",
"method",
")",
":",
"@",
"wraps",
"(",
"method",
")",
"def",
"new_method",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"[",
"hop_length",
",",
"n_freq",
",",
"fixed_rate",
"]",
",",
"_",
"=",
"parse_user_args",
"(",
"method",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"hop_length",
"is",
"not",
"None",
":",
"type_check",
"(",
"hop_length",
",",
"(",
"int",
",",
")",
",",
"\"hop_length\"",
")",
"check_pos_int32",
"(",
"hop_length",
",",
"\"hop_length\"",
")",
"type_check",
"(",
"n_freq",
",",
"(",
"int",
",",
")",
",",
"\"n_freq\"",
")",
"check_pos_int32",
"(",
"n_freq",
",",
"\"n_freq\"",
")",
"if",
"fixed_rate",
"is",
"not",
"None",
":",
"type_check",
"(",
"fixed_rate",
",",
"(",
"int",
",",
"float",
")",
",",
"\"fixed_rate\"",
")",
"check_pos_float32",
"(",
"fixed_rate",
",",
"\"fixed_rate\"",
")",
"return",
"method",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"new_method"
] |
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/dataset/audio/validators.py#L422-L441
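A minimal sketch of the decorator pattern used here, with a simplified stand-in for MindSpore's parse_user_args/type_check/check_pos_int32 helpers (the Demo class and the check name are hypothetical):

from functools import wraps

def check_positive_int(name):
    # Simplified stand-in for type_check + check_pos_int32.
    def decorator(method):
        @wraps(method)
        def new_method(self, value):
            if not isinstance(value, int) or value <= 0:
                raise ValueError('%s must be a positive int, got %r'
                                 % (name, value))
            return method(self, value)
        return new_method
    return decorator

class Demo:
    @check_positive_int('hop_length')
    def set_hop_length(self, value):
        self.hop_length = value

d = Demo()
d.set_hop_length(512)     # passes validation
# d.set_hop_length(-1)    # would raise ValueError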
|
|
mongodb/mongo
|
d8ff665343ad29cf286ee2cf4a1960d29371937b
|
src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ipkg.py
|
python
|
generate
|
(env)
|
Add Builders and construction variables for ipkg to an Environment.
|
Add Builders and construction variables for ipkg to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"ipkg",
"to",
"an",
"Environment",
"."
] |
def generate(env):
"""Add Builders and construction variables for ipkg to an Environment."""
try:
bld = env['BUILDERS']['Ipkg']
except KeyError:
bld = SCons.Builder.Builder(action='$IPKGCOM',
suffix='$IPKGSUFFIX',
source_scanner=None,
target_scanner=None)
env['BUILDERS']['Ipkg'] = bld
env['IPKG'] = 'ipkg-build'
env['IPKGCOM'] = '$IPKG $IPKGFLAGS ${SOURCE}'
if env.WhereIs('id'):
with os.popen('id -un') as p:
env['IPKGUSER'] = p.read().strip()
with os.popen('id -gn') as p:
env['IPKGGROUP'] = p.read().strip()
env['IPKGFLAGS'] = SCons.Util.CLVar('-o $IPKGUSER -g $IPKGGROUP')
env['IPKGSUFFIX'] = '.ipk'
|
[
"def",
"generate",
"(",
"env",
")",
":",
"try",
":",
"bld",
"=",
"env",
"[",
"'BUILDERS'",
"]",
"[",
"'Ipkg'",
"]",
"except",
"KeyError",
":",
"bld",
"=",
"SCons",
".",
"Builder",
".",
"Builder",
"(",
"action",
"=",
"'$IPKGCOM'",
",",
"suffix",
"=",
"'$IPKGSUFFIX'",
",",
"source_scanner",
"=",
"None",
",",
"target_scanner",
"=",
"None",
")",
"env",
"[",
"'BUILDERS'",
"]",
"[",
"'Ipkg'",
"]",
"=",
"bld",
"env",
"[",
"'IPKG'",
"]",
"=",
"'ipkg-build'",
"env",
"[",
"'IPKGCOM'",
"]",
"=",
"'$IPKG $IPKGFLAGS ${SOURCE}'",
"if",
"env",
".",
"WhereIs",
"(",
"'id'",
")",
":",
"with",
"os",
".",
"popen",
"(",
"'id -un'",
")",
"as",
"p",
":",
"env",
"[",
"'IPKGUSER'",
"]",
"=",
"p",
".",
"read",
"(",
")",
".",
"strip",
"(",
")",
"with",
"os",
".",
"popen",
"(",
"'id -gn'",
")",
"as",
"p",
":",
"env",
"[",
"'IPKGGROUP'",
"]",
"=",
"p",
".",
"read",
"(",
")",
".",
"strip",
"(",
")",
"env",
"[",
"'IPKGFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'-o $IPKGUSER -g $IPKGGROUP'",
")",
"env",
"[",
"'IPKGSUFFIX'",
"]",
"=",
"'.ipk'"
] |
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Tool/ipkg.py#L42-L63
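A hedged SConstruct sketch using the builder registered above (the Ipkg name and the '$IPKG $IPKGFLAGS ${SOURCE}' command come straight from the code; the package directory is a placeholder, and Environment is injected by SCons at run time):

# SConstruct (run with `scons`)
env = Environment(tools=['default', 'ipkg'])
# The builder invokes ipkg-build on the package directory, and the
# result carries the '.ipk' suffix from IPKGSUFFIX.
env.Ipkg(target='mypackage.ipk', source='mypackage-dir')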
|
||
mantidproject/mantid
|
03deeb89254ec4289edb8771e0188c2090a02f32
|
Framework/PythonInterface/plugins/algorithms/FindGoniometerFromUB.py
|
python
|
FindGoniometerFromUB.createGoniometerTable
|
(self)
|
return gonioTable
|
:return: Empty table workspace with columns Run, Chi, Phi and GonioAxis (unit vector)
|
:return: Empty table workspace with columns Run, Chi, Phi and GonioAxis (unit vector)
|
[
":",
"return",
":",
"Empty",
"table",
"workspace",
"with",
"columns",
"Run",
"Chi",
"Phi",
"and",
"GonioAxis",
"(",
"unit",
"vector",
")"
] |
def createGoniometerTable(self):
"""
:return: Empty table workspace with columns Run, Chi, Phi and GonioAxis (unit vector)
"""
gonioTable = CreateEmptyTableWorkspace(StoreInADS=False)
# Add some columns, Recognized types are: int,float,double,bool,str,V3D,long64
gonioTable.addColumn(type="str", name="Run")
gonioTable.addColumn(type="float", name="Chi")
gonioTable.addColumn(type="float", name="Phi")
gonioTable.addColumn(type="V3D", name="GonioAxis")
return gonioTable
|
[
"def",
"createGoniometerTable",
"(",
"self",
")",
":",
"gonioTable",
"=",
"CreateEmptyTableWorkspace",
"(",
"StoreInADS",
"=",
"False",
")",
"# Add some columns, Recognized types are: int,float,double,bool,str,V3D,long64",
"gonioTable",
".",
"addColumn",
"(",
"type",
"=",
"\"str\"",
",",
"name",
"=",
"\"Run\"",
")",
"gonioTable",
".",
"addColumn",
"(",
"type",
"=",
"\"float\"",
",",
"name",
"=",
"\"Chi\"",
")",
"gonioTable",
".",
"addColumn",
"(",
"type",
"=",
"\"float\"",
",",
"name",
"=",
"\"Phi\"",
")",
"gonioTable",
".",
"addColumn",
"(",
"type",
"=",
"\"V3D\"",
",",
"name",
"=",
"\"GonioAxis\"",
")",
"return",
"gonioTable"
] |
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/FindGoniometerFromUB.py#L261-L271
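A sketch of filling the returned table; addRow with a column-name dict is standard Mantid TableWorkspace usage, but the V3D import path and the row values are assumptions:

from mantid.kernel import V3D

gonioTable = self.createGoniometerTable()
# One row per run: label, chi/phi angles, and the goniometer axis vector.
gonioTable.addRow({'Run': '12345', 'Chi': 45.0, 'Phi': 30.0,
                   'GonioAxis': V3D(0.0, 1.0, 0.0)})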
|
|
facebook/ThreatExchange
|
31914a51820c73c8a0daffe62ccca29a6e3d359e
|
python-threatexchange/threatexchange/content_type/meta.py
|
python
|
get_all_content_types
|
()
|
return [
text.TextContent,
video.VideoContent,
photo.PhotoContent,
pdf.PDFContent,
url.URL,
]
|
Returns all content_type implementations for commands
|
Returns all content_type implementations for commands
|
[
"Returns",
"all",
"content_type",
"implementations",
"for",
"commands"
] |
def get_all_content_types() -> t.List[t.Type[content_base.ContentType]]:
"""Returns all content_type implementations for commands"""
return [
text.TextContent,
video.VideoContent,
photo.PhotoContent,
pdf.PDFContent,
url.URL,
]
|
[
"def",
"get_all_content_types",
"(",
")",
"->",
"t",
".",
"List",
"[",
"t",
".",
"Type",
"[",
"content_base",
".",
"ContentType",
"]",
"]",
":",
"return",
"[",
"text",
".",
"TextContent",
",",
"video",
".",
"VideoContent",
",",
"photo",
".",
"PhotoContent",
",",
"pdf",
".",
"PDFContent",
",",
"url",
".",
"URL",
",",
"]"
] |
https://github.com/facebook/ThreatExchange/blob/31914a51820c73c8a0daffe62ccca29a6e3d359e/python-threatexchange/threatexchange/content_type/meta.py#L16-L24
|
|
yrnkrn/zapcc
|
c6a8aa30006d997eff0d60fd37b0e62b8aa0ea50
|
tools/clang/bindings/python/clang/cindex.py
|
python
|
Cursor.storage_class
|
(self)
|
return StorageClass.from_id(self._storage_class)
|
Retrieves the storage class (if any) of the entity pointed at by the
cursor.
|
Retrieves the storage class (if any) of the entity pointed at by the
cursor.
|
[
"Retrieves",
"the",
"storage",
"class",
"(",
"if",
"any",
")",
"of",
"the",
"entity",
"pointed",
"at",
"by",
"the",
"cursor",
"."
] |
def storage_class(self):
"""
Retrieves the storage class (if any) of the entity pointed at by the
cursor.
"""
if not hasattr(self, '_storage_class'):
self._storage_class = conf.lib.clang_Cursor_getStorageClass(self)
return StorageClass.from_id(self._storage_class)
|
[
"def",
"storage_class",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_storage_class'",
")",
":",
"self",
".",
"_storage_class",
"=",
"conf",
".",
"lib",
".",
"clang_Cursor_getStorageClass",
"(",
"self",
")",
"return",
"StorageClass",
".",
"from_id",
"(",
"self",
".",
"_storage_class",
")"
] |
https://github.com/yrnkrn/zapcc/blob/c6a8aa30006d997eff0d60fd37b0e62b8aa0ea50/tools/clang/bindings/python/clang/cindex.py#L1588-L1596
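A usage sketch with the clang Python bindings (requires a matching libclang at run time; the in-memory source is a placeholder):

from clang.cindex import Index, StorageClass

index = Index.create()
tu = index.parse('demo.c', unsaved_files=[
    ('demo.c', 'static int counter; extern int shared;')])
for cursor in tu.cursor.walk_preorder():
    # storage_class is computed lazily on first access, as shown above.
    if cursor.storage_class in (StorageClass.STATIC, StorageClass.EXTERN):
        print(cursor.spelling, cursor.storage_class)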
|
|
ltcmelo/psychec
|
46672204681d73b40772a7bf24137dca23175e81
|
cnippet/wrapper/Python/PsycheCFacade.py
|
python
|
PsycheCFacade.solve_constraints
|
(self, unit: Unit)
|
Solve the constraint.
|
Solve the constraint.
|
[
"Solve",
"the",
"constraint",
"."
] |
def solve_constraints(self, unit: Unit):
"""
Solve the constraint.
"""
cmd = [PsycheCFacade._SOLVER,
'--',
'-i',
unit.cstr_file,
'-o',
unit.cnip_file]
if not self.no_stdlib:
cmd.append('--match-stdlib=approx')
ok = execute(PsycheCFacade.ID(), cmd)
if ok != 0:
sys.exit(
DiagnosticReporter.fatal(CONSTRAINT_SOLVING_FOR_FILE_FAILED,
unit.c_file))
|
[
"def",
"solve_constraints",
"(",
"self",
",",
"unit",
":",
"Unit",
")",
":",
"cmd",
"=",
"[",
"PsycheCFacade",
".",
"_SOLVER",
",",
"'--'",
",",
"'-i'",
",",
"unit",
".",
"cstr_file",
",",
"'-o'",
",",
"unit",
".",
"cnip_file",
"]",
"if",
"not",
"self",
".",
"no_stdlib",
":",
"cmd",
".",
"append",
"(",
"'--match-stdlib=approx'",
")",
"ok",
"=",
"execute",
"(",
"PsycheCFacade",
".",
"ID",
"(",
")",
",",
"cmd",
")",
"if",
"ok",
"!=",
"0",
":",
"sys",
".",
"exit",
"(",
"DiagnosticReporter",
".",
"fatal",
"(",
"CONSTRAINT_SOLVING_FOR_FILE_FAILED",
",",
"unit",
".",
"c_file",
")",
")"
] |
https://github.com/ltcmelo/psychec/blob/46672204681d73b40772a7bf24137dca23175e81/cnippet/wrapper/Python/PsycheCFacade.py#L78-L97
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/_controls.py
|
python
|
SpinCtrlDouble.__init__
|
(self, *args, **kwargs)
|
__init__(self, Window parent, int id=ID_ANY, String value=wxEmptyString,
Point pos=DefaultPosition, Size size=DefaultSize,
long style=wxSP_ARROW_KEYS|wxALIGN_RIGHT,
double min=0, double max=100, double initial=0,
double inc=1, String name="wxSpinCtrlDouble") -> SpinCtrlDouble
|
__init__(self, Window parent, int id=ID_ANY, String value=wxEmptyString,
Point pos=DefaultPosition, Size size=DefaultSize,
long style=wxSP_ARROW_KEYS|wxALIGN_RIGHT,
double min=0, double max=100, double initial=0,
double inc=1, String name="wxSpinCtrlDouble") -> SpinCtrlDouble
|
[
"__init__",
"(",
"self",
"Window",
"parent",
"int",
"id",
"=",
"ID_ANY",
"String",
"value",
"=",
"wxEmptyString",
"Point",
"pos",
"=",
"DefaultPosition",
"Size",
"size",
"=",
"DefaultSize",
"long",
"style",
"=",
"wxSP_ARROW_KEYS|wxALIGN_RIGHT",
"double",
"min",
"=",
"0",
"double",
"max",
"=",
"100",
"double",
"initial",
"=",
"0",
"double",
"inc",
"=",
"1",
"String",
"name",
"=",
"wxSpinCtrlDouble",
")",
"-",
">",
"SpinCtrlDouble"
] |
def __init__(self, *args, **kwargs):
"""
__init__(self, Window parent, int id=ID_ANY, String value=wxEmptyString,
Point pos=DefaultPosition, Size size=DefaultSize,
long style=wxSP_ARROW_KEYS|wxALIGN_RIGHT,
double min=0, double max=100, double initial=0,
double inc=1, String name="wxSpinCtrlDouble") -> SpinCtrlDouble
"""
_controls_.SpinCtrlDouble_swiginit(self,_controls_.new_SpinCtrlDouble(*args, **kwargs))
self._setOORInfo(self)
|
[
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"_controls_",
".",
"SpinCtrlDouble_swiginit",
"(",
"self",
",",
"_controls_",
".",
"new_SpinCtrlDouble",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
")",
"self",
".",
"_setOORInfo",
"(",
"self",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_controls.py#L2479-L2488
|
||
mindspore-ai/mindspore
|
fb8fd3338605bb34fa5cea054e535a8b1d753fab
|
mindspore/python/mindspore/dataset/vision/validators.py
|
python
|
check_cutout
|
(method)
|
return new_method
|
Wrapper method to check the parameters of cutout operation.
|
Wrapper method to check the parameters of cutout operation.
|
[
"Wrapper",
"method",
"to",
"check",
"the",
"parameters",
"of",
"cutout",
"operation",
"."
] |
def check_cutout(method):
"""Wrapper method to check the parameters of cutout operation."""
@wraps(method)
def new_method(self, *args, **kwargs):
[length, num_patches], _ = parse_user_args(method, *args, **kwargs)
type_check(length, (int,), "length")
type_check(num_patches, (int,), "num_patches")
check_value(length, (1, FLOAT_MAX_INTEGER))
check_value(num_patches, (1, FLOAT_MAX_INTEGER))
return method(self, *args, **kwargs)
return new_method
|
[
"def",
"check_cutout",
"(",
"method",
")",
":",
"@",
"wraps",
"(",
"method",
")",
"def",
"new_method",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"[",
"length",
",",
"num_patches",
"]",
",",
"_",
"=",
"parse_user_args",
"(",
"method",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"type_check",
"(",
"length",
",",
"(",
"int",
",",
")",
",",
"\"length\"",
")",
"type_check",
"(",
"num_patches",
",",
"(",
"int",
",",
")",
",",
"\"num_patches\"",
")",
"check_value",
"(",
"length",
",",
"(",
"1",
",",
"FLOAT_MAX_INTEGER",
")",
")",
"check_value",
"(",
"num_patches",
",",
"(",
"1",
",",
"FLOAT_MAX_INTEGER",
")",
")",
"return",
"method",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"new_method"
] |
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/dataset/vision/validators.py#L698-L711
|
|
mindspore-ai/mindspore
|
fb8fd3338605bb34fa5cea054e535a8b1d753fab
|
mindspore/python/mindspore/communication/management.py
|
python
|
create_group
|
(group, rank_ids)
|
Create a user collective communication group.
Note:
GPU version of MindSpore doesn't support this method.
The size of rank_ids should be larger than 1, rank_ids should not have duplicate data.
This method should be used after init().
Only a single global communication group is supported in PyNative mode.
The user needs to preset communication environment variables before running the following example; please see
the docstring of mindspore.communication.management.
Args:
group (str): The name of the communication group to be created.
rank_ids (list): A list of device IDs.
Raises:
TypeError: If group is not a string or `rank_ids` is not a list.
ValueError: If `rank_ids` size is not larger than 1, or `rank_ids` has duplicate data, or backend is invalid.
RuntimeError: If HCCL is not available or MindSpore is GPU version.
Examples:
>>> from mindspore.context import set_context
>>> from mindspore.ops import operations as ops
>>> from mindspore.communication.management import init, create_group
>>> set_context(device_target="Ascend")
>>> init()
>>> group = "0-8"
>>> rank_ids = [0,8]
>>> create_group(group, rank_ids)
>>> allreduce = ops.AllReduce(group)
|
Create a user collective communication group.
|
[
"Create",
"a",
"user",
"collective",
"communication",
"group",
"."
] |
def create_group(group, rank_ids):
"""
Create a user collective communication group.
Note:
GPU version of MindSpore doesn't support this method.
The size of rank_ids should be larger than 1, rank_ids should not have duplicate data.
This method should be used after init().
Only a single global communication group is supported in PyNative mode.
The user needs to preset communication environment variables before running the following example; please see
the docstring of mindspore.communication.management.
Args:
group (str): The name of the communication group to be created.
rank_ids (list): A list of device IDs.
Raises:
TypeError: If group is not a string or `rank_ids` is not a list.
ValueError: If `rank_ids` size is not larger than 1, or `rank_ids` has duplicate data, or backend is invalid.
RuntimeError: If HCCL is not available or MindSpore is GPU version.
Examples:
>>> from mindspore.context import set_context
>>> from mindspore.ops import operations as ops
>>> from mindspore.communication.management import init, create_group
>>> set_context(device_target="Ascend")
>>> init()
>>> group = "0-8"
>>> rank_ids = [0,8]
>>> create_group(group, rank_ids)
>>> allreduce = ops.AllReduce(group)
"""
if not isinstance(group, str):
raise TypeError("For 'create_group', the argument 'group' must be type of string, "
"but got 'group' type : {}.".format(type(group)))
_create_group_helper(group, rank_ids, backend=GlobalComm.BACKEND)
|
[
"def",
"create_group",
"(",
"group",
",",
"rank_ids",
")",
":",
"if",
"not",
"isinstance",
"(",
"group",
",",
"str",
")",
":",
"raise",
"TypeError",
"(",
"\"For 'create_group', the argument 'group' must be type of string, \"",
"\"but got 'group' type : {}.\"",
".",
"format",
"(",
"type",
"(",
"group",
")",
")",
")",
"_create_group_helper",
"(",
"group",
",",
"rank_ids",
",",
"backend",
"=",
"GlobalComm",
".",
"BACKEND",
")"
] |
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/communication/management.py#L395-L430
|
||
FreeCAD/FreeCAD
|
ba42231b9c6889b89e064d6d563448ed81e376ec
|
src/Mod/Arch/ArchWall.py
|
python
|
_CommandMergeWalls.IsActive
|
(self)
|
return bool(FreeCADGui.Selection.getSelection())
|
Determines whether or not the Arch MergeWalls tool is active.
Inactive commands are indicated by a greyed-out icon in the menus and
toolbars.
|
Determines whether or not the Arch MergeWalls tool is active.
|
[
"Determines",
"whether",
"or",
"not",
"the",
"Arch",
"MergeWalls",
"tool",
"is",
"active",
"."
] |
def IsActive(self):
"""Determines whether or not the Arch MergeWalls tool is active.
Inactive commands are indicated by a greyed-out icon in the menus and
toolbars.
"""
return bool(FreeCADGui.Selection.getSelection())
|
[
"def",
"IsActive",
"(",
"self",
")",
":",
"return",
"bool",
"(",
"FreeCADGui",
".",
"Selection",
".",
"getSelection",
"(",
")",
")"
] |
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Arch/ArchWall.py#L634-L641
|
|
Slicer/SlicerGitSVNArchive
|
65e92bb16c2b32ea47a1a66bee71f238891ee1ca
|
Modules/Scripted/DICOMLib/DICOMUtils.py
|
python
|
loadSeriesWithVerification
|
(seriesUIDs, expectedSelectedPlugins=None, expectedLoadedNodes=None)
|
return success
|
Load series by UID, and verify loadable selection and loaded nodes.
``expectedSelectedPlugins`` example: { 'Scalar Volume':1, 'RT':2 }
``expectedLoadedNodes`` example: { 'vtkMRMLScalarVolumeNode':2, 'vtkMRMLSegmentationNode':1 }
|
Load series by UID, and verify loadable selection and loaded nodes.
|
[
"Load",
"series",
"by",
"UID",
"and",
"verify",
"loadable",
"selection",
"and",
"loaded",
"nodes",
"."
] |
def loadSeriesWithVerification(seriesUIDs, expectedSelectedPlugins=None, expectedLoadedNodes=None):
""" Load series by UID, and verify loadable selection and loaded nodes.
``expectedSelectedPlugins`` example: { 'Scalar Volume':1, 'RT':2 }
``expectedLoadedNodes`` example: { 'vtkMRMLScalarVolumeNode':2, 'vtkMRMLSegmentationNode':1 }
"""
if not slicer.dicomDatabase.isOpen:
logging.error('DICOM module or database cannot be accessed')
return False
if seriesUIDs is None or len(seriesUIDs) == 0:
logging.error('No series UIDs given')
return False
fileLists = []
for seriesUID in seriesUIDs:
fileLists.append(slicer.dicomDatabase.filesForSeries(seriesUID))
if len(fileLists) == 0:
logging.error('No files found for DICOM series list')
return False
loadablesByPlugin, loadEnabled = getLoadablesFromFileLists(fileLists)
success = True
# Verify loadables if baseline is given
if expectedSelectedPlugins is not None and len(expectedSelectedPlugins.keys()) > 0:
actualSelectedPlugins = {}
for plugin in loadablesByPlugin:
for loadable in loadablesByPlugin[plugin]:
if loadable.selected:
if plugin.loadType in actualSelectedPlugins:
count = int(actualSelectedPlugins[plugin.loadType])
actualSelectedPlugins[plugin.loadType] = count+1
else:
actualSelectedPlugins[plugin.loadType] = 1
for pluginName in expectedSelectedPlugins.keys():
if pluginName not in actualSelectedPlugins:
logging.error("Expected DICOM plugin '%s' was not selected" % (pluginName))
success = False
elif actualSelectedPlugins[pluginName] != expectedSelectedPlugins[pluginName]:
logging.error("DICOM plugin '%s' was expected to be selected in %d loadables, but was selected in %d" % \
(pluginName, expectedSelectedPlugins[pluginName], actualSelectedPlugins[pluginName]))
success = False
# Count relevant node types in scene
actualLoadedNodes = {}
if expectedLoadedNodes is not None:
for nodeType in expectedLoadedNodes.keys():
nodeCollection = slicer.mrmlScene.GetNodesByClass(nodeType)
nodeCollection.UnRegister(None)
actualLoadedNodes[nodeType] = nodeCollection.GetNumberOfItems()
# Load selected data
loadedNodeIDs = loadLoadables(loadablesByPlugin)
if expectedLoadedNodes is not None:
for nodeType in expectedLoadedNodes.keys():
nodeCollection = slicer.mrmlScene.GetNodesByClass(nodeType)
nodeCollection.UnRegister(None)
numOfLoadedNodes = nodeCollection.GetNumberOfItems()-actualLoadedNodes[nodeType]
if numOfLoadedNodes != expectedLoadedNodes[nodeType]:
logging.error("Number of loaded %s nodes was %d, but %d was expected" % \
(nodeType, numOfLoadedNodes, expectedLoadedNodes[nodeType]) )
success = False
return success
|
[
"def",
"loadSeriesWithVerification",
"(",
"seriesUIDs",
",",
"expectedSelectedPlugins",
"=",
"None",
",",
"expectedLoadedNodes",
"=",
"None",
")",
":",
"if",
"not",
"slicer",
".",
"dicomDatabase",
".",
"isOpen",
":",
"logging",
".",
"error",
"(",
"'DICOM module or database cannot be accessed'",
")",
"return",
"False",
"if",
"seriesUIDs",
"is",
"None",
"or",
"len",
"(",
"seriesUIDs",
")",
"==",
"0",
":",
"logging",
".",
"error",
"(",
"'No series UIDs given'",
")",
"return",
"False",
"fileLists",
"=",
"[",
"]",
"for",
"seriesUID",
"in",
"seriesUIDs",
":",
"fileLists",
".",
"append",
"(",
"slicer",
".",
"dicomDatabase",
".",
"filesForSeries",
"(",
"seriesUID",
")",
")",
"if",
"len",
"(",
"fileLists",
")",
"==",
"0",
":",
"logging",
".",
"error",
"(",
"'No files found for DICOM series list'",
")",
"return",
"False",
"loadablesByPlugin",
",",
"loadEnabled",
"=",
"getLoadablesFromFileLists",
"(",
"fileLists",
")",
"success",
"=",
"True",
"# Verify loadables if baseline is given",
"if",
"expectedSelectedPlugins",
"is",
"not",
"None",
"and",
"len",
"(",
"expectedSelectedPlugins",
".",
"keys",
"(",
")",
")",
">",
"0",
":",
"actualSelectedPlugins",
"=",
"{",
"}",
"for",
"plugin",
"in",
"loadablesByPlugin",
":",
"for",
"loadable",
"in",
"loadablesByPlugin",
"[",
"plugin",
"]",
":",
"if",
"loadable",
".",
"selected",
":",
"if",
"plugin",
".",
"loadType",
"in",
"actualSelectedPlugins",
":",
"count",
"=",
"int",
"(",
"actualSelectedPlugins",
"[",
"plugin",
".",
"loadType",
"]",
")",
"actualSelectedPlugins",
"[",
"plugin",
".",
"loadType",
"]",
"=",
"count",
"+",
"1",
"else",
":",
"actualSelectedPlugins",
"[",
"plugin",
".",
"loadType",
"]",
"=",
"1",
"for",
"pluginName",
"in",
"expectedSelectedPlugins",
".",
"keys",
"(",
")",
":",
"if",
"pluginName",
"not",
"in",
"actualSelectedPlugins",
":",
"logging",
".",
"error",
"(",
"\"Expected DICOM plugin '%s' was not selected\"",
"%",
"(",
"pluginName",
")",
")",
"success",
"=",
"False",
"elif",
"actualSelectedPlugins",
"[",
"pluginName",
"]",
"!=",
"expectedSelectedPlugins",
"[",
"pluginName",
"]",
":",
"logging",
".",
"error",
"(",
"\"DICOM plugin '%s' was expected to be selected in %d loadables, but was selected in %d\"",
"%",
"(",
"pluginName",
",",
"expectedSelectedPlugins",
"[",
"pluginName",
"]",
",",
"actualSelectedPlugins",
"[",
"pluginName",
"]",
")",
")",
"success",
"=",
"False",
"# Count relevant node types in scene",
"actualLoadedNodes",
"=",
"{",
"}",
"if",
"expectedLoadedNodes",
"is",
"not",
"None",
":",
"for",
"nodeType",
"in",
"expectedLoadedNodes",
".",
"keys",
"(",
")",
":",
"nodeCollection",
"=",
"slicer",
".",
"mrmlScene",
".",
"GetNodesByClass",
"(",
"nodeType",
")",
"nodeCollection",
".",
"UnRegister",
"(",
"None",
")",
"actualLoadedNodes",
"[",
"nodeType",
"]",
"=",
"nodeCollection",
".",
"GetNumberOfItems",
"(",
")",
"# Load selected data",
"loadedNodeIDs",
"=",
"loadLoadables",
"(",
"loadablesByPlugin",
")",
"if",
"expectedLoadedNodes",
"is",
"not",
"None",
":",
"for",
"nodeType",
"in",
"expectedLoadedNodes",
".",
"keys",
"(",
")",
":",
"nodeCollection",
"=",
"slicer",
".",
"mrmlScene",
".",
"GetNodesByClass",
"(",
"nodeType",
")",
"nodeCollection",
".",
"UnRegister",
"(",
"None",
")",
"numOfLoadedNodes",
"=",
"nodeCollection",
".",
"GetNumberOfItems",
"(",
")",
"-",
"actualLoadedNodes",
"[",
"nodeType",
"]",
"if",
"numOfLoadedNodes",
"!=",
"expectedLoadedNodes",
"[",
"nodeType",
"]",
":",
"logging",
".",
"error",
"(",
"\"Number of loaded %s nodes was %d, but %d was expected\"",
"%",
"(",
"nodeType",
",",
"numOfLoadedNodes",
",",
"expectedLoadedNodes",
"[",
"nodeType",
"]",
")",
")",
"success",
"=",
"False",
"return",
"success"
] |
https://github.com/Slicer/SlicerGitSVNArchive/blob/65e92bb16c2b32ea47a1a66bee71f238891ee1ca/Modules/Scripted/DICOMLib/DICOMUtils.py#L330-L395
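A call sketch mirroring the docstring's baseline dictionaries; it must run inside Slicer's Python environment, and the series UID is a placeholder:

from DICOMLib import DICOMUtils

seriesUIDs = ['1.2.840.113619.2.55.3.1234']   # placeholder UID
ok = DICOMUtils.loadSeriesWithVerification(
    seriesUIDs,
    expectedSelectedPlugins={'Scalar Volume': 1},
    expectedLoadedNodes={'vtkMRMLScalarVolumeNode': 1})
print('verification passed' if ok else 'verification failed')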
|
|
xlgames-inc/XLE
|
cdd8682367d9e9fdbdda9f79d72bb5b1499cec46
|
Foreign/FreeType/src/tools/docmaker/sources.py
|
python
|
SourceProcessor.reset
|
( self )
|
Reset a block processor and clean up all its blocks.
|
Reset a block processor and clean up all its blocks.
|
[
"Reset",
"a",
"block",
"processor",
"and",
"clean",
"up",
"all",
"its",
"blocks",
"."
] |
def reset( self ):
"""Reset a block processor and clean up all its blocks."""
self.blocks = []
self.format = None
|
[
"def",
"reset",
"(",
"self",
")",
":",
"self",
".",
"blocks",
"=",
"[",
"]",
"self",
".",
"format",
"=",
"None"
] |
https://github.com/xlgames-inc/XLE/blob/cdd8682367d9e9fdbdda9f79d72bb5b1499cec46/Foreign/FreeType/src/tools/docmaker/sources.py#L324-L327
|
||
mitmedialab/Junkyard-Jumbotron
|
7e32ecc8a01ea5a578fea6ea54f1f44c7f8f546e
|
python/artoolkit.py
|
python
|
ARMarkerInfo.init
|
(self, xform)
|
Initialize with the given xform, calculating python-only variables
|
Initialize with the given xform, calculating python-only variables
|
[
"Initialize",
"with",
"the",
"given",
"xform",
"calculating",
"python",
"-",
"only",
"variables"
] |
def init(self, xform):
"""Initialize with the given xform, calculating python-only variables"""
self.xform = xform
corners = (Vec3(-0.5, -0.5),
Vec3(-0.5, 0.5),
Vec3( 0.5, 0.5),
Vec3( 0.5, -0.5))
self.world_vertices = [xform.transform(c) for c in corners]
self.world_normal = xform.transform_vector(-Vec3.z_axis()).normalize()
self.world_center = xform.transform(Vec3.origin())
# Point upwards by rotating up vector by -90, -180, or -270 degrees.
# Direction is the rotation angle (in units of 90-degrees).
# 0=up, 1=left, 2=down, 3=right
# In image space, up is negative.
up = xform.transform_vector(-Vec3.y_axis()).normalized()
direc = 0
if abs(up.x) > abs(up.y):
rot = Mat4.new_rotate_axis(self.world_normal, math.pi*0.5)
up = rot.transform(up)
direc = 1
if up.y < 0:
rot = Mat4.new_rotate_axis(self.world_normal, math.pi)
up = rot.transform(up)
direc += 2
self.world_up = up
self.world_direction = direc
|
[
"def",
"init",
"(",
"self",
",",
"xform",
")",
":",
"self",
".",
"xform",
"=",
"xform",
"corners",
"=",
"(",
"Vec3",
"(",
"-",
"0.5",
",",
"-",
"0.5",
")",
",",
"Vec3",
"(",
"-",
"0.5",
",",
"0.5",
")",
",",
"Vec3",
"(",
"0.5",
",",
"0.5",
")",
",",
"Vec3",
"(",
"0.5",
",",
"-",
"0.5",
")",
")",
"self",
".",
"world_vertices",
"=",
"[",
"xform",
".",
"transform",
"(",
"c",
")",
"for",
"c",
"in",
"corners",
"]",
"self",
".",
"world_normal",
"=",
"xform",
".",
"transform_vector",
"(",
"-",
"Vec3",
".",
"z_axis",
"(",
")",
")",
".",
"normalize",
"(",
")",
"self",
".",
"world_center",
"=",
"xform",
".",
"transform",
"(",
"Vec3",
".",
"origin",
"(",
")",
")",
"# Point upwards by rotating up vector by -90, -180, or -270 degrees.",
"# Direction is the rotation angle (in units of 90-degrees).",
"# 0=up, 1=left, 2=down, 3=right",
"# In image space, up is negative.",
"up",
"=",
"xform",
".",
"transform_vector",
"(",
"-",
"Vec3",
".",
"y_axis",
"(",
")",
")",
".",
"normalized",
"(",
")",
"direc",
"=",
"0",
"if",
"abs",
"(",
"up",
".",
"x",
")",
">",
"abs",
"(",
"up",
".",
"y",
")",
":",
"rot",
"=",
"Mat4",
".",
"new_rotate_axis",
"(",
"self",
".",
"world_normal",
",",
"math",
".",
"pi",
"*",
"0.5",
")",
"up",
"=",
"rot",
".",
"transform",
"(",
"up",
")",
"direc",
"=",
"1",
"if",
"up",
".",
"y",
"<",
"0",
":",
"rot",
"=",
"Mat4",
".",
"new_rotate_axis",
"(",
"self",
".",
"world_normal",
",",
"math",
".",
"pi",
")",
"up",
"=",
"rot",
".",
"transform",
"(",
"up",
")",
"direc",
"+=",
"2",
"self",
".",
"world_up",
"=",
"up",
"self",
".",
"world_direction",
"=",
"direc"
] |
https://github.com/mitmedialab/Junkyard-Jumbotron/blob/7e32ecc8a01ea5a578fea6ea54f1f44c7f8f546e/python/artoolkit.py#L35-L62
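The direction quantization above can be checked in isolation. A sketch with a plain 2-D up vector (the rotation sign depends on the handedness of the axes, so the left/right labels are an assumption; the real code rotates Vec3 instances about the marker normal):

def quantize_direction(up_x, up_y):
    # Mirrors the two conditional rotations in ARMarkerInfo.init:
    # 0=up, 1=left, 2=down, 3=right (per the comments above).
    direc = 0
    if abs(up_x) > abs(up_y):
        up_x, up_y = -up_y, up_x        # rotate by 90 degrees
        direc = 1
    if up_y < 0:
        up_x, up_y = -up_x, -up_y       # rotate by 180 degrees
        direc += 2
    return direc

assert quantize_direction(0.0, 1.0) == 0
assert quantize_direction(1.0, 0.0) == 1
assert quantize_direction(0.0, -1.0) == 2
assert quantize_direction(-1.0, 0.0) == 3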
|
||
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib-tk/Tkinter.py
|
python
|
Checkbutton.invoke
|
(self)
|
return self.tk.call(self._w, 'invoke')
|
Toggle the button and invoke a command if given as resource.
|
Toggle the button and invoke a command if given as resource.
|
[
"Toggle",
"the",
"button",
"and",
"invoke",
"a",
"command",
"if",
"given",
"as",
"resource",
"."
] |
def invoke(self):
"""Toggle the button and invoke a command if given as resource."""
return self.tk.call(self._w, 'invoke')
|
[
"def",
"invoke",
"(",
"self",
")",
":",
"return",
"self",
".",
"tk",
".",
"call",
"(",
"self",
".",
"_w",
",",
"'invoke'",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib-tk/Tkinter.py#L2424-L2426
|
|
panda3d/panda3d
|
833ad89ebad58395d0af0b7ec08538e5e4308265
|
direct/src/showbase/ShowBase.py
|
python
|
ShowBase.setupRender
|
(self)
|
Creates the render scene graph, the primary scene graph for
rendering 3-d geometry.
|
Creates the render scene graph, the primary scene graph for
rendering 3-d geometry.
|
[
"Creates",
"the",
"render",
"scene",
"graph",
"the",
"primary",
"scene",
"graph",
"for",
"rendering",
"3",
"-",
"d",
"geometry",
"."
] |
def setupRender(self):
"""
Creates the render scene graph, the primary scene graph for
rendering 3-d geometry.
"""
## This is the root of the 3-D scene graph.
self.render = NodePath('render')
self.render.setAttrib(RescaleNormalAttrib.makeDefault())
self.render.setTwoSided(0)
self.backfaceCullingEnabled = 1
self.textureEnabled = 1
self.wireframeEnabled = 0
|
[
"def",
"setupRender",
"(",
"self",
")",
":",
"## This is the root of the 3-D scene graph.",
"self",
".",
"render",
"=",
"NodePath",
"(",
"'render'",
")",
"self",
".",
"render",
".",
"setAttrib",
"(",
"RescaleNormalAttrib",
".",
"makeDefault",
"(",
")",
")",
"self",
".",
"render",
".",
"setTwoSided",
"(",
"0",
")",
"self",
".",
"backfaceCullingEnabled",
"=",
"1",
"self",
".",
"textureEnabled",
"=",
"1",
"self",
".",
"wireframeEnabled",
"=",
"0"
] |
https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/direct/src/showbase/ShowBase.py#L1149-L1161
|
||
cms-sw/cmssw
|
fd9de012d503d3405420bcbeec0ec879baa57cf2
|
Alignment/MuonAlignment/python/svgfig.py
|
python
|
Text.SVG
|
(self, trans=None)
|
return SVG("text", self.d, x=X, y=Y, **self.attr)
|
Apply the transformation "trans" and return an SVG object.
|
Apply the transformation "trans" and return an SVG object.
|
[
"Apply",
"the",
"transformation",
"trans",
"and",
"return",
"an",
"SVG",
"object",
"."
] |
def SVG(self, trans=None):
"""Apply the transformation "trans" and return an SVG object."""
if isinstance(trans, str): trans = totrans(trans)
X, Y = self.x, self.y
if trans != None: X, Y = trans(X, Y)
return SVG("text", self.d, x=X, y=Y, **self.attr)
|
[
"def",
"SVG",
"(",
"self",
",",
"trans",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"trans",
",",
"str",
")",
":",
"trans",
"=",
"totrans",
"(",
"trans",
")",
"X",
",",
"Y",
"=",
"self",
".",
"x",
",",
"self",
".",
"y",
"if",
"trans",
"!=",
"None",
":",
"X",
",",
"Y",
"=",
"trans",
"(",
"X",
",",
"Y",
")",
"return",
"SVG",
"(",
"\"text\"",
",",
"self",
".",
"d",
",",
"x",
"=",
"X",
",",
"y",
"=",
"Y",
",",
"*",
"*",
"self",
".",
"attr",
")"
] |
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Alignment/MuonAlignment/python/svgfig.py#L1855-L1861
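A usage sketch (the top-level svgfig import path is an assumption; in this repo the module lives under Alignment/MuonAlignment/python). Passing a callable transform works because the method only falls back to totrans for strings:

from svgfig import Text

t = Text(5, 5, 'hello', font_size=3)
s = t.SVG(lambda x, y: (x + 1, y + 1))   # apply a simple shift transform
print(s.xml())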
|
|
cms-sw/cmssw
|
fd9de012d503d3405420bcbeec0ec879baa57cf2
|
Alignment/MillePedeAlignmentAlgorithm/scripts/mps_setup_new_align.py
|
python
|
copy_default_templates
|
(args, next_campaign)
|
Copies the default configuration templates.
Arguments:
- `args`: container with the needed information
- `next_campaign`: destination for the copy operation
|
Copies the default configuration templates.
|
[
"Copies",
"the",
"default",
"configuration",
"templates",
"."
] |
def copy_default_templates(args, next_campaign):
"""Copies the default configuration templates.
Arguments:
- `args`: container with the needed information
- `next_campaign`: destination for the copy operation
"""
default_conf_dir = os.path.join(args.MPS_dir, "templates")
template_files = ("universalConfigTemplate.py", "alignment_config.ini")
for f in template_files:
shutil.copy(os.path.join(default_conf_dir, f), next_campaign)
# customize alignment_config.ini
# - replace job name with campaign ID as initial value
# - replace global tag with the corresponding auto GT depending on data type
auto_gt = args.type.replace("MC", "phase1_2017_realistic")
auto_gt = auto_gt.replace("data", "run2_data")
customize_default_template(os.path.join(next_campaign, "alignment_config.ini"),
(r"(jobname\s*[=:])(.*)", r"\1 "+next_campaign),
(r"(globaltag\s*[=:])(.*)",
r"\1 auto:"+auto_gt))
print(" - copied default configuration templates from", end=' ')
print("'"+default_conf_dir+"'")
print(" - please modify these template files according to your needs:", end=' ')
print(", ".join(template_files))
|
[
"def",
"copy_default_templates",
"(",
"args",
",",
"next_campaign",
")",
":",
"default_conf_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"args",
".",
"MPS_dir",
",",
"\"templates\"",
")",
"template_files",
"=",
"(",
"\"universalConfigTemplate.py\"",
",",
"\"alignment_config.ini\"",
")",
"for",
"f",
"in",
"template_files",
":",
"shutil",
".",
"copy",
"(",
"os",
".",
"path",
".",
"join",
"(",
"default_conf_dir",
",",
"f",
")",
",",
"next_campaign",
")",
"# customize alignment_config.ini",
"# - replace job name with campaign ID as initial value",
"# - replace global tag with the corresponding auto GT depending on data type",
"auto_gt",
"=",
"args",
".",
"type",
".",
"replace",
"(",
"\"MC\"",
",",
"\"phase1_2017_realistic\"",
")",
"auto_gt",
"=",
"auto_gt",
".",
"replace",
"(",
"\"data\"",
",",
"\"run2_data\"",
")",
"customize_default_template",
"(",
"os",
".",
"path",
".",
"join",
"(",
"next_campaign",
",",
"\"alignment_config.ini\"",
")",
",",
"(",
"r\"(jobname\\s*[=:])(.*)\"",
",",
"r\"\\1 \"",
"+",
"next_campaign",
")",
",",
"(",
"r\"(globaltag\\s*[=:])(.*)\"",
",",
"r\"\\1 auto:\"",
"+",
"auto_gt",
")",
")",
"print",
"(",
"\" - copied default configuration templates from\"",
",",
"end",
"=",
"' '",
")",
"print",
"(",
"\"'\"",
"+",
"default_conf_dir",
"+",
"\"'\"",
")",
"print",
"(",
"\" - please modify these template files according to your needs:\"",
",",
"end",
"=",
"' '",
")",
"print",
"(",
"\", \"",
".",
"join",
"(",
"template_files",
")",
")"
] |
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Alignment/MillePedeAlignmentAlgorithm/scripts/mps_setup_new_align.py#L198-L224
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/prompt-toolkit/py3/prompt_toolkit/filters/app.py
|
python
|
is_multiline
|
()
|
return get_app().current_buffer.multiline()
|
True when the current buffer has been marked as multiline.
|
True when the current buffer has been marked as multiline.
|
[
"True",
"when",
"the",
"current",
"buffer",
"has",
"been",
"marked",
"as",
"multiline",
"."
] |
def is_multiline() -> bool:
"""
True when the current buffer has been marked as multiline.
"""
return get_app().current_buffer.multiline()
|
[
"def",
"is_multiline",
"(",
")",
"->",
"bool",
":",
"return",
"get_app",
"(",
")",
".",
"current_buffer",
".",
"multiline",
"(",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/prompt-toolkit/py3/prompt_toolkit/filters/app.py#L144-L148
|
|
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/devil/devil/android/device_utils.py
|
python
|
DeviceUtils.DumpCacheData
|
(self, timeout=None, retries=None)
|
return json.dumps(obj, separators=(',', ':'))
|
Dumps the current cache state to a string.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
A serialized cache as a string.
|
Dumps the current cache state to a string.
|
[
"Dumps",
"the",
"current",
"cache",
"state",
"to",
"a",
"string",
"."
] |
def DumpCacheData(self, timeout=None, retries=None):
"""Dumps the current cache state to a string.
Args:
timeout: timeout in seconds
retries: number of retries
Returns:
A serialized cache as a string.
"""
self._EnsureCacheInitialized()
obj = {}
obj['token'] = self._cache['token']
obj['package_apk_paths'] = self._cache['package_apk_paths']
obj['package_apk_checksums'] = self._cache['package_apk_checksums']
# JSON can't handle sets.
for k, v in obj['package_apk_checksums'].iteritems():
obj['package_apk_checksums'][k] = list(v)
obj['device_path_checksums'] = self._cache['device_path_checksums']
return json.dumps(obj, separators=(',', ':'))
|
[
"def",
"DumpCacheData",
"(",
"self",
",",
"timeout",
"=",
"None",
",",
"retries",
"=",
"None",
")",
":",
"self",
".",
"_EnsureCacheInitialized",
"(",
")",
"obj",
"=",
"{",
"}",
"obj",
"[",
"'token'",
"]",
"=",
"self",
".",
"_cache",
"[",
"'token'",
"]",
"obj",
"[",
"'package_apk_paths'",
"]",
"=",
"self",
".",
"_cache",
"[",
"'package_apk_paths'",
"]",
"obj",
"[",
"'package_apk_checksums'",
"]",
"=",
"self",
".",
"_cache",
"[",
"'package_apk_checksums'",
"]",
"# JSON can't handle sets.",
"for",
"k",
",",
"v",
"in",
"obj",
"[",
"'package_apk_checksums'",
"]",
".",
"iteritems",
"(",
")",
":",
"obj",
"[",
"'package_apk_checksums'",
"]",
"[",
"k",
"]",
"=",
"list",
"(",
"v",
")",
"obj",
"[",
"'device_path_checksums'",
"]",
"=",
"self",
".",
"_cache",
"[",
"'device_path_checksums'",
"]",
"return",
"json",
".",
"dumps",
"(",
"obj",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/devil/devil/android/device_utils.py#L2272-L2291
|
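The one non-obvious step above is converting sets to lists before json.dumps. A standalone Python 3 sketch of that idea (the cache shape here is hypothetical):

import json

cache = {
    "token": "abc123",
    "package_apk_checksums": {"com.example.app": {"sha1-a", "sha1-b"}},
}
serializable = dict(cache)
# JSON has no set type, so each checksum set becomes a (sorted) list.
serializable["package_apk_checksums"] = {
    pkg: sorted(checksums)
    for pkg, checksums in cache["package_apk_checksums"].items()
}
print(json.dumps(serializable, separators=(",", ":")))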
|
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/cli/command_parser.py
|
python
|
parse_command
|
(command)
|
return arguments
|
Parse command string into a list of arguments.
- Disregards whitespace inside double quotes and brackets.
- Strips paired leading and trailing double quotes in arguments.
- Splits the command at whitespace.
Nested double quotes and brackets are not handled.
Args:
command: (str) Input command.
Returns:
(list of str) List of arguments.
|
Parse command string into a list of arguments.
|
[
"Parse",
"command",
"string",
"into",
"a",
"list",
"of",
"arguments",
"."
] |
def parse_command(command):
"""Parse command string into a list of arguments.
- Disregards whitespace inside double quotes and brackets.
- Strips paired leading and trailing double quotes in arguments.
- Splits the command at whitespace.
Nested double quotes and brackets are not handled.
Args:
command: (str) Input command.
Returns:
(list of str) List of arguments.
"""
command = command.strip()
if not command:
return []
brackets_intervals = [f.span() for f in _BRACKETS_PATTERN.finditer(command)]
quotes_intervals = [f.span() for f in _QUOTES_PATTERN.finditer(command)]
whitespaces_intervals = [
f.span() for f in _WHITESPACE_PATTERN.finditer(command)
]
if not whitespaces_intervals:
return [command]
arguments = []
idx0 = 0
for start, end in whitespaces_intervals + [(len(command), None)]:
# Skip whitespace stretches enclosed in brackets or double quotes.
if not any(interval[0] < start < interval[1]
for interval in brackets_intervals + quotes_intervals):
argument = command[idx0:start]
# Strip leading and trailing double quote if they are paired.
if (argument.startswith("\"") and argument.endswith("\"") or
argument.startswith("'") and argument.endswith("'")):
argument = argument[1:-1]
arguments.append(argument)
idx0 = end
return arguments
|
[
"def",
"parse_command",
"(",
"command",
")",
":",
"command",
"=",
"command",
".",
"strip",
"(",
")",
"if",
"not",
"command",
":",
"return",
"[",
"]",
"brackets_intervals",
"=",
"[",
"f",
".",
"span",
"(",
")",
"for",
"f",
"in",
"_BRACKETS_PATTERN",
".",
"finditer",
"(",
"command",
")",
"]",
"quotes_intervals",
"=",
"[",
"f",
".",
"span",
"(",
")",
"for",
"f",
"in",
"_QUOTES_PATTERN",
".",
"finditer",
"(",
"command",
")",
"]",
"whitespaces_intervals",
"=",
"[",
"f",
".",
"span",
"(",
")",
"for",
"f",
"in",
"_WHITESPACE_PATTERN",
".",
"finditer",
"(",
"command",
")",
"]",
"if",
"not",
"whitespaces_intervals",
":",
"return",
"[",
"command",
"]",
"arguments",
"=",
"[",
"]",
"idx0",
"=",
"0",
"for",
"start",
",",
"end",
"in",
"whitespaces_intervals",
"+",
"[",
"(",
"len",
"(",
"command",
")",
",",
"None",
")",
"]",
":",
"# Skip whitespace stretches enclosed in brackets or double quotes.",
"if",
"not",
"any",
"(",
"interval",
"[",
"0",
"]",
"<",
"start",
"<",
"interval",
"[",
"1",
"]",
"for",
"interval",
"in",
"brackets_intervals",
"+",
"quotes_intervals",
")",
":",
"argument",
"=",
"command",
"[",
"idx0",
":",
"start",
"]",
"# Strip leading and trailing double quote if they are paired.",
"if",
"(",
"argument",
".",
"startswith",
"(",
"\"\\\"\"",
")",
"and",
"argument",
".",
"endswith",
"(",
"\"\\\"\"",
")",
"or",
"argument",
".",
"startswith",
"(",
"\"'\"",
")",
"and",
"argument",
".",
"endswith",
"(",
"\"'\"",
")",
")",
":",
"argument",
"=",
"argument",
"[",
"1",
":",
"-",
"1",
"]",
"arguments",
".",
"append",
"(",
"argument",
")",
"idx0",
"=",
"end",
"return",
"arguments"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/cli/command_parser.py#L56-L101
|
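A usage sketch of the quoting/bracket rules described above (assuming a TensorFlow 1.x checkout where this module is importable; the expected outputs follow from the docstring):

from tensorflow.python.debug.cli.command_parser import parse_command

# Whitespace inside double quotes does not split, and the paired
# surrounding quotes are stripped from the resulting argument.
print(parse_command('print_tensor "my scope/my tensor" -a'))
# expected: ['print_tensor', 'my scope/my tensor', '-a']

# Whitespace inside square brackets does not split either.
print(parse_command('eval np.sum([1, 2, 3])'))
# expected: ['eval', 'np.sum([1, 2, 3])']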
|
apple/swift-lldb
|
d74be846ef3e62de946df343e8c234bde93a8912
|
scripts/Python/static-binding/lldb.py
|
python
|
SBEvent.GetDataFlavor
|
(self)
|
return _lldb.SBEvent_GetDataFlavor(self)
|
GetDataFlavor(SBEvent self) -> char const *
|
GetDataFlavor(SBEvent self) -> char const *
|
[
"GetDataFlavor",
"(",
"SBEvent",
"self",
")",
"-",
">",
"char",
"const",
"*"
] |
def GetDataFlavor(self):
"""GetDataFlavor(SBEvent self) -> char const *"""
return _lldb.SBEvent_GetDataFlavor(self)
|
[
"def",
"GetDataFlavor",
"(",
"self",
")",
":",
"return",
"_lldb",
".",
"SBEvent_GetDataFlavor",
"(",
"self",
")"
] |
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L4797-L4799
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/array_analysis.py
|
python
|
ShapeEquivSet.get_equiv_var
|
(self, obj)
|
return vs[0] if vs != [] else None
|
If the given object is equivalent to some defined variable,
return the variable, or None otherwise.
|
If the given object is equivalent to some defined variable,
return the variable, or None otherwise.
|
[
"If",
"the",
"given",
"object",
"is",
"equivalent",
"to",
"some",
"defined",
"variable",
"return",
"the",
"variable",
"or",
"None",
"otherwise",
"."
] |
def get_equiv_var(self, obj):
"""If the given object is equivalent to some defined variable,
return the variable, or None otherwise.
"""
names = self._get_names(obj)
if len(names) != 1:
return None
ind = self._get_ind(names[0])
vs = self.ind_to_var.get(ind, [])
return vs[0] if vs != [] else None
|
[
"def",
"get_equiv_var",
"(",
"self",
",",
"obj",
")",
":",
"names",
"=",
"self",
".",
"_get_names",
"(",
"obj",
")",
"if",
"len",
"(",
"names",
")",
"!=",
"1",
":",
"return",
"None",
"ind",
"=",
"self",
".",
"_get_ind",
"(",
"names",
"[",
"0",
"]",
")",
"vs",
"=",
"self",
".",
"ind_to_var",
".",
"get",
"(",
"ind",
",",
"[",
"]",
")",
"return",
"vs",
"[",
"0",
"]",
"if",
"vs",
"!=",
"[",
"]",
"else",
"None"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/array_analysis.py#L425-L434
|
|
apache/parquet-cpp
|
642da055adf009652689b20e68a198cffb857651
|
build-support/cpplint.py
|
python
|
_OutputFormat
|
()
|
return _cpplint_state.output_format
|
Gets the module's output format.
|
Gets the module's output format.
|
[
"Gets",
"the",
"module",
"s",
"output",
"format",
"."
] |
def _OutputFormat():
"""Gets the module's output format."""
return _cpplint_state.output_format
|
[
"def",
"_OutputFormat",
"(",
")",
":",
"return",
"_cpplint_state",
".",
"output_format"
] |
https://github.com/apache/parquet-cpp/blob/642da055adf009652689b20e68a198cffb857651/build-support/cpplint.py#L851-L853
|
|
VowpalWabbit/vowpal_wabbit
|
866b8fa88ff85a957c7eb72065ea44518b9ba416
|
python/docs/source/conf.py
|
python
|
_split_repo_url
|
(url)
|
return org, repo
|
Split a repository URL into an org / repo combination.
|
Split a repository URL into an org / repo combination.
|
[
"Split",
"a",
"repository",
"URL",
"into",
"an",
"org",
"/",
"repo",
"combination",
"."
] |
def _split_repo_url(url):
"""Split a repository URL into an org / repo combination."""
if "github.com/" in url:
end = url.split("github.com/")[-1]
org, repo = end.split("/")[:2]
else:
raise ValueError(
f"Currently Binder/JupyterHub repositories must be on GitHub, got {url}"
)
return org, repo
|
[
"def",
"_split_repo_url",
"(",
"url",
")",
":",
"if",
"\"github.com/\"",
"in",
"url",
":",
"end",
"=",
"url",
".",
"split",
"(",
"\"github.com/\"",
")",
"[",
"-",
"1",
"]",
"org",
",",
"repo",
"=",
"end",
".",
"split",
"(",
"\"/\"",
")",
"[",
":",
"2",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"f\"Currently Binder/JupyterHub repositories must be on GitHub, got {url}\"",
")",
"return",
"org",
",",
"repo"
] |
https://github.com/VowpalWabbit/vowpal_wabbit/blob/866b8fa88ff85a957c7eb72065ea44518b9ba416/python/docs/source/conf.py#L188-L197
|
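A doctest-style check of the contract (the helper is module-private, so this is illustrative only, assuming the definition above is in scope):

org, repo = _split_repo_url("https://github.com/VowpalWabbit/vowpal_wabbit")
assert (org, repo) == ("VowpalWabbit", "vowpal_wabbit")

# Non-GitHub URLs are rejected:
# _split_repo_url("https://gitlab.com/org/repo")  # raises ValueError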
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/aui.py
|
python
|
AuiManager.HideHint
|
(*args, **kwargs)
|
return _aui.AuiManager_HideHint(*args, **kwargs)
|
HideHint(self)
|
HideHint(self)
|
[
"HideHint",
"(",
"self",
")"
] |
def HideHint(*args, **kwargs):
"""HideHint(self)"""
return _aui.AuiManager_HideHint(*args, **kwargs)
|
[
"def",
"HideHint",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_aui",
".",
"AuiManager_HideHint",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/aui.py#L727-L729
|
|
thalium/icebox
|
99d147d5b9269222225443ce171b4fd46d8985d4
|
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py
|
python
|
xmlNs.newTextChild
|
(self, parent, name, content)
|
return __tmp
|
Creation of a new child element, added at the end of
@parent children list. @ns and @content parameters are
optional (None). If @ns is None, the newly created element
inherits the namespace of @parent. If @content is non-None,
a child TEXT node will be created containing the string
@content. NOTE: Use xmlNewChild() if @content will contain
entities that need to be preserved. Use this function,
xmlNewTextChild(), if you need to ensure that reserved XML
chars that might appear in @content, such as the ampersand,
greater-than or less-than signs, are automatically replaced
by their XML escaped entity representations.
|
Creation of a new child element, added at the end of
|
[
"Creation",
"of",
"a",
"new",
"child",
"element",
"added",
"at",
"the",
"end",
"of"
] |
def newTextChild(self, parent, name, content):
"""Creation of a new child element, added at the end of
@parent children list. @ns and @content parameters are
optional (None). If @ns is None, the newly created element
inherits the namespace of @parent. If @content is non-None,
a child TEXT node will be created containing the string
@content. NOTE: Use xmlNewChild() if @content will contain
entities that need to be preserved. Use this function,
xmlNewTextChild(), if you need to ensure that reserved XML
chars that might appear in @content, such as the ampersand,
greater-than or less-than signs, are automatically replaced
by their XML escaped entity representations. """
if parent is None: parent__o = None
else: parent__o = parent._o
ret = libxml2mod.xmlNewTextChild(parent__o, self._o, name, content)
if ret is None:raise treeError('xmlNewTextChild() failed')
__tmp = xmlNode(_obj=ret)
return __tmp
|
[
"def",
"newTextChild",
"(",
"self",
",",
"parent",
",",
"name",
",",
"content",
")",
":",
"if",
"parent",
"is",
"None",
":",
"parent__o",
"=",
"None",
"else",
":",
"parent__o",
"=",
"parent",
".",
"_o",
"ret",
"=",
"libxml2mod",
".",
"xmlNewTextChild",
"(",
"parent__o",
",",
"self",
".",
"_o",
",",
"name",
",",
"content",
")",
"if",
"ret",
"is",
"None",
":",
"raise",
"treeError",
"(",
"'xmlNewTextChild() failed'",
")",
"__tmp",
"=",
"xmlNode",
"(",
"_obj",
"=",
"ret",
")",
"return",
"__tmp"
] |
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L5189-L5206
|
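A sketch of the escaping behavior the docstring describes, using the more common xmlNode form of the same binding (assumes the libxml2 Python bindings are installed):

import libxml2

doc = libxml2.newDoc("1.0")
root = libxml2.newNode("root")
doc.setRootElement(root)
# xmlNewTextChild stores the raw text; serialization escapes the
# ampersand as &amp; instead of treating it as an entity reference.
root.newTextChild(None, "note", "fish & chips")
print(doc.serialize(format=1))
doc.freeDoc()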
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/x86/toolchain/lib/python2.7/idlelib/AutoComplete.py
|
python
|
AutoComplete.get_entity
|
(self, name)
|
return eval(name, namespace)
|
Lookup name in a namespace spanning sys.modules and __main__.__dict__
|
Lookup name in a namespace spanning sys.modules and __main__.__dict__
|
[
"Lookup",
"name",
"in",
"a",
"namespace",
"spanning",
"sys",
".",
"modules",
"and",
"__main",
".",
"dict__"
] |
def get_entity(self, name):
"""Lookup name in a namespace spanning sys.modules and __main.dict__"""
namespace = sys.modules.copy()
namespace.update(__main__.__dict__)
return eval(name, namespace)
|
[
"def",
"get_entity",
"(",
"self",
",",
"name",
")",
":",
"namespace",
"=",
"sys",
".",
"modules",
".",
"copy",
"(",
")",
"namespace",
".",
"update",
"(",
"__main__",
".",
"__dict__",
")",
"return",
"eval",
"(",
"name",
",",
"namespace",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/idlelib/AutoComplete.py#L223-L227
|
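A standalone sketch of the namespace-merging trick (note that eval on untrusted input is unsafe; here it only resolves a dotted name):

import sys
import __main__

# Merge every imported module with whatever is defined in __main__,
# then resolve a dotted name against that combined namespace.
namespace = sys.modules.copy()
namespace.update(__main__.__dict__)
print(eval("sys.maxsize", namespace))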
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/grid.py
|
python
|
GridCellNumberRenderer.__init__
|
(self, *args, **kwargs)
|
__init__(self) -> GridCellNumberRenderer
|
__init__(self) -> GridCellNumberRenderer
|
[
"__init__",
"(",
"self",
")",
"-",
">",
"GridCellNumberRenderer"
] |
def __init__(self, *args, **kwargs):
"""__init__(self) -> GridCellNumberRenderer"""
_grid.GridCellNumberRenderer_swiginit(self,_grid.new_GridCellNumberRenderer(*args, **kwargs))
self._setOORInfo(self)
|
[
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"_grid",
".",
"GridCellNumberRenderer_swiginit",
"(",
"self",
",",
"_grid",
".",
"new_GridCellNumberRenderer",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
")",
"self",
".",
"_setOORInfo",
"(",
"self",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/grid.py#L165-L168
|
||
apache/trafodion
|
8455c839ad6b6d7b6e04edda5715053095b78046
|
core/sqf/src/seatrans/hbase-trx/src/main/python/thrift1/gen-py/hbase/Hbase.py
|
python
|
Iface.scannerOpenWithScan
|
(self, tableName, scan, attributes)
|
Get a scanner on the current table, using the Scan instance
for the scan parameters.
Parameters:
- tableName: name of table
- scan: Scan instance
- attributes: Scan attributes
|
Get a scanner on the current table, using the Scan instance
for the scan parameters.
|
[
"Get",
"a",
"scanner",
"on",
"the",
"current",
"table",
"using",
"the",
"Scan",
"instance",
"for",
"the",
"scan",
"parameters",
"."
] |
def scannerOpenWithScan(self, tableName, scan, attributes):
"""
Get a scanner on the current table, using the Scan instance
for the scan parameters.
Parameters:
- tableName: name of table
- scan: Scan instance
- attributes: Scan attributes
"""
pass
|
[
"def",
"scannerOpenWithScan",
"(",
"self",
",",
"tableName",
",",
"scan",
",",
"attributes",
")",
":",
"pass"
] |
https://github.com/apache/trafodion/blob/8455c839ad6b6d7b6e04edda5715053095b78046/core/sqf/src/seatrans/hbase-trx/src/main/python/thrift1/gen-py/hbase/Hbase.py#L432-L442
|
||
etotheipi/BitcoinArmory
|
2a6fc5355bb0c6fe26e387ccba30a5baafe8cd98
|
armoryengine/Transaction.py
|
python
|
PyTx.fetchCpp
|
(self)
|
return TheBDM.getTxByHash(self.getHash())
|
Use the info in this PyTx to get the C++ version from TheBDM
|
Use the info in this PyTx to get the C++ version from TheBDM
|
[
"Use",
"the",
"info",
"in",
"this",
"PyTx",
"to",
"get",
"the",
"C",
"++",
"version",
"from",
"TheBDM"
] |
def fetchCpp(self):
""" Use the info in this PyTx to get the C++ version from TheBDM """
return TheBDM.getTxByHash(self.getHash())
|
[
"def",
"fetchCpp",
"(",
"self",
")",
":",
"return",
"TheBDM",
".",
"getTxByHash",
"(",
"self",
".",
"getHash",
"(",
")",
")"
] |
https://github.com/etotheipi/BitcoinArmory/blob/2a6fc5355bb0c6fe26e387ccba30a5baafe8cd98/armoryengine/Transaction.py#L760-L762
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/tornado/tornado-6/tornado/iostream.py
|
python
|
BaseIOStream.write_to_fd
|
(self, data: memoryview)
|
Attempts to write ``data`` to the underlying file.
Returns the number of bytes written.
|
Attempts to write ``data`` to the underlying file.
|
[
"Attempts",
"to",
"write",
"data",
"to",
"the",
"underlying",
"file",
"."
] |
def write_to_fd(self, data: memoryview) -> int:
"""Attempts to write ``data`` to the underlying file.
Returns the number of bytes written.
"""
raise NotImplementedError()
|
[
"def",
"write_to_fd",
"(",
"self",
",",
"data",
":",
"memoryview",
")",
"->",
"int",
":",
"raise",
"NotImplementedError",
"(",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/tornado/tornado-6/tornado/iostream.py#L298-L303
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/setuptools/py2/setuptools/_vendor/pyparsing.py
|
python
|
delimitedList
|
( expr, delim=",", combine=False )
|
Helper to define a delimited list of expressions - the delimiter defaults to ','.
By default, the list elements and delimiters can have intervening whitespace, and
comments, but this can be overridden by passing C{combine=True} in the constructor.
If C{combine} is set to C{True}, the matching tokens are returned as a single token
string, with the delimiters included; otherwise, the matching tokens are returned
as a list of tokens, with the delimiters suppressed.
Example::
delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
|
Helper to define a delimited list of expressions - the delimiter defaults to ','.
By default, the list elements and delimiters can have intervening whitespace, and
comments, but this can be overridden by passing C{combine=True} in the constructor.
If C{combine} is set to C{True}, the matching tokens are returned as a single token
string, with the delimiters included; otherwise, the matching tokens are returned
as a list of tokens, with the delimiters suppressed.
|
[
"Helper",
"to",
"define",
"a",
"delimited",
"list",
"of",
"expressions",
"-",
"the",
"delimiter",
"defaults",
"to",
".",
"By",
"default",
"the",
"list",
"elements",
"and",
"delimiters",
"can",
"have",
"intervening",
"whitespace",
"and",
"comments",
"but",
"this",
"can",
"be",
"overridden",
"by",
"passing",
"C",
"{",
"combine",
"=",
"True",
"}",
"in",
"the",
"constructor",
".",
"If",
"C",
"{",
"combine",
"}",
"is",
"set",
"to",
"C",
"{",
"True",
"}",
"the",
"matching",
"tokens",
"are",
"returned",
"as",
"a",
"single",
"token",
"string",
"with",
"the",
"delimiters",
"included",
";",
"otherwise",
"the",
"matching",
"tokens",
"are",
"returned",
"as",
"a",
"list",
"of",
"tokens",
"with",
"the",
"delimiters",
"suppressed",
"."
] |
def delimitedList( expr, delim=",", combine=False ):
"""
Helper to define a delimited list of expressions - the delimiter defaults to ','.
By default, the list elements and delimiters can have intervening whitespace, and
comments, but this can be overridden by passing C{combine=True} in the constructor.
If C{combine} is set to C{True}, the matching tokens are returned as a single token
string, with the delimiters included; otherwise, the matching tokens are returned
as a list of tokens, with the delimiters suppressed.
Example::
delimitedList(Word(alphas)).parseString("aa,bb,cc") # -> ['aa', 'bb', 'cc']
delimitedList(Word(hexnums), delim=':', combine=True).parseString("AA:BB:CC:DD:EE") # -> ['AA:BB:CC:DD:EE']
"""
dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..."
if combine:
return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName)
else:
return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName)
|
[
"def",
"delimitedList",
"(",
"expr",
",",
"delim",
"=",
"\",\"",
",",
"combine",
"=",
"False",
")",
":",
"dlName",
"=",
"_ustr",
"(",
"expr",
")",
"+",
"\" [\"",
"+",
"_ustr",
"(",
"delim",
")",
"+",
"\" \"",
"+",
"_ustr",
"(",
"expr",
")",
"+",
"\"]...\"",
"if",
"combine",
":",
"return",
"Combine",
"(",
"expr",
"+",
"ZeroOrMore",
"(",
"delim",
"+",
"expr",
")",
")",
".",
"setName",
"(",
"dlName",
")",
"else",
":",
"return",
"(",
"expr",
"+",
"ZeroOrMore",
"(",
"Suppress",
"(",
"delim",
")",
"+",
"expr",
")",
")",
".",
"setName",
"(",
"dlName",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py2/setuptools/_vendor/pyparsing.py#L4450-L4467
|
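The combine flag is the interesting knob; a short sketch using the pyparsing 2.x names matching this vendored copy:

from pyparsing import Word, alphas, hexnums, delimitedList

# combine=False (default): delimiters suppressed, one token per element.
print(delimitedList(Word(alphas)).parseString("aa, bb, cc").asList())
# -> ['aa', 'bb', 'cc']

# combine=True: a single token with the delimiters kept.
print(delimitedList(Word(hexnums), delim=":", combine=True)
      .parseString("AA:BB:CC").asList())
# -> ['AA:BB:CC']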
||
openmm/openmm
|
cb293447c4fc8b03976dfe11399f107bab70f3d9
|
wrappers/python/openmm/app/charmmparameterset.py
|
python
|
CharmmParameterSet.condense
|
(self)
|
return self
|
This function goes through each of the parameter type dicts and
eliminates duplicate types. After calling this function, every unique
bond, angle, dihedral, improper, or cmap type will pair with EVERY key
in the type mapping dictionaries that points to the equivalent type
Example
-------
>>> params = CharmmParameterSet('charmm.prm').condense()
|
This function goes through each of the parameter type dicts and
eliminates duplicate types. After calling this function, every unique
bond, angle, dihedral, improper, or cmap type will pair with EVERY key
in the type mapping dictionaries that points to the equivalent type
|
[
"This",
"function",
"goes",
"through",
"each",
"of",
"the",
"parameter",
"type",
"dicts",
"and",
"eliminates",
"duplicate",
"types",
".",
"After",
"calling",
"this",
"function",
"every",
"unique",
"bond",
"angle",
"dihedral",
"improper",
"or",
"cmap",
"type",
"will",
"pair",
"with",
"EVERY",
"key",
"in",
"the",
"type",
"mapping",
"dictionaries",
"that",
"points",
"to",
"the",
"equivalent",
"type"
] |
def condense(self):
"""
This function goes through each of the parameter type dicts and
eliminates duplicate types. After calling this function, every unique
bond, angle, dihedral, improper, or cmap type will pair with EVERY key
in the type mapping dictionaries that points to the equivalent type
Example
-------
>>> params = CharmmParameterSet('charmm.prm').condense()
"""
# First scan through all of the bond types
self._condense_types(self.bond_types)
self._condense_types(self.angle_types)
self._condense_types(self.urey_bradley_types)
self._condense_types(self.improper_types)
self._condense_types(self.cmap_types)
# Dihedrals have to be handled separately, since each key corresponds to
# a list of (potentially multiterm) dihedral terms. Since all terms in a
# multiterm dihedral have to have a DIFFERENT periodicity, we don't have
# to condense _within_ a single list of torsions assigned to the same
# key (they're guaranteed to be different)
keylist = list(self.dihedral_types.keys())
for i in range(len(keylist) - 1):
key1 = keylist[i]
for dihedral in self.dihedral_types[key1]:
for j in range(i+1, len(keylist)):
key2 = keylist[j]
for jj, dihedral2 in enumerate(self.dihedral_types[key2]):
if dihedral2 == dihedral:
self.dihedral_types[key2][jj] = dihedral
return self
|
[
"def",
"condense",
"(",
"self",
")",
":",
"# First scan through all of the bond types",
"self",
".",
"_condense_types",
"(",
"self",
".",
"bond_types",
")",
"self",
".",
"_condense_types",
"(",
"self",
".",
"angle_types",
")",
"self",
".",
"_condense_types",
"(",
"self",
".",
"urey_bradley_types",
")",
"self",
".",
"_condense_types",
"(",
"self",
".",
"improper_types",
")",
"self",
".",
"_condense_types",
"(",
"self",
".",
"cmap_types",
")",
"# Dihedrals have to be handled separately, since each key corresponds to",
"# a list of (potentially multiterm) dihedral terms. Since all terms in a",
"# multiterm dihedral have to have a DIFFERENT periodicity, we don't have",
"# to condense _within_ a single list of torsions assigned to the same",
"# key (they're guaranteed to be different)",
"keylist",
"=",
"list",
"(",
"self",
".",
"dihedral_types",
".",
"keys",
"(",
")",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"keylist",
")",
"-",
"1",
")",
":",
"key1",
"=",
"keylist",
"[",
"i",
"]",
"for",
"dihedral",
"in",
"self",
".",
"dihedral_types",
"[",
"key1",
"]",
":",
"for",
"j",
"in",
"range",
"(",
"i",
"+",
"1",
",",
"len",
"(",
"keylist",
")",
")",
":",
"key2",
"=",
"keylist",
"[",
"j",
"]",
"for",
"jj",
",",
"dihedral2",
"in",
"enumerate",
"(",
"self",
".",
"dihedral_types",
"[",
"key2",
"]",
")",
":",
"if",
"dihedral2",
"==",
"dihedral",
":",
"self",
".",
"dihedral_types",
"[",
"key2",
"]",
"[",
"jj",
"]",
"=",
"dihedral",
"return",
"self"
] |
https://github.com/openmm/openmm/blob/cb293447c4fc8b03976dfe11399f107bab70f3d9/wrappers/python/openmm/app/charmmparameterset.py#L655-L686
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/fsspec/spec.py
|
python
|
AbstractBufferedFile._fetch_range
|
(self, start, end)
|
Get the specified set of bytes from remote
|
Get the specified set of bytes from remote
|
[
"Get",
"the",
"specified",
"set",
"of",
"bytes",
"from",
"remote"
] |
def _fetch_range(self, start, end):
"""Get the specified set of bytes from remote"""
raise NotImplementedError
|
[
"def",
"_fetch_range",
"(",
"self",
",",
"start",
",",
"end",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/fsspec/spec.py#L1206-L1208
|
||
pytorch/pytorch
|
7176c92687d3cc847cc046bf002269c6949a21c2
|
torch/ao/quantization/fx/prepare.py
|
python
|
maybe_insert_observers_before_graph_output
|
(
graph_output_node: Node,
output_quantized_idxs: List[int],
node_name_to_target_dtype: Dict[str, Dict[str, Optional[torch.dtype]]],
qconfig_map: Dict[str, QConfigAny],
model: torch.nn.Module,
modules: Dict[str, torch.nn.Module],
graph: Graph,
)
|
If the output needs to be quantized and there are any nodes
in the output which are not already observed, inserts observers
for those nodes.
|
If the output needs to be quantized and there are any nodes
in the output which are not already observed, inserts observers
for those nodes.
|
[
"If",
"the",
"output",
"needs",
"to",
"be",
"quantized",
"and",
"there",
"are",
"any",
"nodes",
"in",
"the",
"output",
"which",
"are",
"not",
"already",
"observed",
"inserts",
"observers",
"for",
"those",
"nodes",
"."
] |
def maybe_insert_observers_before_graph_output(
graph_output_node: Node,
output_quantized_idxs: List[int],
node_name_to_target_dtype: Dict[str, Dict[str, Optional[torch.dtype]]],
qconfig_map: Dict[str, QConfigAny],
model: torch.nn.Module,
modules: Dict[str, torch.nn.Module],
graph: Graph,
) -> None:
"""
If the output needs to be quantized and there are any nodes
in the output which are not already observed, inserts observers
for those nodes.
"""
# TODO(future PR): update the output_quantized_idxs API to match
# arbitrary data structures. There is always a single output, and
# that output can have arbitrary nesting of values. List[int] is
# not the right data type for this.
assert output_quantized_idxs == [0] or output_quantized_idxs == [], \
'unrecognized format of output_quantized_idxs'
# Currently dequants are inserted in the convert step. So, we only
# have to do anything if the output is hardcoded to be quantized
if output_quantized_idxs == []:
return
# TODO(future PR): support more dtypes in model outputs, if necessary
output_target_dtype = torch.quint8
def _recursive_maybe_replace_node_with_obs(
maybe_node: Argument,
target_dtype: torch.dtype,
node_name_to_target_dtype: Dict[str, Dict[str, Optional[torch.dtype]]],
qconfig_map: Dict[str, QConfigAny],
model: torch.nn.Module,
modules: Dict[str, torch.nn.Module],
graph: Graph,
) -> Argument:
"""
Navigate an arbitrary data structure of lists, tuples, dicts.
For each container type, recurse on all inputs. Once any Node
is found, insert an observer if needed and do not recurse further.
For example, given a structure of
{'foo1': [[bar1]], 'foo2': {'foo3': [[[bar3]]]}}
we recurse down to bar1 and bar3, observe them if necessary,
and if we inserted an observer then replace the original node
with its observer.
Returns the data structure with all nodes needing observation being
replaced by their observers.
"""
if isinstance(maybe_node, Node):
# check dtype of this node
this_node_dtype = get_arg_target_dtype_as_output(
maybe_node, modules, node_name_to_target_dtype)
if this_node_dtype != target_dtype:
# insert observer
qconfig = qconfig_map.get(maybe_node.name)
# TODO(future PR): see if we need to allow specifying qconfig
# on output nodes, to remove the restriction below.
assert qconfig is not None, \
'Quantizing the output node without a qconfig is not supported'
observer_mod = qconfig.activation()
observer_node = insert_observer(
maybe_node, maybe_node, observer_mod, model, modules, graph)
return observer_node
else:
return maybe_node
elif isinstance(maybe_node, (list, tuple)):
results = []
for inner_node in maybe_node:
results.append(_recursive_maybe_replace_node_with_obs(
inner_node, target_dtype, node_name_to_target_dtype,
qconfig_map, model, modules, graph))
if isinstance(maybe_node, list):
return results
else:
return tuple(results)
elif isinstance(maybe_node, dict):
results_dict = {}
for k, inner_v in maybe_node.items():
results_dict[k] = _recursive_maybe_replace_node_with_obs(
inner_v, target_dtype, node_name_to_target_dtype,
qconfig_map, model, modules, graph)
return results_dict
else:
return results
new_args = []
for old_arg in graph_output_node.args:
new_args.append(
_recursive_maybe_replace_node_with_obs(
old_arg, output_target_dtype, node_name_to_target_dtype,
qconfig_map, model, modules, graph))
graph_output_node.args = tuple(new_args)
|
[
"def",
"maybe_insert_observers_before_graph_output",
"(",
"graph_output_node",
":",
"Node",
",",
"output_quantized_idxs",
":",
"List",
"[",
"int",
"]",
",",
"node_name_to_target_dtype",
":",
"Dict",
"[",
"str",
",",
"Dict",
"[",
"str",
",",
"Optional",
"[",
"torch",
".",
"dtype",
"]",
"]",
"]",
",",
"qconfig_map",
":",
"Dict",
"[",
"str",
",",
"QConfigAny",
"]",
",",
"model",
":",
"torch",
".",
"nn",
".",
"Module",
",",
"modules",
":",
"Dict",
"[",
"str",
",",
"torch",
".",
"nn",
".",
"Module",
"]",
",",
"graph",
":",
"Graph",
",",
")",
"->",
"None",
":",
"# TODO(future PR): update the output_quantized_idxs API to match",
"# arbitrary data structures. There is always a single output, and",
"# that output can have arbitrary nesting of values. List[int] is",
"# not the right data type for this.",
"assert",
"output_quantized_idxs",
"==",
"[",
"0",
"]",
"or",
"output_quantized_idxs",
"==",
"[",
"]",
",",
"'unrecognized format of output_quantized_idxs'",
"# Currently dequants are inserted in the convert step. So, we only",
"# have to do anything if the output is hardcoded to be quantized",
"if",
"output_quantized_idxs",
"==",
"[",
"]",
":",
"return",
"# TODO(future PR): support more dtypes in model outputs, if necessary",
"output_target_dtype",
"=",
"torch",
".",
"quint8",
"def",
"_recursive_maybe_replace_node_with_obs",
"(",
"maybe_node",
":",
"Argument",
",",
"target_dtype",
":",
"torch",
".",
"dtype",
",",
"node_name_to_target_dtype",
":",
"Dict",
"[",
"str",
",",
"Dict",
"[",
"str",
",",
"Optional",
"[",
"torch",
".",
"dtype",
"]",
"]",
"]",
",",
"qconfig_map",
":",
"Dict",
"[",
"str",
",",
"QConfigAny",
"]",
",",
"model",
":",
"torch",
".",
"nn",
".",
"Module",
",",
"modules",
":",
"Dict",
"[",
"str",
",",
"torch",
".",
"nn",
".",
"Module",
"]",
",",
"graph",
":",
"Graph",
",",
")",
"->",
"Argument",
":",
"\"\"\"\n Navigate an arbitrary data structure of lists, tuples, dicts.\n For each container type, recurse on all inputs. Once any Node\n is found, insert an observer if needed and do not recurse further.\n\n For example, given a structure of\n\n {'foo1': [[bar1]], 'foo2': {'foo3': [[[bar3]]]}}\n\n we recurse down to bar1 and bar3, observe them if necessary,\n and if we inserted an observer then replace the original node\n with its observer.\n\n Returns the data structure with all nodes needing observation being\n replaced by their observers.\n \"\"\"",
"if",
"isinstance",
"(",
"maybe_node",
",",
"Node",
")",
":",
"# check dtype of this node",
"this_node_dtype",
"=",
"get_arg_target_dtype_as_output",
"(",
"maybe_node",
",",
"modules",
",",
"node_name_to_target_dtype",
")",
"if",
"this_node_dtype",
"!=",
"target_dtype",
":",
"# insert observer",
"qconfig",
"=",
"qconfig_map",
".",
"get",
"(",
"maybe_node",
".",
"name",
")",
"# TODO(future PR): see if we need to allow specifying qconfig",
"# on output nodes, to remove the restriction below.",
"assert",
"qconfig",
"is",
"not",
"None",
",",
"'Quantizing the output node without a qconfig is not supported'",
"observer_mod",
"=",
"qconfig",
".",
"activation",
"(",
")",
"observer_node",
"=",
"insert_observer",
"(",
"maybe_node",
",",
"maybe_node",
",",
"observer_mod",
",",
"model",
",",
"modules",
",",
"graph",
")",
"return",
"observer_node",
"else",
":",
"return",
"maybe_node",
"elif",
"isinstance",
"(",
"maybe_node",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"results",
"=",
"[",
"]",
"for",
"inner_node",
"in",
"maybe_node",
":",
"results",
".",
"append",
"(",
"_recursive_maybe_replace_node_with_obs",
"(",
"inner_node",
",",
"target_dtype",
",",
"node_name_to_target_dtype",
",",
"qconfig_map",
",",
"model",
",",
"modules",
",",
"graph",
")",
")",
"if",
"isinstance",
"(",
"maybe_node",
",",
"list",
")",
":",
"return",
"results",
"else",
":",
"return",
"tuple",
"(",
"results",
")",
"elif",
"isinstance",
"(",
"maybe_node",
",",
"dict",
")",
":",
"results_dict",
"=",
"{",
"}",
"for",
"k",
",",
"inner_v",
"in",
"maybe_node",
".",
"items",
"(",
")",
":",
"results_dict",
"[",
"k",
"]",
"=",
"_recursive_maybe_replace_node_with_obs",
"(",
"inner_v",
",",
"target_dtype",
",",
"node_name_to_target_dtype",
",",
"qconfig_map",
",",
"model",
",",
"modules",
",",
"graph",
")",
"return",
"results_dict",
"else",
":",
"return",
"results",
"new_args",
"=",
"[",
"]",
"for",
"old_arg",
"in",
"graph_output_node",
".",
"args",
":",
"new_args",
".",
"append",
"(",
"_recursive_maybe_replace_node_with_obs",
"(",
"old_arg",
",",
"output_target_dtype",
",",
"node_name_to_target_dtype",
",",
"qconfig_map",
",",
"model",
",",
"modules",
",",
"graph",
")",
")",
"graph_output_node",
".",
"args",
"=",
"tuple",
"(",
"new_args",
")"
] |
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/ao/quantization/fx/prepare.py#L684-L782
|
||
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/distribute/all_reduce.py
|
python
|
_apply_unary_to_chunks
|
(f, chunks_by_dev)
|
return output
|
Apply a unary op to each tensor in chunks_by_dev, on same device.
Args:
f: a unary function over T `tf.Tensor`.
chunks_by_dev: list of lists of T `tf.Tensor`.
Returns:
new list of lists of T `tf.Tensor` with the same structure as
chunks_by_dev containing the derived tensors.
|
Apply a unary op to each tensor in chunks_by_dev, on same device.
|
[
"Apply",
"a",
"unary",
"op",
"to",
"each",
"tensor",
"in",
"chunks_by_dev",
"on",
"same",
"device",
"."
] |
def _apply_unary_to_chunks(f, chunks_by_dev):
"""Apply a unary op to each tensor in chunks_by_dev, on same device.
Args:
f: a unary function over T `tf.Tensor`.
chunks_by_dev: list of lists of T `tf.Tensor`.
Returns:
new list of lists of T `tf.Tensor` with the same structure as
chunks_by_dev containing the derived tensors.
"""
output = []
for x in chunks_by_dev:
with ops.colocate_with(x[0]):
output.append([f(t) for t in x])
return output
|
[
"def",
"_apply_unary_to_chunks",
"(",
"f",
",",
"chunks_by_dev",
")",
":",
"output",
"=",
"[",
"]",
"for",
"x",
"in",
"chunks_by_dev",
":",
"with",
"ops",
".",
"colocate_with",
"(",
"x",
"[",
"0",
"]",
")",
":",
"output",
".",
"append",
"(",
"[",
"f",
"(",
"t",
")",
"for",
"t",
"in",
"x",
"]",
")",
"return",
"output"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/distribute/all_reduce.py#L359-L374
|
|
tiann/android-native-debug
|
198903ed9346dc4a74327a63cb98d449b97d8047
|
app/source/art/tools/cpplint.py
|
python
|
CheckIncludeLine
|
(filename, clean_lines, linenum, include_state, error)
|
Check rules that are applicable to #include lines.
Strings on #include lines are NOT removed from the elided line, to make
certain tasks easier. However, to prevent false positives, checks
applicable to #include lines in CheckLanguage must be put here.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
include_state: An _IncludeState instance in which the headers are inserted.
error: The function to call with any errors found.
|
Check rules that are applicable to #include lines.
|
[
"Check",
"rules",
"that",
"are",
"applicable",
"to",
"#include",
"lines",
"."
] |
def CheckIncludeLine(filename, clean_lines, linenum, include_state, error):
"""Check rules that are applicable to #include lines.
Strings on #include lines are NOT removed from the elided line, to make
certain tasks easier. However, to prevent false positives, checks
applicable to #include lines in CheckLanguage must be put here.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
include_state: An _IncludeState instance in which the headers are inserted.
error: The function to call with any errors found.
"""
fileinfo = FileInfo(filename)
line = clean_lines.lines[linenum]
# "include" should use the new style "foo/bar.h" instead of just "bar.h"
if _RE_PATTERN_INCLUDE_NEW_STYLE.search(line):
error(filename, linenum, 'build/include', 4,
'Include the directory when naming .h files')
# we shouldn't include a file more than once. actually, there are a
# handful of instances where doing so is okay, but in general it's
# not.
match = _RE_PATTERN_INCLUDE.search(line)
if match:
include = match.group(2)
is_system = (match.group(1) == '<')
if include in include_state:
error(filename, linenum, 'build/include', 4,
'"%s" already included at %s:%s' %
(include, filename, include_state[include]))
else:
include_state[include] = linenum
# We want to ensure that headers appear in the right order:
# 1) for foo.cc, foo.h (preferred location)
# 2) c system files
# 3) cpp system files
# 4) for foo.cc, foo.h (deprecated location)
# 5) other google headers
#
# We classify each include statement as one of those 5 types
# using a number of techniques. The include_state object keeps
# track of the highest type seen, and complains if we see a
# lower type after that.
error_message = include_state.CheckNextIncludeOrder(
_ClassifyInclude(fileinfo, include, is_system))
if error_message:
error(filename, linenum, 'build/include_order', 4,
'%s. Should be: %s.h, c system, c++ system, other.' %
(error_message, fileinfo.BaseName()))
if not include_state.IsInAlphabeticalOrder(include):
error(filename, linenum, 'build/include_alpha', 4,
'Include "%s" not in alphabetical order' % include)
# Look for any of the stream classes that are part of standard C++.
match = _RE_PATTERN_INCLUDE.match(line)
if match:
include = match.group(2)
if Match(r'(f|ind|io|i|o|parse|pf|stdio|str|)?stream$', include):
# Many unit tests use cout, so we exempt them.
if not _IsTestFilename(filename):
error(filename, linenum, 'readability/streams', 3,
'Streams are highly discouraged.')
|
[
"def",
"CheckIncludeLine",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"include_state",
",",
"error",
")",
":",
"fileinfo",
"=",
"FileInfo",
"(",
"filename",
")",
"line",
"=",
"clean_lines",
".",
"lines",
"[",
"linenum",
"]",
"# \"include\" should use the new style \"foo/bar.h\" instead of just \"bar.h\"",
"if",
"_RE_PATTERN_INCLUDE_NEW_STYLE",
".",
"search",
"(",
"line",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/include'",
",",
"4",
",",
"'Include the directory when naming .h files'",
")",
"# we shouldn't include a file more than once. actually, there are a",
"# handful of instances where doing so is okay, but in general it's",
"# not.",
"match",
"=",
"_RE_PATTERN_INCLUDE",
".",
"search",
"(",
"line",
")",
"if",
"match",
":",
"include",
"=",
"match",
".",
"group",
"(",
"2",
")",
"is_system",
"=",
"(",
"match",
".",
"group",
"(",
"1",
")",
"==",
"'<'",
")",
"if",
"include",
"in",
"include_state",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/include'",
",",
"4",
",",
"'\"%s\" already included at %s:%s'",
"%",
"(",
"include",
",",
"filename",
",",
"include_state",
"[",
"include",
"]",
")",
")",
"else",
":",
"include_state",
"[",
"include",
"]",
"=",
"linenum",
"# We want to ensure that headers appear in the right order:",
"# 1) for foo.cc, foo.h (preferred location)",
"# 2) c system files",
"# 3) cpp system files",
"# 4) for foo.cc, foo.h (deprecated location)",
"# 5) other google headers",
"#",
"# We classify each include statement as one of those 5 types",
"# using a number of techniques. The include_state object keeps",
"# track of the highest type seen, and complains if we see a",
"# lower type after that.",
"error_message",
"=",
"include_state",
".",
"CheckNextIncludeOrder",
"(",
"_ClassifyInclude",
"(",
"fileinfo",
",",
"include",
",",
"is_system",
")",
")",
"if",
"error_message",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/include_order'",
",",
"4",
",",
"'%s. Should be: %s.h, c system, c++ system, other.'",
"%",
"(",
"error_message",
",",
"fileinfo",
".",
"BaseName",
"(",
")",
")",
")",
"if",
"not",
"include_state",
".",
"IsInAlphabeticalOrder",
"(",
"include",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'build/include_alpha'",
",",
"4",
",",
"'Include \"%s\" not in alphabetical order'",
"%",
"include",
")",
"# Look for any of the stream classes that are part of standard C++.",
"match",
"=",
"_RE_PATTERN_INCLUDE",
".",
"match",
"(",
"line",
")",
"if",
"match",
":",
"include",
"=",
"match",
".",
"group",
"(",
"2",
")",
"if",
"Match",
"(",
"r'(f|ind|io|i|o|parse|pf|stdio|str|)?stream$'",
",",
"include",
")",
":",
"# Many unit tests use cout, so we exempt them.",
"if",
"not",
"_IsTestFilename",
"(",
"filename",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'readability/streams'",
",",
"3",
",",
"'Streams are highly discouraged.'",
")"
] |
https://github.com/tiann/android-native-debug/blob/198903ed9346dc4a74327a63cb98d449b97d8047/app/source/art/tools/cpplint.py#L3028-L3094
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/lib/agw/ultimatelistctrl.py
|
python
|
UltimateListMainWindow.DragFinish
|
(self, event)
|
return True
|
A drag and drop operation has just finished.
:param `event`: a :class:`MouseEvent` event to be processed.
|
A drag and drop operation has just finished.
|
[
"A",
"drag",
"and",
"drop",
"operation",
"has",
"just",
"finished",
"."
] |
def DragFinish(self, event):
"""
A drag and drop operation has just finished.
:param `event`: a :class:`MouseEvent` event to be processed.
"""
if not self._isDragging:
return False
self._isDragging = False
self._dragCount = 0
self._dragItem = None
self.SetCursor(self._oldCursor)
self.Refresh()
le = UltimateListEvent(wxEVT_COMMAND_LIST_END_DRAG, self.GetParent().GetId())
le.SetEventObject(self.GetParent())
le.m_itemIndex = self._dropTarget
le.m_pointDrag = event.GetPosition()
self.GetParent().GetEventHandler().ProcessEvent(le)
return True
|
[
"def",
"DragFinish",
"(",
"self",
",",
"event",
")",
":",
"if",
"not",
"self",
".",
"_isDragging",
":",
"return",
"False",
"self",
".",
"_isDragging",
"=",
"False",
"self",
".",
"_dragCount",
"=",
"0",
"self",
".",
"_dragItem",
"=",
"None",
"self",
".",
"SetCursor",
"(",
"self",
".",
"_oldCursor",
")",
"self",
".",
"Refresh",
"(",
")",
"le",
"=",
"UltimateListEvent",
"(",
"wxEVT_COMMAND_LIST_END_DRAG",
",",
"self",
".",
"GetParent",
"(",
")",
".",
"GetId",
"(",
")",
")",
"le",
".",
"SetEventObject",
"(",
"self",
".",
"GetParent",
"(",
")",
")",
"le",
".",
"m_itemIndex",
"=",
"self",
".",
"_dropTarget",
"le",
".",
"m_pointDrag",
"=",
"event",
".",
"GetPosition",
"(",
")",
"self",
".",
"GetParent",
"(",
")",
".",
"GetEventHandler",
"(",
")",
".",
"ProcessEvent",
"(",
"le",
")",
"return",
"True"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/ultimatelistctrl.py#L7815-L7837
|
|
natanielruiz/android-yolo
|
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
|
jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/tensor_signature.py
|
python
|
create_placeholders_from_signatures
|
(signatures)
|
return {
key: signatures[key].get_placeholder()
for key in signatures}
|
Creates placeholders from given signatures.
Args:
signatures: Dict of `TensorSignature` objects or single `TensorSignature`,
or `None`.
Returns:
Dict of `tf.placeholder` objects or single `tf.placeholder`, or `None`.
|
Creates placeholders from given signatures.
|
[
"Creates",
"placeholders",
"from",
"given",
"signatures",
"."
] |
def create_placeholders_from_signatures(signatures):
"""Creates placeholders from given signatures.
Args:
signatures: Dict of `TensorSignature` objects or single `TensorSignature`,
or `None`.
Returns:
Dict of `tf.placeholder` objects or single `tf.placeholder`, or `None`.
"""
if signatures is None:
return None
if not isinstance(signatures, dict):
return signatures.get_placeholder()
return {
key: signatures[key].get_placeholder()
for key in signatures}
|
[
"def",
"create_placeholders_from_signatures",
"(",
"signatures",
")",
":",
"if",
"signatures",
"is",
"None",
":",
"return",
"None",
"if",
"not",
"isinstance",
"(",
"signatures",
",",
"dict",
")",
":",
"return",
"signatures",
".",
"get_placeholder",
"(",
")",
"return",
"{",
"key",
":",
"signatures",
"[",
"key",
"]",
".",
"get_placeholder",
"(",
")",
"for",
"key",
"in",
"signatures",
"}"
] |
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/tensor_signature.py#L140-L156
|
|
miyosuda/TensorFlowAndroidMNIST
|
7b5a4603d2780a8a2834575706e9001977524007
|
jni-build/jni/include/tensorflow/contrib/lookup/lookup_ops.py
|
python
|
KeyValueTensorInitializer.initialize
|
(self, table)
|
return init_op
|
Initializes the given `table` with `keys` and `values` tensors.
Args:
table: The table to initialize.
Returns:
The operation that initializes the table.
Raises:
TypeError: when the keys and values data types do not match the table
key and value data types.
|
Initializes the given `table` with `keys` and `values` tensors.
|
[
"Initializes",
"the",
"given",
"table",
"with",
"keys",
"and",
"values",
"tensors",
"."
] |
def initialize(self, table):
"""Initializes the given `table` with `keys` and `values` tensors.
Args:
table: The table to initialize.
Returns:
The operation that initializes the table.
Raises:
TypeError: when the keys and values data types do not match the table
key and value data types.
"""
# pylint: disable=protected-access
table._check_table_dtypes(self._keys.dtype, self._values.dtype)
with ops.op_scope([table], self._name) as scope:
init_op = gen_data_flow_ops._initialize_table(table.table_ref,
self._keys,
self._values,
name=scope)
# pylint: enable=protected-access
ops.add_to_collection(ops.GraphKeys.TABLE_INITIALIZERS, init_op)
return init_op
|
[
"def",
"initialize",
"(",
"self",
",",
"table",
")",
":",
"# pylint: disable=protected-access",
"table",
".",
"_check_table_dtypes",
"(",
"self",
".",
"_keys",
".",
"dtype",
",",
"self",
".",
"_values",
".",
"dtype",
")",
"with",
"ops",
".",
"op_scope",
"(",
"[",
"table",
"]",
",",
"self",
".",
"_name",
")",
"as",
"scope",
":",
"init_op",
"=",
"gen_data_flow_ops",
".",
"_initialize_table",
"(",
"table",
".",
"table_ref",
",",
"self",
".",
"_keys",
",",
"self",
".",
"_values",
",",
"name",
"=",
"scope",
")",
"# pylint: enable=protected-access",
"ops",
".",
"add_to_collection",
"(",
"ops",
".",
"GraphKeys",
".",
"TABLE_INITIALIZERS",
",",
"init_op",
")",
"return",
"init_op"
] |
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/lookup/lookup_ops.py#L284-L306
|
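This record shows the old contrib-era internals; the equivalent public pattern in current TensorFlow (an assumption, shown with the tf.lookup API) looks like:

import tensorflow as tf

init = tf.lookup.KeyValueTensorInitializer(
    keys=tf.constant(["a", "b"]),
    values=tf.constant([1, 2], dtype=tf.int64))
# Constructing the table runs the initializer; unknown keys fall back
# to default_value.
table = tf.lookup.StaticHashTable(init, default_value=-1)
print(table.lookup(tf.constant(["a", "c"])).numpy())  # [ 1 -1]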
|
toggl-open-source/toggldesktop
|
91865205885531cc8fd9e8d613dad49d625d56e7
|
third_party/cpplint/cpplint.py
|
python
|
_GetTextInside
|
(text, start_pattern)
|
return text[start_position:position - 1]
|
r"""Retrieves all the text between matching open and close parentheses.
Given a string of lines and a regular expression string, retrieve all the text
following the expression and between opening punctuation symbols like
(, [, or {, and the matching close-punctuation symbol. This properly handles nested
occurrences of the punctuation, so for text like
printf(a(), b(c()));
a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
start_pattern must match a string having an open punctuation symbol at the end.
Args:
text: The lines to extract text. Its comments and strings must be elided.
It can be single line and can span multiple lines.
start_pattern: The regexp string indicating where to start extracting
the text.
Returns:
The extracted text.
None if either the opening string or ending punctuation could not be found.
|
r"""Retrieves all the text between matching open and close parentheses.
|
[
"r",
"Retrieves",
"all",
"the",
"text",
"between",
"matching",
"open",
"and",
"close",
"parentheses",
"."
] |
def _GetTextInside(text, start_pattern):
r"""Retrieves all the text between matching open and close parentheses.
Given a string of lines and a regular expression string, retrieve all the text
following the expression and between opening punctuation symbols like
(, [, or {, and the matching close-punctuation symbol. This properly handles nested
occurrences of the punctuation, so for text like
printf(a(), b(c()));
a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
start_pattern must match a string having an open punctuation symbol at the end.
Args:
text: The lines to extract text. Its comments and strings must be elided.
It can be single line and can span multiple lines.
start_pattern: The regexp string indicating where to start extracting
the text.
Returns:
The extracted text.
None if either the opening string or ending punctuation could not be found.
"""
# TODO(unknown): Audit cpplint.py to see what places could be profitably
# rewritten to use _GetTextInside (and use inferior regexp matching today).
# Give opening punctuations to get the matching close-punctuations.
matching_punctuation = {'(': ')', '{': '}', '[': ']'}
closing_punctuation = set(matching_punctuation.itervalues())
# Find the position to start extracting text.
match = re.search(start_pattern, text, re.M)
if not match: # start_pattern not found in text.
return None
start_position = match.end(0)
assert start_position > 0, (
'start_pattern must ends with an opening punctuation.')
assert text[start_position - 1] in matching_punctuation, (
'start_pattern must ends with an opening punctuation.')
# Stack of closing punctuations we expect to have in text after position.
punctuation_stack = [matching_punctuation[text[start_position - 1]]]
position = start_position
while punctuation_stack and position < len(text):
if text[position] == punctuation_stack[-1]:
punctuation_stack.pop()
elif text[position] in closing_punctuation:
# A closing punctuation without matching opening punctuations.
return None
elif text[position] in matching_punctuation:
punctuation_stack.append(matching_punctuation[text[position]])
position += 1
if punctuation_stack:
# Opening punctuations left without matching close-punctuations.
return None
# punctuations match.
return text[start_position:position - 1]
|
[
"def",
"_GetTextInside",
"(",
"text",
",",
"start_pattern",
")",
":",
"# TODO(unknown): Audit cpplint.py to see what places could be profitably",
"# rewritten to use _GetTextInside (and use inferior regexp matching today).",
"# Give opening punctuations to get the matching close-punctuations.",
"matching_punctuation",
"=",
"{",
"'('",
":",
"')'",
",",
"'{'",
":",
"'}'",
",",
"'['",
":",
"']'",
"}",
"closing_punctuation",
"=",
"set",
"(",
"matching_punctuation",
".",
"itervalues",
"(",
")",
")",
"# Find the position to start extracting text.",
"match",
"=",
"re",
".",
"search",
"(",
"start_pattern",
",",
"text",
",",
"re",
".",
"M",
")",
"if",
"not",
"match",
":",
"# start_pattern not found in text.",
"return",
"None",
"start_position",
"=",
"match",
".",
"end",
"(",
"0",
")",
"assert",
"start_position",
">",
"0",
",",
"(",
"'start_pattern must ends with an opening punctuation.'",
")",
"assert",
"text",
"[",
"start_position",
"-",
"1",
"]",
"in",
"matching_punctuation",
",",
"(",
"'start_pattern must ends with an opening punctuation.'",
")",
"# Stack of closing punctuations we expect to have in text after position.",
"punctuation_stack",
"=",
"[",
"matching_punctuation",
"[",
"text",
"[",
"start_position",
"-",
"1",
"]",
"]",
"]",
"position",
"=",
"start_position",
"while",
"punctuation_stack",
"and",
"position",
"<",
"len",
"(",
"text",
")",
":",
"if",
"text",
"[",
"position",
"]",
"==",
"punctuation_stack",
"[",
"-",
"1",
"]",
":",
"punctuation_stack",
".",
"pop",
"(",
")",
"elif",
"text",
"[",
"position",
"]",
"in",
"closing_punctuation",
":",
"# A closing punctuation without matching opening punctuations.",
"return",
"None",
"elif",
"text",
"[",
"position",
"]",
"in",
"matching_punctuation",
":",
"punctuation_stack",
".",
"append",
"(",
"matching_punctuation",
"[",
"text",
"[",
"position",
"]",
"]",
")",
"position",
"+=",
"1",
"if",
"punctuation_stack",
":",
"# Opening punctuations left without matching close-punctuations.",
"return",
"None",
"# punctuations match.",
"return",
"text",
"[",
"start_position",
":",
"position",
"-",
"1",
"]"
] |
https://github.com/toggl-open-source/toggldesktop/blob/91865205885531cc8fd9e8d613dad49d625d56e7/third_party/cpplint/cpplint.py#L4679-L4732
|
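Exercising the docstring's own example (module-private helper, Python 2 as vendored, so illustrative only):

text = "printf(a(), b(c()));"
assert _GetTextInside(text, r"printf\(") == "a(), b(c())"

# Unbalanced punctuation yields None rather than a partial match.
assert _GetTextInside("printf(a(;", r"printf\(") is None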
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/tools/Editra/src/syntax/_groovy.py
|
python
|
SyntaxData.GetKeywords
|
(self)
|
return [MAIN_KEYWORDS, SECONDARY_KEYWORDS]
|
Returns Specified Keywords List
|
Returns Specified Keywords List
|
[
"Returns",
"Specified",
"Keywords",
"List"
] |
def GetKeywords(self):
"""Returns Specified Keywords List """
return [MAIN_KEYWORDS, SECONDARY_KEYWORDS]
|
[
"def",
"GetKeywords",
"(",
"self",
")",
":",
"return",
"[",
"MAIN_KEYWORDS",
",",
"SECONDARY_KEYWORDS",
"]"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/syntax/_groovy.py#L103-L105
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/aui.py
|
python
|
PreAuiMDIParentFrame
|
(*args, **kwargs)
|
return val
|
PreAuiMDIParentFrame() -> AuiMDIParentFrame
|
PreAuiMDIParentFrame() -> AuiMDIParentFrame
|
[
"PreAuiMDIParentFrame",
"()",
"-",
">",
"AuiMDIParentFrame"
] |
def PreAuiMDIParentFrame(*args, **kwargs):
"""PreAuiMDIParentFrame() -> AuiMDIParentFrame"""
val = _aui.new_PreAuiMDIParentFrame(*args, **kwargs)
val._setOORInfo(val)
return val
|
[
"def",
"PreAuiMDIParentFrame",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"val",
"=",
"_aui",
".",
"new_PreAuiMDIParentFrame",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"val",
".",
"_setOORInfo",
"(",
"val",
")",
"return",
"val"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/aui.py#L1499-L1503
|
|
htcondor/htcondor
|
4829724575176d1d6c936e4693dfd78a728569b0
|
src/condor_contrib/condor_pigeon/src/condor_pigeon_client/skype_linux_tools/Skype4Py/voicemail.py
|
python
|
IVoicemail.StopPlayback
|
(self)
|
Stops playing downloaded voicemail.
|
Stops playing downloaded voicemail.
|
[
"Stops",
"playing",
"downloaded",
"voicemail",
"."
] |
def StopPlayback(self):
'''Stops playing downloaded voicemail.
'''
self._Alter('STOPPLAYBACK')
|
[
"def",
"StopPlayback",
"(",
"self",
")",
":",
"self",
".",
"_Alter",
"(",
"'STOPPLAYBACK'",
")"
] |
https://github.com/htcondor/htcondor/blob/4829724575176d1d6c936e4693dfd78a728569b0/src/condor_contrib/condor_pigeon/src/condor_pigeon_client/skype_linux_tools/Skype4Py/voicemail.py#L145-L148
|
||
CRYTEK/CRYENGINE
|
232227c59a220cbbd311576f0fbeba7bb53b2a8c
|
Code/Tools/waf-1.7.13/waflib/Scripting.py
|
python
|
Dist.get_tar_path
|
(self, node)
|
return node.abspath()
|
return the path to use for a node in the tar archive, the purpose of this
is to let subclasses resolve symbolic links or to change file names
|
return the path to use for a node in the tar archive, the purpose of this
is to let subclasses resolve symbolic links or to change file names
|
[
"return",
"the",
"path",
"to",
"use",
"for",
"a",
"node",
"in",
"the",
"tar",
"archive",
"the",
"purpose",
"of",
"this",
"is",
"to",
"let",
"subclases",
"resolve",
"symbolic",
"links",
"or",
"to",
"change",
"file",
"names"
] |
def get_tar_path(self, node):
"""
return the path to use for a node in the tar archive, the purpose of this
is to let subclasses resolve symbolic links or to change file names
"""
return node.abspath()
|
[
"def",
"get_tar_path",
"(",
"self",
",",
"node",
")",
":",
"return",
"node",
".",
"abspath",
"(",
")"
] |
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Code/Tools/waf-1.7.13/waflib/Scripting.py#L481-L486
|
|
thalium/icebox
|
99d147d5b9269222225443ce171b4fd46d8985d4
|
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py
|
python
|
xmlDoc.saveFormatFile
|
(self, filename, format)
|
return ret
|
Dump an XML document to a file. Will use compression if
compiled in and enabled. If @filename is "-" the stdout
file is used. If @format is set then the document will be
indented on output. Note that @format = 1 provides node
indenting only if xmlIndentTreeOutput = 1 or
xmlKeepBlanksDefault(0) was called
|
Dump an XML document to a file. Will use compression if
compiled in and enabled. If
|
[
"Dump",
"an",
"XML",
"document",
"to",
"a",
"file",
".",
"Will",
"use",
"compression",
"if",
"compiled",
"in",
"and",
"enabled",
".",
"If"
] |
def saveFormatFile(self, filename, format):
"""Dump an XML document to a file. Will use compression if
compiled in and enabled. If @filename is "-" the stdout
file is used. If @format is set then the document will be
indented on output. Note that @format = 1 provides node
indenting only if xmlIndentTreeOutput = 1 or
xmlKeepBlanksDefault(0) was called """
ret = libxml2mod.xmlSaveFormatFile(filename, self._o, format)
return ret
|
[
"def",
"saveFormatFile",
"(",
"self",
",",
"filename",
",",
"format",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlSaveFormatFile",
"(",
"filename",
",",
"self",
".",
"_o",
",",
"format",
")",
"return",
"ret"
] |
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L3711-L3719
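A minimal usage sketch: parse a small document and dump it, indented, to stdout via the "-" filename. Per the docstring, indentation only takes effect when blank-text nodes are disabled, which keepBlanksDefault(0) is assumed to do here.

import libxml2

libxml2.keepBlanksDefault(0)                 # make @format=1 indentation effective
doc = libxml2.parseDoc("<root><child/></root>")
doc.saveFormatFile("-", 1)                   # "-" writes to stdout, 1 enables indenting
doc.freeDoc()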
|
|
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/tempfile.py
|
python
|
gettempdir
|
()
|
return tempdir
|
Accessor for tempfile.tempdir.
|
Accessor for tempfile.tempdir.
|
[
"Accessor",
"for",
"tempfile",
".",
"tempdir",
"."
] |
def gettempdir():
"""Accessor for tempfile.tempdir."""
global tempdir
if tempdir is None:
_once_lock.acquire()
try:
if tempdir is None:
tempdir = _get_default_tempdir()
finally:
_once_lock.release()
return tempdir
|
[
"def",
"gettempdir",
"(",
")",
":",
"global",
"tempdir",
"if",
"tempdir",
"is",
"None",
":",
"_once_lock",
".",
"acquire",
"(",
")",
"try",
":",
"if",
"tempdir",
"is",
"None",
":",
"tempdir",
"=",
"_get_default_tempdir",
"(",
")",
"finally",
":",
"_once_lock",
".",
"release",
"(",
")",
"return",
"tempdir"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/tempfile.py#L258-L268
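A short usage sketch: the first call computes and caches the default directory under _once_lock; later calls return the cached module-level value, which callers may also override directly.

import tempfile

print(tempfile.gettempdir())     # e.g. /tmp on most Unix systems
tempfile.tempdir = "/var/tmp"    # override the cached default
print(tempfile.gettempdir())     # now returns /var/tmp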
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/asyncio/protocols.py
|
python
|
BaseProtocol.connection_lost
|
(self, exc)
|
Called when the connection is lost or closed.
The argument is an exception object or None (the latter
meaning a regular EOF is received or the connection was
aborted or closed).
|
Called when the connection is lost or closed.
|
[
"Called",
"when",
"the",
"connection",
"is",
"lost",
"or",
"closed",
"."
] |
def connection_lost(self, exc):
"""Called when the connection is lost or closed.
The argument is an exception object or None (the latter
meaning a regular EOF is received or the connection was
aborted or closed).
"""
|
[
"def",
"connection_lost",
"(",
"self",
",",
"exc",
")",
":"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/asyncio/protocols.py#L27-L33
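A minimal protocol sketch showing the exc contract the docstring states: None on a clean EOF or close, an exception object on an actual error. The class name is an illustrative assumption; an instance would be passed to loop.create_connection().

import asyncio

class PingProtocol(asyncio.Protocol):
    def connection_made(self, transport):
        transport.write(b"ping")

    def connection_lost(self, exc):
        if exc is None:
            print("connection closed cleanly")   # regular EOF or close
        else:
            print("connection lost:", exc)       # transport-level error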
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_controls.py
|
python
|
TextCtrl.GetClassDefaultAttributes
|
(*args, **kwargs)
|
return _controls_.TextCtrl_GetClassDefaultAttributes(*args, **kwargs)
|
GetClassDefaultAttributes(int variant=WINDOW_VARIANT_NORMAL) -> VisualAttributes
Get the default attributes for this class. This is useful if you want
to use the same font or colour in your own control as in a standard
control -- which is a much better idea than hard coding specific
colours or fonts which might look completely out of place on the
user's system, especially if it uses themes.
The variant parameter is only relevant under Mac currently and is
ignored under other platforms. Under Mac, it will change the size of
the returned font. See `wx.Window.SetWindowVariant` for more about
this.
|
GetClassDefaultAttributes(int variant=WINDOW_VARIANT_NORMAL) -> VisualAttributes
|
[
"GetClassDefaultAttributes",
"(",
"int",
"variant",
"=",
"WINDOW_VARIANT_NORMAL",
")",
"-",
">",
"VisualAttributes"
] |
def GetClassDefaultAttributes(*args, **kwargs):
"""
GetClassDefaultAttributes(int variant=WINDOW_VARIANT_NORMAL) -> VisualAttributes
Get the default attributes for this class. This is useful if you want
to use the same font or colour in your own control as in a standard
control -- which is a much better idea than hard coding specific
colours or fonts which might look completely out of place on the
user's system, especially if it uses themes.
The variant parameter is only relevant under Mac currently and is
ignored under other platforms. Under Mac, it will change the size of
the returned font. See `wx.Window.SetWindowVariant` for more about
this.
"""
return _controls_.TextCtrl_GetClassDefaultAttributes(*args, **kwargs)
|
[
"def",
"GetClassDefaultAttributes",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_controls_",
".",
"TextCtrl_GetClassDefaultAttributes",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_controls.py#L2055-L2070
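A hedged usage sketch of querying the platform defaults instead of hard-coding colours or fonts, as the docstring recommends; the colBg/colFg/font attribute names follow wx.VisualAttributes.

import wx

app = wx.App(False)
attrs = wx.TextCtrl.GetClassDefaultAttributes(wx.WINDOW_VARIANT_NORMAL)
print(attrs.colBg)   # default background colour for this control class
print(attrs.colFg)   # default foreground colour
print(attrs.font)    # default font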
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/_misc.py
|
python
|
DateSpan.__sub__
|
(*args, **kwargs)
|
return _misc_.DateSpan___sub__(*args, **kwargs)
|
__sub__(self, DateSpan other) -> DateSpan
|
__sub__(self, DateSpan other) -> DateSpan
|
[
"__sub__",
"(",
"self",
"DateSpan",
"other",
")",
"-",
">",
"DateSpan"
] |
def __sub__(*args, **kwargs):
"""__sub__(self, DateSpan other) -> DateSpan"""
return _misc_.DateSpan___sub__(*args, **kwargs)
|
[
"def",
"__sub__",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"DateSpan___sub__",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_misc.py#L4725-L4727
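A minimal usage sketch of the subtraction operator: both operands are wx.DateSpan values and the result is a new span with component-wise differences. wx.DateSpan.Week() is assumed available as the one-week factory.

import wx

span = wx.DateSpan(months=2) - wx.DateSpan.Week()
print(span.GetMonths(), span.GetWeeks())   # 2 -1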
|