Column schema and per-column value statistics for this split:

Column | Type | Value statistics
---|---|---
nwo | string | lengths 5 to 86
sha | string | lengths 40 to 40
path | string | lengths 4 to 189
language | string | 1 class
identifier | string | lengths 1 to 94
parameters | string | lengths 2 to 4.03k
argument_list | string | 1 class
return_statement | string | lengths 0 to 11.5k
docstring | string | lengths 1 to 33.2k
docstring_summary | string | lengths 0 to 5.15k
docstring_tokens | sequence | -
function | string | lengths 34 to 151k
function_tokens | sequence | -
url | string | lengths 90 to 278
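Each data row below pairs one Python function with its metadata: repository (nwo), commit sha, file path, signature pieces, docstring plus tokenized variants, the full source, and a GitHub permalink pinned to that sha. A minimal sketch of iterating records with this schema; it assumes the split has been exported as JSON Lines, and the file name is hypothetical:

```python
import json

# Hypothetical export of this split: one JSON object per line, with the
# fields from the schema table above.
with open("sample.jsonl") as f:
    for line in f:
        row = json.loads(line)
        # url is a permalink pinned to the commit sha, so the stored
        # function text and the link stay in sync.
        assert row["sha"] in row["url"]
        print(row["nwo"], row["path"], row["identifier"], sep=" | ")
```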
nwo | sha | path | language | identifier | parameters | argument_list | return_statement | docstring | docstring_summary | docstring_tokens | function | function_tokens | url
---|---|---|---|---|---|---|---|---|---|---|---|---|---
bigartm/bigartm | 47e37f982de87aa67bfd475ff1f39da696b181b3 | utils/cpplint.py | python | _BackupFilters | () | Saves the current filter list to backup storage. | Saves the current filter list to backup storage. | [
"Saves",
"the",
"current",
"filter",
"list",
"to",
"backup",
"storage",
"."
] | def _BackupFilters():
""" Saves the current filter list to backup storage."""
_cpplint_state.BackupFilters() | [
"def",
"_BackupFilters",
"(",
")",
":",
"_cpplint_state",
".",
"BackupFilters",
"(",
")"
] | https://github.com/bigartm/bigartm/blob/47e37f982de87aa67bfd475ff1f39da696b181b3/utils/cpplint.py#L905-L907 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/masked/numctrl.py | python | NumCtrl.SetMax | (self, max=None) | return bRet | Sets the maximum value of the control. If a value of None
is provided, then the control will have no explicit maximum value.
If the value specified is less than the current minimum value, then
the function returns False and the maximum will not change from its
current setting. On success, the function returns True.
If successful and the current value is greater than the new upper
bound, if the control is limited the value will be automatically
adjusted to this maximum value; if not limited, the value in the
control will be colored as invalid.
If max > the max value allowed by the width of the control,
the function will return False, and the max will not be set. | Sets the maximum value of the control. If a value of None
is provided, then the control will have no explicit maximum value.
If the value specified is less than the current minimum value, then
the function returns False and the maximum will not change from its
current setting. On success, the function returns True. | [
"Sets",
"the",
"maximum",
"value",
"of",
"the",
"control",
".",
"If",
"a",
"value",
"of",
"None",
"is",
"provided",
"then",
"the",
"control",
"will",
"have",
"no",
"explicit",
"maximum",
"value",
".",
"If",
"the",
"value",
"specified",
"is",
"less",
"than",
"the",
"current",
"minimum",
"value",
"then",
"the",
"function",
"returns",
"False",
"and",
"the",
"maximum",
"will",
"not",
"change",
"from",
"its",
"current",
"setting",
".",
"On",
"success",
"the",
"function",
"returns",
"True",
"."
] | def SetMax(self, max=None):
"""
Sets the maximum value of the control. If a value of None
is provided, then the control will have no explicit maximum value.
If the value specified is less than the current minimum value, then
the function returns False and the maximum will not change from its
current setting. On success, the function returns True.
If successful and the current value is greater than the new upper
bound, if the control is limited the value will be automatically
adjusted to this maximum value; if not limited, the value in the
control will be colored as invalid.
If max > the max value allowed by the width of the control,
the function will return False, and the max will not be set.
"""
if( self._min is None
or max is None
or (self._min is not None and self._min <= max) ):
try:
self.SetParameters(max=max)
bRet = True
except ValueError:
bRet = False
else:
bRet = False
return bRet | [
"def",
"SetMax",
"(",
"self",
",",
"max",
"=",
"None",
")",
":",
"if",
"(",
"self",
".",
"_min",
"is",
"None",
"or",
"max",
"is",
"None",
"or",
"(",
"self",
".",
"_min",
"is",
"not",
"None",
"and",
"self",
".",
"_min",
"<=",
"max",
")",
")",
":",
"try",
":",
"self",
".",
"SetParameters",
"(",
"max",
"=",
"max",
")",
"bRet",
"=",
"True",
"except",
"ValueError",
":",
"bRet",
"=",
"False",
"else",
":",
"bRet",
"=",
"False",
"return",
"bRet"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/masked/numctrl.py#L1350-L1377 |
|
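A usage sketch for the `SetMax` contract documented in the row above. It is hedged: it assumes wxPython with `wx.lib.masked` is installed and a `wx.App` can be created, and the widths chosen here are only for illustration.

```python
import wx
import wx.lib.masked.numctrl as numctrl

app = wx.App(False)
frame = wx.Frame(None)
ctrl = numctrl.NumCtrl(frame, value=5, integerWidth=3)  # room for 3 integer digits
ctrl.SetMin(0)
print(ctrl.SetMax(100))   # True: fits the control's width and is >= the minimum
print(ctrl.SetMax(-1))    # False: below the current minimum, so max is unchanged
```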
HackWebRTC/webrtc | 7abfc990c00ab35090fff285fcf635d1d7892433 | rtc_tools/compare_videos.py | python | _RunFrameAnalyzer | (options, yuv_directory=None) | return frame_analyzer.returncode | Run frame analyzer to compare the videos and print output. | Run frame analyzer to compare the videos and print output. | [
"Run",
"frame",
"analyzer",
"to",
"compare",
"the",
"videos",
"and",
"print",
"output",
"."
] | def _RunFrameAnalyzer(options, yuv_directory=None):
"""Run frame analyzer to compare the videos and print output."""
cmd = [
options.frame_analyzer,
'--label=%s' % options.label,
'--reference_file=%s' % options.ref_video,
'--test_file=%s' % options.test_video,
'--width=%d' % options.yuv_frame_width,
'--height=%d' % options.yuv_frame_height,
]
if options.chartjson_result_file:
cmd.append('--chartjson_result_file=%s' % options.chartjson_result_file)
if options.aligned_output_file:
cmd.append('--aligned_output_file=%s' % options.aligned_output_file)
if yuv_directory:
cmd.append('--yuv_directory=%s' % yuv_directory)
frame_analyzer = subprocess.Popen(cmd, stdin=_DevNull(),
stdout=sys.stdout, stderr=sys.stderr)
frame_analyzer.wait()
if frame_analyzer.returncode != 0:
print('Failed to run frame analyzer.')
return frame_analyzer.returncode | [
"def",
"_RunFrameAnalyzer",
"(",
"options",
",",
"yuv_directory",
"=",
"None",
")",
":",
"cmd",
"=",
"[",
"options",
".",
"frame_analyzer",
",",
"'--label=%s'",
"%",
"options",
".",
"label",
",",
"'--reference_file=%s'",
"%",
"options",
".",
"ref_video",
",",
"'--test_file=%s'",
"%",
"options",
".",
"test_video",
",",
"'--width=%d'",
"%",
"options",
".",
"yuv_frame_width",
",",
"'--height=%d'",
"%",
"options",
".",
"yuv_frame_height",
",",
"]",
"if",
"options",
".",
"chartjson_result_file",
":",
"cmd",
".",
"append",
"(",
"'--chartjson_result_file=%s'",
"%",
"options",
".",
"chartjson_result_file",
")",
"if",
"options",
".",
"aligned_output_file",
":",
"cmd",
".",
"append",
"(",
"'--aligned_output_file=%s'",
"%",
"options",
".",
"aligned_output_file",
")",
"if",
"yuv_directory",
":",
"cmd",
".",
"append",
"(",
"'--yuv_directory=%s'",
"%",
"yuv_directory",
")",
"frame_analyzer",
"=",
"subprocess",
".",
"Popen",
"(",
"cmd",
",",
"stdin",
"=",
"_DevNull",
"(",
")",
",",
"stdout",
"=",
"sys",
".",
"stdout",
",",
"stderr",
"=",
"sys",
".",
"stderr",
")",
"frame_analyzer",
".",
"wait",
"(",
")",
"if",
"frame_analyzer",
".",
"returncode",
"!=",
"0",
":",
"print",
"(",
"'Failed to run frame analyzer.'",
")",
"return",
"frame_analyzer",
".",
"returncode"
] | https://github.com/HackWebRTC/webrtc/blob/7abfc990c00ab35090fff285fcf635d1d7892433/rtc_tools/compare_videos.py#L88-L109 |
|
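The row above wraps a command-line tool with `subprocess.Popen`, lets it inherit the parent's stdout/stderr, and surfaces the exit code. A hedged sketch of the same pattern with a stand-in command (`echo` here, POSIX only; the real script launches the `frame_analyzer` binary):

```python
import os
import subprocess
import sys

cmd = ["echo", "frame analysis would run here"]  # stand-in for frame_analyzer
with open(os.devnull, "rb") as devnull:          # same effect as _DevNull()
    proc = subprocess.Popen(cmd, stdin=devnull, stdout=sys.stdout, stderr=sys.stderr)
    proc.wait()
print("exit code:", proc.returncode)             # non-zero would mean failure
```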
mingchen/protobuf-ios | 0958df34558cd54cb7b6e6ca5c8855bf3d475046 | compiler/python/google/protobuf/service.py | python | RpcController.StartCancel | (self) | Initiate cancellation.
Advises the RPC system that the caller desires that the RPC call be
canceled. The RPC system may cancel it immediately, may wait awhile and
then cancel it, or may not even cancel the call at all. If the call is
canceled, the "done" callback will still be called and the RpcController
will indicate that the call failed at that time. | Initiate cancellation. | [
"Initiate",
"cancellation",
"."
] | def StartCancel(self):
"""Initiate cancellation.
Advises the RPC system that the caller desires that the RPC call be
canceled. The RPC system may cancel it immediately, may wait awhile and
then cancel it, or may not even cancel the call at all. If the call is
canceled, the "done" callback will still be called and the RpcController
will indicate that the call failed at that time.
"""
raise NotImplementedError | [
"def",
"StartCancel",
"(",
"self",
")",
":",
"raise",
"NotImplementedError"
] | https://github.com/mingchen/protobuf-ios/blob/0958df34558cd54cb7b6e6ca5c8855bf3d475046/compiler/python/google/protobuf/service.py#L150-L159 |
||
Cantera/cantera | 0119484b261967ccb55a0066c020599cacc312e4 | site_scons/buildutils.py | python | Option._build_description | (self, backticks: bool = True, indent: int = 3) | return f"{'':<{indent}}{description}\n" | Assemble description block (help text) | Assemble description block (help text) | [
"Assemble",
"description",
"block",
"(",
"help",
"text",
")"
] | def _build_description(self, backticks: bool = True, indent: int = 3) -> str:
"""Assemble description block (help text)"""
if not backticks:
# Help text, wrapped and indented
self.set_wrapper_indent(indent)
out = self.wrapper.wrap(re.sub(r"\s+", " ", self.description))
return "\n".join(out) + "\n"
# assemble description
linebreak = "\n" + " " * indent
description = linebreak.join(self.description.split("\n"))
pat = r'"([a-zA-Z0-9\-\+\*$_.,: =/\'\\]+)"'
double_quoted = []
for item in re.findall(pat, description):
# enclose double-quoted strings in '``'
found = f'"{item}"'
double_quoted += [found]
replacement = f"``{found}``"
description = description.replace(found, replacement)
pat = r"\'([a-zA-Z0-9\-\+\*$_.,:=/\\]+)\'"
for item in re.findall(pat, description):
# replace "'" for single-quoted words by '``'; do not replace "'" when
# whitespace is enclosed or if word is part of double-quoted string
if any([item in dq for dq in double_quoted]):
continue
found = f"'{item}'"
replacement = found.replace("'", "``")
description = description.replace(found, replacement)
pat = r"\*([^\*]+)"
asterisks = re.findall(pat, description)
if len(asterisks) == 1:
# catch unbalanced '*', for example in '*nix'
found = f"*{asterisks[0]}"
replacement = f"\{found}"
description = description.replace(found, replacement)
return f"{'':<{indent}}{description}\n" | [
"def",
"_build_description",
"(",
"self",
",",
"backticks",
":",
"bool",
"=",
"True",
",",
"indent",
":",
"int",
"=",
"3",
")",
"->",
"str",
":",
"if",
"not",
"backticks",
":",
"# Help text, wrapped and indented",
"self",
".",
"set_wrapper_indent",
"(",
"indent",
")",
"out",
"=",
"self",
".",
"wrapper",
".",
"wrap",
"(",
"re",
".",
"sub",
"(",
"r\"\\s+\"",
",",
"\" \"",
",",
"self",
".",
"description",
")",
")",
"return",
"\"\\n\"",
".",
"join",
"(",
"out",
")",
"+",
"\"\\n\"",
"# assemble description",
"linebreak",
"=",
"\"\\n\"",
"+",
"\" \"",
"*",
"indent",
"description",
"=",
"linebreak",
".",
"join",
"(",
"self",
".",
"description",
".",
"split",
"(",
"\"\\n\"",
")",
")",
"pat",
"=",
"r'\"([a-zA-Z0-9\\-\\+\\*$_.,: =/\\'\\\\]+)\"'",
"double_quoted",
"=",
"[",
"]",
"for",
"item",
"in",
"re",
".",
"findall",
"(",
"pat",
",",
"description",
")",
":",
"# enclose double-quoted strings in '``'",
"found",
"=",
"f'\"{item}\"'",
"double_quoted",
"+=",
"[",
"found",
"]",
"replacement",
"=",
"f\"``{found}``\"",
"description",
"=",
"description",
".",
"replace",
"(",
"found",
",",
"replacement",
")",
"pat",
"=",
"r\"\\'([a-zA-Z0-9\\-\\+\\*$_.,:=/\\\\]+)\\'\"",
"for",
"item",
"in",
"re",
".",
"findall",
"(",
"pat",
",",
"description",
")",
":",
"# replace \"'\" for single-quoted words by '``'; do not replace \"'\" when",
"# whitespace is enclosed or if word is part of double-quoted string",
"if",
"any",
"(",
"[",
"item",
"in",
"dq",
"for",
"dq",
"in",
"double_quoted",
"]",
")",
":",
"continue",
"found",
"=",
"f\"'{item}'\"",
"replacement",
"=",
"found",
".",
"replace",
"(",
"\"'\"",
",",
"\"``\"",
")",
"description",
"=",
"description",
".",
"replace",
"(",
"found",
",",
"replacement",
")",
"pat",
"=",
"r\"\\*([^\\*]+)\"",
"asterisks",
"=",
"re",
".",
"findall",
"(",
"pat",
",",
"description",
")",
"if",
"len",
"(",
"asterisks",
")",
"==",
"1",
":",
"# catch unbalanced '*', for example in '*nix'",
"found",
"=",
"f\"*{asterisks[0]}\"",
"replacement",
"=",
"f\"\\{found}\"",
"description",
"=",
"description",
".",
"replace",
"(",
"found",
",",
"replacement",
")",
"return",
"f\"{'':<{indent}}{description}\\n\""
] | https://github.com/Cantera/cantera/blob/0119484b261967ccb55a0066c020599cacc312e4/site_scons/buildutils.py#L141-L178 |
|
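The core move in the row above is regex-driven markup: find quoted substrings and wrap them in RST double-backtick literals. A hedged, stripped-down demo of just the double-quote pass, using a simpler character class than the original:

```python
import re

text = 'set the mode to "debug" or "release"'
# Same idea as the first findall/replace pass above.
for item in re.findall(r'"([\w\-+.,: =/]+)"', text):
    text = text.replace(f'"{item}"', f'``"{item}"``')
print(text)  # set the mode to ``"debug"`` or ``"release"``
```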
google/or-tools | 2cb85b4eead4c38e1c54b48044f92087cf165bce | examples/python/cvrptw_plot.py | python | Customers.return_dem_callback | (self) | return dem_return | Return a callback function that gives the demands.
Returns:
function: dem_return(a) A function that takes the 'from' node
index and returns the distance in km. | Return a callback function that gives the demands. | [
"Return",
"a",
"callback",
"function",
"that",
"gives",
"the",
"demands",
"."
] | def return_dem_callback(self):
"""
Return a callback function that gives the demands.
Returns:
function: dem_return(a) A function that takes the 'from' node
index and returns the distance in km.
"""
def dem_return(from_index):
# Convert from routing variable Index to distance matrix NodeIndex.
from_node = self.manager.IndexToNode(from_index)
return (self.customers[from_node].demand)
return dem_return | [
"def",
"return_dem_callback",
"(",
"self",
")",
":",
"def",
"dem_return",
"(",
"from_index",
")",
":",
"# Convert from routing variable Index to distance matrix NodeIndex.",
"from_node",
"=",
"self",
".",
"manager",
".",
"IndexToNode",
"(",
"from_index",
")",
"return",
"(",
"self",
".",
"customers",
"[",
"from_node",
"]",
".",
"demand",
")",
"return",
"dem_return"
] | https://github.com/google/or-tools/blob/2cb85b4eead4c38e1c54b48044f92087cf165bce/examples/python/cvrptw_plot.py#L285-L299 |
|
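The row above returns a closure so the routing solver can call a plain `f(from_index)` while the demand data stays captured. A hedged miniature of that factory pattern, with an identity index-to-node mapping standing in for the real `RoutingIndexManager`:

```python
def make_demand_callback(index_to_node, demands):
    """Return f(from_index) with the demand table captured in a closure."""
    def demand(from_index):
        return demands[index_to_node(from_index)]
    return demand

demands = [0, 3, 5]                              # depot plus two customers
cb = make_demand_callback(lambda i: i, demands)  # identity mapping for the demo
print(cb(2))  # 5
```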
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | third_party/jinja2/loaders.py | python | BaseLoader.load | (self, environment, name, globals=None) | return environment.template_class.from_code(environment, code,
globals, uptodate) | Loads a template. This method looks up the template in the cache
or loads one by calling :meth:`get_source`. Subclasses should not
override this method as loaders working on collections of other
loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
will not call this method but `get_source` directly. | Loads a template. This method looks up the template in the cache
or loads one by calling :meth:`get_source`. Subclasses should not
override this method as loaders working on collections of other
loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
will not call this method but `get_source` directly. | [
"Loads",
"a",
"template",
".",
"This",
"method",
"looks",
"up",
"the",
"template",
"in",
"the",
"cache",
"or",
"loads",
"one",
"by",
"calling",
":",
"meth",
":",
"get_source",
".",
"Subclasses",
"should",
"not",
"override",
"this",
"method",
"as",
"loaders",
"working",
"on",
"collections",
"of",
"other",
"loaders",
"(",
"such",
"as",
":",
"class",
":",
"PrefixLoader",
"or",
":",
"class",
":",
"ChoiceLoader",
")",
"will",
"not",
"call",
"this",
"method",
"but",
"get_source",
"directly",
"."
] | def load(self, environment, name, globals=None):
"""Loads a template. This method looks up the template in the cache
or loads one by calling :meth:`get_source`. Subclasses should not
override this method as loaders working on collections of other
loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
will not call this method but `get_source` directly.
"""
code = None
if globals is None:
globals = {}
# first we try to get the source for this template together
# with the filename and the uptodate function.
source, filename, uptodate = self.get_source(environment, name)
# try to load the code from the bytecode cache if there is a
# bytecode cache configured.
bcc = environment.bytecode_cache
if bcc is not None:
bucket = bcc.get_bucket(environment, name, filename, source)
code = bucket.code
# if we don't have code so far (not cached, no longer up to
# date) etc. we compile the template
if code is None:
code = environment.compile(source, name, filename)
# if the bytecode cache is available and the bucket doesn't
# have a code so far, we give the bucket the new code and put
# it back to the bytecode cache.
if bcc is not None and bucket.code is None:
bucket.code = code
bcc.set_bucket(bucket)
return environment.template_class.from_code(environment, code,
globals, uptodate) | [
"def",
"load",
"(",
"self",
",",
"environment",
",",
"name",
",",
"globals",
"=",
"None",
")",
":",
"code",
"=",
"None",
"if",
"globals",
"is",
"None",
":",
"globals",
"=",
"{",
"}",
"# first we try to get the source for this template together",
"# with the filename and the uptodate function.",
"source",
",",
"filename",
",",
"uptodate",
"=",
"self",
".",
"get_source",
"(",
"environment",
",",
"name",
")",
"# try to load the code from the bytecode cache if there is a",
"# bytecode cache configured.",
"bcc",
"=",
"environment",
".",
"bytecode_cache",
"if",
"bcc",
"is",
"not",
"None",
":",
"bucket",
"=",
"bcc",
".",
"get_bucket",
"(",
"environment",
",",
"name",
",",
"filename",
",",
"source",
")",
"code",
"=",
"bucket",
".",
"code",
"# if we don't have code so far (not cached, no longer up to",
"# date) etc. we compile the template",
"if",
"code",
"is",
"None",
":",
"code",
"=",
"environment",
".",
"compile",
"(",
"source",
",",
"name",
",",
"filename",
")",
"# if the bytecode cache is available and the bucket doesn't",
"# have a code so far, we give the bucket the new code and put",
"# it back to the bytecode cache.",
"if",
"bcc",
"is",
"not",
"None",
"and",
"bucket",
".",
"code",
"is",
"None",
":",
"bucket",
".",
"code",
"=",
"code",
"bcc",
".",
"set_bucket",
"(",
"bucket",
")",
"return",
"environment",
".",
"template_class",
".",
"from_code",
"(",
"environment",
",",
"code",
",",
"globals",
",",
"uptodate",
")"
] | https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/jinja2/loaders.py#L100-L135 |
|
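The `load` method in the row above is the template-compilation pipeline: source lookup, optional bytecode cache, compile, wrap. Callers normally reach it through `Environment.get_template`, as in this small sketch with an in-memory loader:

```python
from jinja2 import Environment, DictLoader

env = Environment(loader=DictLoader({"hello.txt": "Hello {{ name }}!"}))
template = env.get_template("hello.txt")  # internally calls loader.load(...)
print(template.render(name="world"))      # Hello world!
```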
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py2/scipy/stats/_multivariate.py | python | ortho_group_gen.rvs | (self, dim, size=1, random_state=None) | return H | Draw random samples from O(N).
Parameters
----------
dim : integer
Dimension of rotation space (N).
size : integer, optional
Number of samples to draw (default 1).
Returns
-------
rvs : ndarray or scalar
Random size N-dimensional matrices, dimension (size, dim, dim) | Draw random samples from O(N). | [
"Draw",
"random",
"samples",
"from",
"O",
"(",
"N",
")",
"."
] | def rvs(self, dim, size=1, random_state=None):
"""
Draw random samples from O(N).
Parameters
----------
dim : integer
Dimension of rotation space (N).
size : integer, optional
Number of samples to draw (default 1).
Returns
-------
rvs : ndarray or scalar
Random size N-dimensional matrices, dimension (size, dim, dim)
"""
random_state = self._get_random_state(random_state)
size = int(size)
if size > 1:
return np.array([self.rvs(dim, size=1, random_state=random_state)
for i in range(size)])
dim = self._process_parameters(dim)
H = np.eye(dim)
for n in range(dim):
x = random_state.normal(size=(dim-n,))
# random sign, 50/50, but chosen carefully to avoid roundoff error
D = np.sign(x[0]) if x[0] != 0 else 1
x[0] += D*np.sqrt((x*x).sum())
# Householder transformation
Hx = -D*(np.eye(dim-n) - 2.*np.outer(x, x)/(x*x).sum())
mat = np.eye(dim)
mat[n:, n:] = Hx
H = np.dot(H, mat)
return H | [
"def",
"rvs",
"(",
"self",
",",
"dim",
",",
"size",
"=",
"1",
",",
"random_state",
"=",
"None",
")",
":",
"random_state",
"=",
"self",
".",
"_get_random_state",
"(",
"random_state",
")",
"size",
"=",
"int",
"(",
"size",
")",
"if",
"size",
">",
"1",
":",
"return",
"np",
".",
"array",
"(",
"[",
"self",
".",
"rvs",
"(",
"dim",
",",
"size",
"=",
"1",
",",
"random_state",
"=",
"random_state",
")",
"for",
"i",
"in",
"range",
"(",
"size",
")",
"]",
")",
"dim",
"=",
"self",
".",
"_process_parameters",
"(",
"dim",
")",
"H",
"=",
"np",
".",
"eye",
"(",
"dim",
")",
"for",
"n",
"in",
"range",
"(",
"dim",
")",
":",
"x",
"=",
"random_state",
".",
"normal",
"(",
"size",
"=",
"(",
"dim",
"-",
"n",
",",
")",
")",
"# random sign, 50/50, but chosen carefully to avoid roundoff error",
"D",
"=",
"np",
".",
"sign",
"(",
"x",
"[",
"0",
"]",
")",
"if",
"x",
"[",
"0",
"]",
"!=",
"0",
"else",
"1",
"x",
"[",
"0",
"]",
"+=",
"D",
"*",
"np",
".",
"sqrt",
"(",
"(",
"x",
"*",
"x",
")",
".",
"sum",
"(",
")",
")",
"# Householder transformation",
"Hx",
"=",
"-",
"D",
"*",
"(",
"np",
".",
"eye",
"(",
"dim",
"-",
"n",
")",
"-",
"2.",
"*",
"np",
".",
"outer",
"(",
"x",
",",
"x",
")",
"/",
"(",
"x",
"*",
"x",
")",
".",
"sum",
"(",
")",
")",
"mat",
"=",
"np",
".",
"eye",
"(",
"dim",
")",
"mat",
"[",
"n",
":",
",",
"n",
":",
"]",
"=",
"Hx",
"H",
"=",
"np",
".",
"dot",
"(",
"H",
",",
"mat",
")",
"return",
"H"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/stats/_multivariate.py#L3501-L3538 |
|
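A quick, hedged check of the sampler in the row above (the public API is `scipy.stats.ortho_group`): a draw from O(N) should be orthogonal up to floating-point roundoff.

```python
import numpy as np
from scipy.stats import ortho_group

m = ortho_group.rvs(dim=3, random_state=0)
print(np.allclose(m @ m.T, np.eye(3)))  # True: m is orthogonal
```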
miyosuda/TensorFlowAndroidMNIST | 7b5a4603d2780a8a2834575706e9001977524007 | jni-build/jni/include/external/bazel_tools/third_party/py/gflags/__init__.py | python | FlagValues.__GetFlagFileLines | (self, filename, parsed_file_list) | return flag_line_list | Returns the useful (!=comments, etc) lines from a file with flags.
Args:
filename: A string, the name of the flag file.
parsed_file_list: A list of the names of the files we have
already read. MUTATED BY THIS FUNCTION.
Returns:
List of strings. See the note below.
NOTE(springer): This function checks for a nested --flagfile=<foo>
tag and handles the lower file recursively. It returns a list of
all the lines that _could_ contain command flags. This is
EVERYTHING except whitespace lines and comments (lines starting
with '#' or '//'). | Returns the useful (!=comments, etc) lines from a file with flags. | [
"Returns",
"the",
"useful",
"(",
"!",
"=",
"comments",
"etc",
")",
"lines",
"from",
"a",
"file",
"with",
"flags",
"."
] | def __GetFlagFileLines(self, filename, parsed_file_list):
"""Returns the useful (!=comments, etc) lines from a file with flags.
Args:
filename: A string, the name of the flag file.
parsed_file_list: A list of the names of the files we have
already read. MUTATED BY THIS FUNCTION.
Returns:
List of strings. See the note below.
NOTE(springer): This function checks for a nested --flagfile=<foo>
tag and handles the lower file recursively. It returns a list of
all the lines that _could_ contain command flags. This is
EVERYTHING except whitespace lines and comments (lines starting
with '#' or '//').
"""
line_list = [] # All line from flagfile.
flag_line_list = [] # Subset of lines w/o comments, blanks, flagfile= tags.
try:
file_obj = open(filename, 'r')
except IOError, e_msg:
raise CantOpenFlagFileError('ERROR:: Unable to open flagfile: %s' % e_msg)
line_list = file_obj.readlines()
file_obj.close()
parsed_file_list.append(filename)
# This is where we check each line in the file we just read.
for line in line_list:
if line.isspace():
pass
# Checks for comment (a line that starts with '#').
elif line.startswith('#') or line.startswith('//'):
pass
# Checks for a nested "--flagfile=<bar>" flag in the current file.
# If we find one, recursively parse down into that file.
elif self.__IsFlagFileDirective(line):
sub_filename = self.ExtractFilename(line)
# We do a little safety check for reparsing a file we've already done.
if not sub_filename in parsed_file_list:
included_flags = self.__GetFlagFileLines(sub_filename,
parsed_file_list)
flag_line_list.extend(included_flags)
else: # Case of hitting a circularly included file.
sys.stderr.write('Warning: Hit circular flagfile dependency: %s\n' %
(sub_filename,))
else:
# Any line that's not a comment or a nested flagfile should get
# copied into 2nd position. This leaves earlier arguments
# further back in the list, thus giving them higher priority.
flag_line_list.append(line.strip())
return flag_line_list | [
"def",
"__GetFlagFileLines",
"(",
"self",
",",
"filename",
",",
"parsed_file_list",
")",
":",
"line_list",
"=",
"[",
"]",
"# All line from flagfile.",
"flag_line_list",
"=",
"[",
"]",
"# Subset of lines w/o comments, blanks, flagfile= tags.",
"try",
":",
"file_obj",
"=",
"open",
"(",
"filename",
",",
"'r'",
")",
"except",
"IOError",
",",
"e_msg",
":",
"raise",
"CantOpenFlagFileError",
"(",
"'ERROR:: Unable to open flagfile: %s'",
"%",
"e_msg",
")",
"line_list",
"=",
"file_obj",
".",
"readlines",
"(",
")",
"file_obj",
".",
"close",
"(",
")",
"parsed_file_list",
".",
"append",
"(",
"filename",
")",
"# This is where we check each line in the file we just read.",
"for",
"line",
"in",
"line_list",
":",
"if",
"line",
".",
"isspace",
"(",
")",
":",
"pass",
"# Checks for comment (a line that starts with '#').",
"elif",
"line",
".",
"startswith",
"(",
"'#'",
")",
"or",
"line",
".",
"startswith",
"(",
"'//'",
")",
":",
"pass",
"# Checks for a nested \"--flagfile=<bar>\" flag in the current file.",
"# If we find one, recursively parse down into that file.",
"elif",
"self",
".",
"__IsFlagFileDirective",
"(",
"line",
")",
":",
"sub_filename",
"=",
"self",
".",
"ExtractFilename",
"(",
"line",
")",
"# We do a little safety check for reparsing a file we've already done.",
"if",
"not",
"sub_filename",
"in",
"parsed_file_list",
":",
"included_flags",
"=",
"self",
".",
"__GetFlagFileLines",
"(",
"sub_filename",
",",
"parsed_file_list",
")",
"flag_line_list",
".",
"extend",
"(",
"included_flags",
")",
"else",
":",
"# Case of hitting a circularly included file.",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Warning: Hit circular flagfile dependency: %s\\n'",
"%",
"(",
"sub_filename",
",",
")",
")",
"else",
":",
"# Any line that's not a comment or a nested flagfile should get",
"# copied into 2nd position. This leaves earlier arguments",
"# further back in the list, thus giving them higher priority.",
"flag_line_list",
".",
"append",
"(",
"line",
".",
"strip",
"(",
")",
")",
"return",
"flag_line_list"
] | https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/external/bazel_tools/third_party/py/gflags/__init__.py#L1552-L1604 |
|
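Setting aside the recursion into nested `--flagfile` directives, the filtering in the row above keeps only lines that can actually carry flags: not blank, not `#` or `//` comments. A hedged mini-version of that filter:

```python
lines = ["--foo=1\n", "   \n", "# comment\n", "// also a comment\n", "--bar=2\n"]
flags = [ln.strip() for ln in lines
         if not ln.isspace() and not ln.startswith(("#", "//"))]
print(flags)  # ['--foo=1', '--bar=2']
```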
sdhash/sdhash | b9eff63e4e5867e910f41fd69032bbb1c94a2a5e | sdhash-ui/cherrypy/wsgiserver/wsgiserver3.py | python | WSGIGateway.get_environ | (self) | Return a new environ dict targeting the given wsgi.version | Return a new environ dict targeting the given wsgi.version | [
"Return",
"a",
"new",
"environ",
"dict",
"targeting",
"the",
"given",
"wsgi",
".",
"version"
] | def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
raise NotImplemented | [
"def",
"get_environ",
"(",
"self",
")",
":",
"raise",
"NotImplemented"
] | https://github.com/sdhash/sdhash/blob/b9eff63e4e5867e910f41fd69032bbb1c94a2a5e/sdhash-ui/cherrypy/wsgiserver/wsgiserver3.py#L1822-L1824 |
||
davidstutz/mesh-voxelization | 81a237c3b345062e364b180a8a4fc7ac98e107a4 | examples/fill_occupancy.py | python | read_hdf5 | (file, key = 'tensor') | return tensor | Read a tensor, i.e. numpy array, from HDF5.
:param file: path to file to read
:type file: str
:param key: key to read
:type key: str
:return: tensor
:rtype: numpy.ndarray | Read a tensor, i.e. numpy array, from HDF5. | [
"Read",
"a",
"tensor",
"i",
".",
"e",
".",
"numpy",
"array",
"from",
"HDF5",
"."
] | def read_hdf5(file, key = 'tensor'):
"""
Read a tensor, i.e. numpy array, from HDF5.
:param file: path to file to read
:type file: str
:param key: key to read
:type key: str
:return: tensor
:rtype: numpy.ndarray
"""
assert os.path.exists(file), 'file %s not found' % file
h5f = h5py.File(file, 'r')
assert key in h5f.keys(), 'key %s not found in file %s' % (key, file)
tensor = h5f[key][()]
h5f.close()
return tensor | [
"def",
"read_hdf5",
"(",
"file",
",",
"key",
"=",
"'tensor'",
")",
":",
"assert",
"os",
".",
"path",
".",
"exists",
"(",
"file",
")",
",",
"'file %s not found'",
"%",
"file",
"h5f",
"=",
"h5py",
".",
"File",
"(",
"file",
",",
"'r'",
")",
"assert",
"key",
"in",
"h5f",
".",
"keys",
"(",
")",
",",
"'key %s not found in file %s'",
"%",
"(",
"key",
",",
"file",
")",
"tensor",
"=",
"h5f",
"[",
"key",
"]",
"[",
"(",
")",
"]",
"h5f",
".",
"close",
"(",
")",
"return",
"tensor"
] | https://github.com/davidstutz/mesh-voxelization/blob/81a237c3b345062e364b180a8a4fc7ac98e107a4/examples/fill_occupancy.py#L34-L54 |
|
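A hedged round trip matching the `read_hdf5` helper in the row above; the demo file is created on the spot so the read succeeds, and the read block inlines the same h5py calls the helper makes.

```python
import h5py
import numpy as np

data = np.arange(6.0).reshape(2, 3)
with h5py.File("tensor.h5", "w") as h5f:  # throwaway demo file
    h5f.create_dataset("tensor", data=data)

with h5py.File("tensor.h5", "r") as h5f:  # what read_hdf5 does internally
    tensor = h5f["tensor"][()]            # default key is 'tensor'
print(tensor.shape)                       # (2, 3)
```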
baidu/tera | dbcd28af792d879d961bf9fc7eb60de81b437646 | src/sdk/python/TeraSdk.py | python | ScanDescriptor.SetPackInterval | (self, interval) | 设置scan操作的超时时长,单位ms
服务端在scan操作达到约 interval 毫秒后尽快返回给client结果
Args:
iinterval(long): 一次scan的超时时长,单位ms | 设置scan操作的超时时长,单位ms
服务端在scan操作达到约 interval 毫秒后尽快返回给client结果 | [
"设置scan操作的超时时长,单位ms",
"服务端在scan操作达到约",
"interval",
"毫秒后尽快返回给client结果"
] | def SetPackInterval(self, interval):
"""
设置scan操作的超时时长,单位ms
服务端在scan操作达到约 interval 毫秒后尽快返回给client结果
Args:
iinterval(long): 一次scan的超时时长,单位ms
"""
lib.tera_scan_descriptor_set_pack_interval(self.desc, interval) | [
"def",
"SetPackInterval",
"(",
"self",
",",
"interval",
")",
":",
"lib",
".",
"tera_scan_descriptor_set_pack_interval",
"(",
"self",
".",
"desc",
",",
"interval",
")"
] | https://github.com/baidu/tera/blob/dbcd28af792d879d961bf9fc7eb60de81b437646/src/sdk/python/TeraSdk.py#L110-L118 |
||
cyberbotics/webots | af7fa7d68dcf7b4550f1f2e132092b41e83698fc | projects/default/controllers/sumo_supervisor/SumoSupervisor.py | python | SumoSupervisor.update_vehicles_position_and_velocity | (self, step, rotateWheels) | Update the actual position (using angular and linear velocities) of all the vehicles in Webots. | Update the actual position (using angular and linear velocities) of all the vehicles in Webots. | [
"Update",
"the",
"actual",
"position",
"(",
"using",
"angular",
"and",
"linear",
"velocities",
")",
"of",
"all",
"the",
"vehicles",
"in",
"Webots",
"."
] | def update_vehicles_position_and_velocity(self, step, rotateWheels):
"""Update the actual position (using angular and linear velocities) of all the vehicles in Webots."""
for i in range(0, self.vehicleNumber):
if self.vehicles[i].inUse:
self.vehicles[i].translation.setSFVec3f(self.vehicles[i].currentPos)
self.vehicles[i].rotation.setSFRotation(self.vehicles[i].currentRot)
velocity = []
velocity.append(self.vehicles[i].targetPos[0] - self.vehicles[i].currentPos[0])
velocity.append(self.vehicles[i].targetPos[1] - self.vehicles[i].currentPos[1])
velocity.append(self.vehicles[i].targetPos[2] - self.vehicles[i].currentPos[2])
for j in range(0, 3):
diffAngle = self.vehicles[i].currentAngles[j] - self.vehicles[i].targetAngles[j]
diffAngle = (diffAngle + 2*math.pi) % (2*math.pi)
if (diffAngle > math.pi):
diffAngle -= 2*math.pi
velocity.append(diffAngle)
velocity[:] = [1000 * x / step for x in velocity]
self.vehicles[i].node.setVelocity(velocity)
if rotateWheels:
angularVelocity = [0, self.vehicles[i].speed / self.vehicles[i].wheelRadius, 0]
for wheelAngularVelocity in self.vehicles[i].wheelsAngularVelocity:
wheelAngularVelocity.setSFVec3f(angularVelocity) | [
"def",
"update_vehicles_position_and_velocity",
"(",
"self",
",",
"step",
",",
"rotateWheels",
")",
":",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"self",
".",
"vehicleNumber",
")",
":",
"if",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"inUse",
":",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"translation",
".",
"setSFVec3f",
"(",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"currentPos",
")",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"rotation",
".",
"setSFRotation",
"(",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"currentRot",
")",
"velocity",
"=",
"[",
"]",
"velocity",
".",
"append",
"(",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"targetPos",
"[",
"0",
"]",
"-",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"currentPos",
"[",
"0",
"]",
")",
"velocity",
".",
"append",
"(",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"targetPos",
"[",
"1",
"]",
"-",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"currentPos",
"[",
"1",
"]",
")",
"velocity",
".",
"append",
"(",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"targetPos",
"[",
"2",
"]",
"-",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"currentPos",
"[",
"2",
"]",
")",
"for",
"j",
"in",
"range",
"(",
"0",
",",
"3",
")",
":",
"diffAngle",
"=",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"currentAngles",
"[",
"j",
"]",
"-",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"targetAngles",
"[",
"j",
"]",
"diffAngle",
"=",
"(",
"diffAngle",
"+",
"2",
"*",
"math",
".",
"pi",
")",
"%",
"(",
"2",
"*",
"math",
".",
"pi",
")",
"if",
"(",
"diffAngle",
">",
"math",
".",
"pi",
")",
":",
"diffAngle",
"-=",
"2",
"*",
"math",
".",
"pi",
"velocity",
".",
"append",
"(",
"diffAngle",
")",
"velocity",
"[",
":",
"]",
"=",
"[",
"1000",
"*",
"x",
"/",
"step",
"for",
"x",
"in",
"velocity",
"]",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"node",
".",
"setVelocity",
"(",
"velocity",
")",
"if",
"rotateWheels",
":",
"angularVelocity",
"=",
"[",
"0",
",",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"speed",
"/",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"wheelRadius",
",",
"0",
"]",
"for",
"wheelAngularVelocity",
"in",
"self",
".",
"vehicles",
"[",
"i",
"]",
".",
"wheelsAngularVelocity",
":",
"wheelAngularVelocity",
".",
"setSFVec3f",
"(",
"angularVelocity",
")"
] | https://github.com/cyberbotics/webots/blob/af7fa7d68dcf7b4550f1f2e132092b41e83698fc/projects/default/controllers/sumo_supervisor/SumoSupervisor.py#L324-L345 |
||
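The subtle step in the row above is normalizing an angle difference into (-pi, pi] before turning it into an angular velocity, so a vehicle never spins the long way around. A hedged standalone version of that normalization:

```python
import math

def angle_diff(a, b):
    """Difference a - b wrapped into (-pi, pi]."""
    d = (a - b + 2 * math.pi) % (2 * math.pi)
    return d - 2 * math.pi if d > math.pi else d

print(round(angle_diff(0.1, 6.2), 3))  # 0.183: take the short way, not -6.1
```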
SoarGroup/Soar | a1c5e249499137a27da60533c72969eef3b8ab6b | scons/scons-local-4.1.0/SCons/Node/FS.py | python | FS.VariantDir | (self, variant_dir, src_dir, duplicate=1) | Link the supplied variant directory to the source directory
for purposes of building files. | Link the supplied variant directory to the source directory
for purposes of building files. | [
"Link",
"the",
"supplied",
"variant",
"directory",
"to",
"the",
"source",
"directory",
"for",
"purposes",
"of",
"building",
"files",
"."
] | def VariantDir(self, variant_dir, src_dir, duplicate=1):
"""Link the supplied variant directory to the source directory
for purposes of building files."""
if not isinstance(src_dir, SCons.Node.Node):
src_dir = self.Dir(src_dir)
if not isinstance(variant_dir, SCons.Node.Node):
variant_dir = self.Dir(variant_dir)
if src_dir.is_under(variant_dir):
raise SCons.Errors.UserError("Source directory cannot be under variant directory.")
if variant_dir.srcdir:
if variant_dir.srcdir == src_dir:
return # We already did this.
raise SCons.Errors.UserError("'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir))
variant_dir.link(src_dir, duplicate) | [
"def",
"VariantDir",
"(",
"self",
",",
"variant_dir",
",",
"src_dir",
",",
"duplicate",
"=",
"1",
")",
":",
"if",
"not",
"isinstance",
"(",
"src_dir",
",",
"SCons",
".",
"Node",
".",
"Node",
")",
":",
"src_dir",
"=",
"self",
".",
"Dir",
"(",
"src_dir",
")",
"if",
"not",
"isinstance",
"(",
"variant_dir",
",",
"SCons",
".",
"Node",
".",
"Node",
")",
":",
"variant_dir",
"=",
"self",
".",
"Dir",
"(",
"variant_dir",
")",
"if",
"src_dir",
".",
"is_under",
"(",
"variant_dir",
")",
":",
"raise",
"SCons",
".",
"Errors",
".",
"UserError",
"(",
"\"Source directory cannot be under variant directory.\"",
")",
"if",
"variant_dir",
".",
"srcdir",
":",
"if",
"variant_dir",
".",
"srcdir",
"==",
"src_dir",
":",
"return",
"# We already did this.",
"raise",
"SCons",
".",
"Errors",
".",
"UserError",
"(",
"\"'%s' already has a source directory: '%s'.\"",
"%",
"(",
"variant_dir",
",",
"variant_dir",
".",
"srcdir",
")",
")",
"variant_dir",
".",
"link",
"(",
"src_dir",
",",
"duplicate",
")"
] | https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/Node/FS.py#L1413-L1427 |
||
PaddlePaddle/Paddle | 1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c | python/paddle/dataset/common.py | python | cluster_files_reader | (files_pattern,
trainer_count,
trainer_id,
loader=pickle.load) | return reader | Create a reader that yield element from the given files, select
a file set according trainer count and trainer_id
:param files_pattern: the files which generating by split(...)
:param trainer_count: total trainer count
:param trainer_id: the trainer rank id
:param loader: is a callable function that load object from file, this
function will be called as loader(f) and f is a file object.
Default is cPickle.load | Create a reader that yield element from the given files, select
a file set according trainer count and trainer_id | [
"Create",
"a",
"reader",
"that",
"yield",
"element",
"from",
"the",
"given",
"files",
"select",
"a",
"file",
"set",
"according",
"trainer",
"count",
"and",
"trainer_id"
] | def cluster_files_reader(files_pattern,
trainer_count,
trainer_id,
loader=pickle.load):
"""
Create a reader that yield element from the given files, select
a file set according trainer count and trainer_id
:param files_pattern: the files which generating by split(...)
:param trainer_count: total trainer count
:param trainer_id: the trainer rank id
:param loader: is a callable function that load object from file, this
function will be called as loader(f) and f is a file object.
Default is cPickle.load
"""
def reader():
if not callable(loader):
raise TypeError("loader should be callable.")
file_list = glob.glob(files_pattern)
file_list.sort()
my_file_list = []
for idx, fn in enumerate(file_list):
if idx % trainer_count == trainer_id:
print("append file: %s" % fn)
my_file_list.append(fn)
for fn in my_file_list:
with open(fn, "r") as f:
lines = loader(f)
for line in lines:
yield line
return reader | [
"def",
"cluster_files_reader",
"(",
"files_pattern",
",",
"trainer_count",
",",
"trainer_id",
",",
"loader",
"=",
"pickle",
".",
"load",
")",
":",
"def",
"reader",
"(",
")",
":",
"if",
"not",
"callable",
"(",
"loader",
")",
":",
"raise",
"TypeError",
"(",
"\"loader should be callable.\"",
")",
"file_list",
"=",
"glob",
".",
"glob",
"(",
"files_pattern",
")",
"file_list",
".",
"sort",
"(",
")",
"my_file_list",
"=",
"[",
"]",
"for",
"idx",
",",
"fn",
"in",
"enumerate",
"(",
"file_list",
")",
":",
"if",
"idx",
"%",
"trainer_count",
"==",
"trainer_id",
":",
"print",
"(",
"\"append file: %s\"",
"%",
"fn",
")",
"my_file_list",
".",
"append",
"(",
"fn",
")",
"for",
"fn",
"in",
"my_file_list",
":",
"with",
"open",
"(",
"fn",
",",
"\"r\"",
")",
"as",
"f",
":",
"lines",
"=",
"loader",
"(",
"f",
")",
"for",
"line",
"in",
"lines",
":",
"yield",
"line",
"return",
"reader"
] | https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/dataset/common.py#L167-L199 |
|
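The sharding rule in the row above is simply `index % trainer_count == trainer_id` over the sorted file list, so every trainer gets a disjoint, deterministic slice. A hedged two-line demo:

```python
file_list = sorted(f"part-{i:05d}" for i in range(8))
trainer_count, trainer_id = 4, 1
mine = [fn for idx, fn in enumerate(file_list) if idx % trainer_count == trainer_id]
print(mine)  # ['part-00001', 'part-00005']
```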
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/parallel_for/pfor.py | python | PFor.all_indices_partitioned | (self) | return self._all_indices_partitioned | all_indices_partitioned property.
Returns:
True if we are inside a control flow construct and not all pfor iterations
may be active. | all_indices_partitioned property. | [
"all_indices_partitioned",
"property",
"."
] | def all_indices_partitioned(self):
"""all_indices_partitioned property.
Returns:
True if we are inside a control flow construct and not all pfor iterations
may be active.
"""
return self._all_indices_partitioned | [
"def",
"all_indices_partitioned",
"(",
"self",
")",
":",
"return",
"self",
".",
"_all_indices_partitioned"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/parallel_for/pfor.py#L1446-L1453 |
|
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib-tk/Tkinter.py | python | Listbox.get | (self, first, last=None) | Get list of items from FIRST to LAST (not included). | Get list of items from FIRST to LAST (not included). | [
"Get",
"list",
"of",
"items",
"from",
"FIRST",
"to",
"LAST",
"(",
"not",
"included",
")",
"."
] | def get(self, first, last=None):
"""Get list of items from FIRST to LAST (not included)."""
if last:
return self.tk.splitlist(self.tk.call(
self._w, 'get', first, last))
else:
return self.tk.call(self._w, 'get', first) | [
"def",
"get",
"(",
"self",
",",
"first",
",",
"last",
"=",
"None",
")",
":",
"if",
"last",
":",
"return",
"self",
".",
"tk",
".",
"splitlist",
"(",
"self",
".",
"tk",
".",
"call",
"(",
"self",
".",
"_w",
",",
"'get'",
",",
"first",
",",
"last",
")",
")",
"else",
":",
"return",
"self",
".",
"tk",
".",
"call",
"(",
"self",
".",
"_w",
",",
"'get'",
",",
"first",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib-tk/Tkinter.py#L2566-L2572 |
||
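A hedged sketch of `Listbox.get` from the row above, written against Python 3's `tkinter` (the row itself shows the Python 2 `Tkinter` module, and it needs a display to run). Note that in practice Tk treats the `last` index as inclusive, which is worth knowing next to the row's "not included" wording:

```python
import tkinter as tk

root = tk.Tk()
lb = tk.Listbox(root)
for item in ("a", "b", "c"):
    lb.insert(tk.END, item)
print(lb.get(0))     # 'a'
print(lb.get(0, 2))  # ('a', 'b', 'c'): Tk includes index 2 here
root.destroy()
```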
carla-simulator/carla | 8854804f4d7748e14d937ec763a2912823a7e5f5 | PythonAPI/carla/agents/navigation/behavior_agent.py | python | BehaviorAgent.pedestrian_avoid_manager | (self, waypoint) | return walker_state, walker, distance | This module is in charge of warning in case of a collision
with any pedestrian.
:param location: current location of the agent
:param waypoint: current waypoint of the agent
:return vehicle_state: True if there is a walker nearby, False if not
:return vehicle: nearby walker
:return distance: distance to nearby walker | This module is in charge of warning in case of a collision
with any pedestrian. | [
"This",
"module",
"is",
"in",
"charge",
"of",
"warning",
"in",
"case",
"of",
"a",
"collision",
"with",
"any",
"pedestrian",
"."
] | def pedestrian_avoid_manager(self, waypoint):
"""
This module is in charge of warning in case of a collision
with any pedestrian.
:param location: current location of the agent
:param waypoint: current waypoint of the agent
:return vehicle_state: True if there is a walker nearby, False if not
:return vehicle: nearby walker
:return distance: distance to nearby walker
"""
walker_list = self._world.get_actors().filter("*walker.pedestrian*")
def dist(w): return w.get_location().distance(waypoint.transform.location)
walker_list = [w for w in walker_list if dist(w) < 10]
if self._direction == RoadOption.CHANGELANELEFT:
walker_state, walker, distance = self._vehicle_obstacle_detected(walker_list, max(
self._behavior.min_proximity_threshold, self._speed_limit / 2), up_angle_th=90, lane_offset=-1)
elif self._direction == RoadOption.CHANGELANERIGHT:
walker_state, walker, distance = self._vehicle_obstacle_detected(walker_list, max(
self._behavior.min_proximity_threshold, self._speed_limit / 2), up_angle_th=90, lane_offset=1)
else:
walker_state, walker, distance = self._vehicle_obstacle_detected(walker_list, max(
self._behavior.min_proximity_threshold, self._speed_limit / 3), up_angle_th=60)
return walker_state, walker, distance | [
"def",
"pedestrian_avoid_manager",
"(",
"self",
",",
"waypoint",
")",
":",
"walker_list",
"=",
"self",
".",
"_world",
".",
"get_actors",
"(",
")",
".",
"filter",
"(",
"\"*walker.pedestrian*\"",
")",
"def",
"dist",
"(",
"w",
")",
":",
"return",
"w",
".",
"get_location",
"(",
")",
".",
"distance",
"(",
"waypoint",
".",
"transform",
".",
"location",
")",
"walker_list",
"=",
"[",
"w",
"for",
"w",
"in",
"walker_list",
"if",
"dist",
"(",
"w",
")",
"<",
"10",
"]",
"if",
"self",
".",
"_direction",
"==",
"RoadOption",
".",
"CHANGELANELEFT",
":",
"walker_state",
",",
"walker",
",",
"distance",
"=",
"self",
".",
"_vehicle_obstacle_detected",
"(",
"walker_list",
",",
"max",
"(",
"self",
".",
"_behavior",
".",
"min_proximity_threshold",
",",
"self",
".",
"_speed_limit",
"/",
"2",
")",
",",
"up_angle_th",
"=",
"90",
",",
"lane_offset",
"=",
"-",
"1",
")",
"elif",
"self",
".",
"_direction",
"==",
"RoadOption",
".",
"CHANGELANERIGHT",
":",
"walker_state",
",",
"walker",
",",
"distance",
"=",
"self",
".",
"_vehicle_obstacle_detected",
"(",
"walker_list",
",",
"max",
"(",
"self",
".",
"_behavior",
".",
"min_proximity_threshold",
",",
"self",
".",
"_speed_limit",
"/",
"2",
")",
",",
"up_angle_th",
"=",
"90",
",",
"lane_offset",
"=",
"1",
")",
"else",
":",
"walker_state",
",",
"walker",
",",
"distance",
"=",
"self",
".",
"_vehicle_obstacle_detected",
"(",
"walker_list",
",",
"max",
"(",
"self",
".",
"_behavior",
".",
"min_proximity_threshold",
",",
"self",
".",
"_speed_limit",
"/",
"3",
")",
",",
"up_angle_th",
"=",
"60",
")",
"return",
"walker_state",
",",
"walker",
",",
"distance"
] | https://github.com/carla-simulator/carla/blob/8854804f4d7748e14d937ec763a2912823a7e5f5/PythonAPI/carla/agents/navigation/behavior_agent.py#L169-L195 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/pyparsing.py | python | ParseBaseException.__getattr__ | (self, aname) | supported attributes by name are:
- lineno - returns the line number of the exception text
- col - returns the column number of the exception text
- line - returns the line containing the exception text | supported attributes by name are:
- lineno - returns the line number of the exception text
- col - returns the column number of the exception text
- line - returns the line containing the exception text | [
"supported",
"attributes",
"by",
"name",
"are",
":",
"-",
"lineno",
"-",
"returns",
"the",
"line",
"number",
"of",
"the",
"exception",
"text",
"-",
"col",
"-",
"returns",
"the",
"column",
"number",
"of",
"the",
"exception",
"text",
"-",
"line",
"-",
"returns",
"the",
"line",
"containing",
"the",
"exception",
"text"
] | def __getattr__(self, aname):
"""supported attributes by name are:
- lineno - returns the line number of the exception text
- col - returns the column number of the exception text
- line - returns the line containing the exception text
"""
if aname == "lineno":
return lineno(self.loc, self.pstr)
elif aname in ("col", "column"):
return col(self.loc, self.pstr)
elif aname == "line":
return line(self.loc, self.pstr)
else:
raise AttributeError(aname) | [
"def",
"__getattr__",
"(",
"self",
",",
"aname",
")",
":",
"if",
"aname",
"==",
"\"lineno\"",
":",
"return",
"lineno",
"(",
"self",
".",
"loc",
",",
"self",
".",
"pstr",
")",
"elif",
"aname",
"in",
"(",
"\"col\"",
",",
"\"column\"",
")",
":",
"return",
"col",
"(",
"self",
".",
"loc",
",",
"self",
".",
"pstr",
")",
"elif",
"aname",
"==",
"\"line\"",
":",
"return",
"line",
"(",
"self",
".",
"loc",
",",
"self",
".",
"pstr",
")",
"else",
":",
"raise",
"AttributeError",
"(",
"aname",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/pyparsing.py#L323-L336 |
||
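A hedged demo of the computed `lineno`/`col`/`line` attributes described in the row above, using a standalone `pyparsing` install (the row shows the copy vendored inside pip):

```python
import pyparsing as pp

try:
    pp.Word(pp.nums).parseString("abc")  # expects digits, so this fails
except pp.ParseException as err:
    print(err.lineno, err.col, err.line)  # 1 1 abc
```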
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/path.py/path.py | python | FastPath.__prepare | (self, pattern, normcase=None) | return pattern, normcase | Prepares a fmatch_pattern for use with ``FastPath.__fnmatch`.
`pattern` - A filename pattern with wildcards,
for example ``'*.py'``. If the pattern contains a `normcase`
attribute, it is applied to the name and path prior to comparison.
`normcase` - (optional) A function used to normalize the pattern and
filename before matching. Defaults to :meth:`self.module`, which defaults
to :meth:`os.path.normcase`.
.. seealso:: :func:`FastPath.__fnmatch` | Prepares a fmatch_pattern for use with ``FastPath.__fnmatch`.
`pattern` - A filename pattern with wildcards,
for example ``'*.py'``. If the pattern contains a `normcase`
attribute, it is applied to the name and path prior to comparison.
`normcase` - (optional) A function used to normalize the pattern and
filename before matching. Defaults to :meth:`self.module`, which defaults
to :meth:`os.path.normcase`.
.. seealso:: :func:`FastPath.__fnmatch` | [
"Prepares",
"a",
"fmatch_pattern",
"for",
"use",
"with",
"FastPath",
".",
"__fnmatch",
".",
"pattern",
"-",
"A",
"filename",
"pattern",
"with",
"wildcards",
"for",
"example",
"*",
".",
"py",
".",
"If",
"the",
"pattern",
"contains",
"a",
"normcase",
"attribute",
"it",
"is",
"applied",
"to",
"the",
"name",
"and",
"path",
"prior",
"to",
"comparison",
".",
"normcase",
"-",
"(",
"optional",
")",
"A",
"function",
"used",
"to",
"normalize",
"the",
"pattern",
"and",
"filename",
"before",
"matching",
".",
"Defaults",
"to",
":",
"meth",
":",
"self",
".",
"module",
"which",
"defaults",
"to",
":",
"meth",
":",
"os",
".",
"path",
".",
"normcase",
".",
"..",
"seealso",
"::",
":",
"func",
":",
"FastPath",
".",
"__fnmatch"
] | def __prepare(self, pattern, normcase=None):
""" Prepares a fmatch_pattern for use with ``FastPath.__fnmatch`.
`pattern` - A filename pattern with wildcards,
for example ``'*.py'``. If the pattern contains a `normcase`
attribute, it is applied to the name and path prior to comparison.
`normcase` - (optional) A function used to normalize the pattern and
filename before matching. Defaults to :meth:`self.module`, which defaults
to :meth:`os.path.normcase`.
.. seealso:: :func:`FastPath.__fnmatch`
"""
if not normcase:
normcase = getattr(pattern, 'normcase', self.module.normcase)
pattern = normcase(pattern)
return pattern, normcase | [
"def",
"__prepare",
"(",
"self",
",",
"pattern",
",",
"normcase",
"=",
"None",
")",
":",
"if",
"not",
"normcase",
":",
"normcase",
"=",
"getattr",
"(",
"pattern",
",",
"'normcase'",
",",
"self",
".",
"module",
".",
"normcase",
")",
"pattern",
"=",
"normcase",
"(",
"pattern",
")",
"return",
"pattern",
",",
"normcase"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/path.py/path.py#L1881-L1894 |
|
deepmind/reverb | ef3c8f0be1b720a741d2dee335e15e44668c291a | reverb/structured_writer.py | python | infer_signature | (configs: Sequence[Config],
step_spec: reverb_types.SpecNest) | return tree.map_structure_with_path(_validate_and_convert_to_spec, *patterns) | Infers the table signature from the configs that generate its items.
Args:
configs: All the configs used to generate items for the table.
step_spec: A structured example of the step that will be appended to the
`StructuredWriter`.
Returns:
A nested structure of `TensorSpec` describing the trajectories of the table.
Raises:
ValueError: If no configs are provided.
ValueError: If configs doesn't produce trajectories of identical structure.
ValueError: If configs targets does not all target the same table.
ValueError: If configs produce trajectories with incompatible tensors (i.e.
tensors cannot be concatenated). | Infers the table signature from the configs that generate its items. | [
"Infers",
"the",
"table",
"signature",
"from",
"the",
"configs",
"that",
"generate",
"its",
"items",
"."
] | def infer_signature(configs: Sequence[Config],
step_spec: reverb_types.SpecNest) -> reverb_types.SpecNest:
"""Infers the table signature from the configs that generate its items.
Args:
configs: All the configs used to generate items for the table.
step_spec: A structured example of the step that will be appended to the
`StructuredWriter`.
Returns:
A nested structure of `TensorSpec` describing the trajectories of the table.
Raises:
ValueError: If no configs are provided.
ValueError: If configs doesn't produce trajectories of identical structure.
ValueError: If configs targets does not all target the same table.
ValueError: If configs produce trajectories with incompatible tensors (i.e.
tensors cannot be concatenated).
"""
if not configs:
raise ValueError('At least one config must be provided.')
if any(c.pattern_structure != configs[0].pattern_structure for c in configs):
raise ValueError(
'All configs must have exactly the same pattern_structure.')
if any(c.table != configs[0].table for c in configs):
raise ValueError(
f'All configs must target the same table but provided configs '
f'included {", ".join(sorted(set(c.table for c in configs)))}.')
flat_step_spec = tree.flatten(step_spec)
def _validate_and_convert_to_spec(path, *nodes):
# Check that all nodes share the same dtype.
dtypes = [flat_step_spec[node.flat_source_index].dtype for node in nodes]
if any(dtype != dtypes[0] for dtype in dtypes):
raise ValueError(
f'Configs produce trajectories with multiple dtypes at {path}. '
f'Got {dtypes}.')
# Create shapes for all nodes.
shapes = []
for node in nodes:
shape = list(flat_step_spec[node.flat_source_index].shape)
if node.HasField('start'):
length = (node.stop - node.start) // (node.step or 1)
shape = [length, *shape]
shapes.append(tensor_shape.TensorShape(shape))
# Check that all shapes are either completely identical or at least
# identical in all dimensions but the first.
if (any(shape.rank != shapes[0].rank for shape in shapes) or
(shapes[0].rank > 1 and
any(shape[1:] != shapes[0][1:] for shape in shapes))):
raise ValueError(
f'Configs produce trajectories with incompatible shapes at {path}. '
f'Got {shapes}.')
# Merge the shapes into a single shape. If the first dimension varies then
# we set the leading dimension as undefined.
if all(shape == shapes[0] for shape in shapes):
merged_shape = shapes[0]
else:
merged_shape = [None, *shapes[0][1:]]
return tensor_spec.TensorSpec(
shape=merged_shape,
dtype=dtypes[0],
name='/'.join(str(x) for x in path))
patterns = [unpack_pattern(config) for config in configs]
return tree.map_structure_with_path(_validate_and_convert_to_spec, *patterns) | [
"def",
"infer_signature",
"(",
"configs",
":",
"Sequence",
"[",
"Config",
"]",
",",
"step_spec",
":",
"reverb_types",
".",
"SpecNest",
")",
"->",
"reverb_types",
".",
"SpecNest",
":",
"if",
"not",
"configs",
":",
"raise",
"ValueError",
"(",
"'At least one config must be provided.'",
")",
"if",
"any",
"(",
"c",
".",
"pattern_structure",
"!=",
"configs",
"[",
"0",
"]",
".",
"pattern_structure",
"for",
"c",
"in",
"configs",
")",
":",
"raise",
"ValueError",
"(",
"'All configs must have exactly the same pattern_structure.'",
")",
"if",
"any",
"(",
"c",
".",
"table",
"!=",
"configs",
"[",
"0",
"]",
".",
"table",
"for",
"c",
"in",
"configs",
")",
":",
"raise",
"ValueError",
"(",
"f'All configs must target the same table but provided configs '",
"f'included {\", \".join(sorted(set(c.table for c in configs)))}.'",
")",
"flat_step_spec",
"=",
"tree",
".",
"flatten",
"(",
"step_spec",
")",
"def",
"_validate_and_convert_to_spec",
"(",
"path",
",",
"*",
"nodes",
")",
":",
"# Check that all nodes share the same dtype.",
"dtypes",
"=",
"[",
"flat_step_spec",
"[",
"node",
".",
"flat_source_index",
"]",
".",
"dtype",
"for",
"node",
"in",
"nodes",
"]",
"if",
"any",
"(",
"dtype",
"!=",
"dtypes",
"[",
"0",
"]",
"for",
"dtype",
"in",
"dtypes",
")",
":",
"raise",
"ValueError",
"(",
"f'Configs produce trajectories with multiple dtypes at {path}. '",
"f'Got {dtypes}.'",
")",
"# Create shapes for all nodes.",
"shapes",
"=",
"[",
"]",
"for",
"node",
"in",
"nodes",
":",
"shape",
"=",
"list",
"(",
"flat_step_spec",
"[",
"node",
".",
"flat_source_index",
"]",
".",
"shape",
")",
"if",
"node",
".",
"HasField",
"(",
"'start'",
")",
":",
"length",
"=",
"(",
"node",
".",
"stop",
"-",
"node",
".",
"start",
")",
"//",
"(",
"node",
".",
"step",
"or",
"1",
")",
"shape",
"=",
"[",
"length",
",",
"*",
"shape",
"]",
"shapes",
".",
"append",
"(",
"tensor_shape",
".",
"TensorShape",
"(",
"shape",
")",
")",
"# Check that all shapes are either completely identical or at least",
"# identical in all dimensions but the first.",
"if",
"(",
"any",
"(",
"shape",
".",
"rank",
"!=",
"shapes",
"[",
"0",
"]",
".",
"rank",
"for",
"shape",
"in",
"shapes",
")",
"or",
"(",
"shapes",
"[",
"0",
"]",
".",
"rank",
">",
"1",
"and",
"any",
"(",
"shape",
"[",
"1",
":",
"]",
"!=",
"shapes",
"[",
"0",
"]",
"[",
"1",
":",
"]",
"for",
"shape",
"in",
"shapes",
")",
")",
")",
":",
"raise",
"ValueError",
"(",
"f'Configs produce trajectories with incompatible shapes at {path}. '",
"f'Got {shapes}.'",
")",
"# Merge the shapes into a single shape. If the first dimension varies then",
"# we set the leading dimension as undefined.",
"if",
"all",
"(",
"shape",
"==",
"shapes",
"[",
"0",
"]",
"for",
"shape",
"in",
"shapes",
")",
":",
"merged_shape",
"=",
"shapes",
"[",
"0",
"]",
"else",
":",
"merged_shape",
"=",
"[",
"None",
",",
"*",
"shapes",
"[",
"0",
"]",
"[",
"1",
":",
"]",
"]",
"return",
"tensor_spec",
".",
"TensorSpec",
"(",
"shape",
"=",
"merged_shape",
",",
"dtype",
"=",
"dtypes",
"[",
"0",
"]",
",",
"name",
"=",
"'/'",
".",
"join",
"(",
"str",
"(",
"x",
")",
"for",
"x",
"in",
"path",
")",
")",
"patterns",
"=",
"[",
"unpack_pattern",
"(",
"config",
")",
"for",
"config",
"in",
"configs",
"]",
"return",
"tree",
".",
"map_structure_with_path",
"(",
"_validate_and_convert_to_spec",
",",
"*",
"patterns",
")"
] | https://github.com/deepmind/reverb/blob/ef3c8f0be1b720a741d2dee335e15e44668c291a/reverb/structured_writer.py#L308-L381 |
|
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/slim/python/slim/nets/alexnet.py | python | alexnet_v2 | (inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='alexnet_v2') | AlexNet version 2.
Described in: http://arxiv.org/pdf/1404.5997v2.pdf
Parameters from:
github.com/akrizhevsky/cuda-convnet2/blob/master/layers/
layers-imagenet-1gpu.cfg
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224. To use in fully
convolutional mode, set spatial_squeeze to false.
The LRN layers have been removed and the initializers have been changed from
random_normal_initializer to xavier_initializer.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
spatial_squeeze: whether or not to squeeze the spatial dimensions of the
outputs. Useful to remove unnecessary dimensions for classification.
scope: Optional scope for the variables.
Returns:
the last op containing the log predictions and end_points dict. | AlexNet version 2. | [
"AlexNet",
"version",
"2",
"."
] | def alexnet_v2(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.5,
spatial_squeeze=True,
scope='alexnet_v2'):
"""AlexNet version 2.
Described in: http://arxiv.org/pdf/1404.5997v2.pdf
Parameters from:
github.com/akrizhevsky/cuda-convnet2/blob/master/layers/
layers-imagenet-1gpu.cfg
Note: All the fully_connected layers have been transformed to conv2d layers.
To use in classification mode, resize input to 224x224. To use in fully
convolutional mode, set spatial_squeeze to false.
The LRN layers have been removed and the initializers have been changed from
random_normal_initializer to xavier_initializer.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes.
is_training: whether or not the model is being trained.
dropout_keep_prob: the probability that activations are kept in the dropout
layers during training.
spatial_squeeze: whether or not to squeeze the spatial dimensions of the
outputs. Useful to remove unnecessary dimensions for classification.
scope: Optional scope for the variables.
Returns:
the last op containing the log predictions and end_points dict.
"""
with variable_scope.variable_scope(scope, 'alexnet_v2', [inputs]) as sc:
end_points_collection = sc.original_name_scope + '_end_points'
# Collect outputs for conv2d, fully_connected and max_pool2d.
with arg_scope(
[layers.conv2d, layers_lib.fully_connected, layers_lib.max_pool2d],
outputs_collections=[end_points_collection]):
net = layers.conv2d(
inputs, 64, [11, 11], 4, padding='VALID', scope='conv1')
net = layers_lib.max_pool2d(net, [3, 3], 2, scope='pool1')
net = layers.conv2d(net, 192, [5, 5], scope='conv2')
net = layers_lib.max_pool2d(net, [3, 3], 2, scope='pool2')
net = layers.conv2d(net, 384, [3, 3], scope='conv3')
net = layers.conv2d(net, 384, [3, 3], scope='conv4')
net = layers.conv2d(net, 256, [3, 3], scope='conv5')
net = layers_lib.max_pool2d(net, [3, 3], 2, scope='pool5')
# Use conv2d instead of fully_connected layers.
with arg_scope(
[layers.conv2d],
weights_initializer=trunc_normal(0.005),
biases_initializer=init_ops.constant_initializer(0.1)):
net = layers.conv2d(net, 4096, [5, 5], padding='VALID', scope='fc6')
net = layers_lib.dropout(
net, dropout_keep_prob, is_training=is_training, scope='dropout6')
net = layers.conv2d(net, 4096, [1, 1], scope='fc7')
net = layers_lib.dropout(
net, dropout_keep_prob, is_training=is_training, scope='dropout7')
net = layers.conv2d(
net,
num_classes, [1, 1],
activation_fn=None,
normalizer_fn=None,
biases_initializer=init_ops.zeros_initializer(),
scope='fc8')
# Convert end_points_collection into a end_point dict.
end_points = utils.convert_collection_to_dict(end_points_collection)
if spatial_squeeze:
net = array_ops.squeeze(net, [1, 2], name='fc8/squeezed')
end_points[sc.name + '/fc8'] = net
return net, end_points | [
"def",
"alexnet_v2",
"(",
"inputs",
",",
"num_classes",
"=",
"1000",
",",
"is_training",
"=",
"True",
",",
"dropout_keep_prob",
"=",
"0.5",
",",
"spatial_squeeze",
"=",
"True",
",",
"scope",
"=",
"'alexnet_v2'",
")",
":",
"with",
"variable_scope",
".",
"variable_scope",
"(",
"scope",
",",
"'alexnet_v2'",
",",
"[",
"inputs",
"]",
")",
"as",
"sc",
":",
"end_points_collection",
"=",
"sc",
".",
"original_name_scope",
"+",
"'_end_points'",
"# Collect outputs for conv2d, fully_connected and max_pool2d.",
"with",
"arg_scope",
"(",
"[",
"layers",
".",
"conv2d",
",",
"layers_lib",
".",
"fully_connected",
",",
"layers_lib",
".",
"max_pool2d",
"]",
",",
"outputs_collections",
"=",
"[",
"end_points_collection",
"]",
")",
":",
"net",
"=",
"layers",
".",
"conv2d",
"(",
"inputs",
",",
"64",
",",
"[",
"11",
",",
"11",
"]",
",",
"4",
",",
"padding",
"=",
"'VALID'",
",",
"scope",
"=",
"'conv1'",
")",
"net",
"=",
"layers_lib",
".",
"max_pool2d",
"(",
"net",
",",
"[",
"3",
",",
"3",
"]",
",",
"2",
",",
"scope",
"=",
"'pool1'",
")",
"net",
"=",
"layers",
".",
"conv2d",
"(",
"net",
",",
"192",
",",
"[",
"5",
",",
"5",
"]",
",",
"scope",
"=",
"'conv2'",
")",
"net",
"=",
"layers_lib",
".",
"max_pool2d",
"(",
"net",
",",
"[",
"3",
",",
"3",
"]",
",",
"2",
",",
"scope",
"=",
"'pool2'",
")",
"net",
"=",
"layers",
".",
"conv2d",
"(",
"net",
",",
"384",
",",
"[",
"3",
",",
"3",
"]",
",",
"scope",
"=",
"'conv3'",
")",
"net",
"=",
"layers",
".",
"conv2d",
"(",
"net",
",",
"384",
",",
"[",
"3",
",",
"3",
"]",
",",
"scope",
"=",
"'conv4'",
")",
"net",
"=",
"layers",
".",
"conv2d",
"(",
"net",
",",
"256",
",",
"[",
"3",
",",
"3",
"]",
",",
"scope",
"=",
"'conv5'",
")",
"net",
"=",
"layers_lib",
".",
"max_pool2d",
"(",
"net",
",",
"[",
"3",
",",
"3",
"]",
",",
"2",
",",
"scope",
"=",
"'pool5'",
")",
"# Use conv2d instead of fully_connected layers.",
"with",
"arg_scope",
"(",
"[",
"layers",
".",
"conv2d",
"]",
",",
"weights_initializer",
"=",
"trunc_normal",
"(",
"0.005",
")",
",",
"biases_initializer",
"=",
"init_ops",
".",
"constant_initializer",
"(",
"0.1",
")",
")",
":",
"net",
"=",
"layers",
".",
"conv2d",
"(",
"net",
",",
"4096",
",",
"[",
"5",
",",
"5",
"]",
",",
"padding",
"=",
"'VALID'",
",",
"scope",
"=",
"'fc6'",
")",
"net",
"=",
"layers_lib",
".",
"dropout",
"(",
"net",
",",
"dropout_keep_prob",
",",
"is_training",
"=",
"is_training",
",",
"scope",
"=",
"'dropout6'",
")",
"net",
"=",
"layers",
".",
"conv2d",
"(",
"net",
",",
"4096",
",",
"[",
"1",
",",
"1",
"]",
",",
"scope",
"=",
"'fc7'",
")",
"net",
"=",
"layers_lib",
".",
"dropout",
"(",
"net",
",",
"dropout_keep_prob",
",",
"is_training",
"=",
"is_training",
",",
"scope",
"=",
"'dropout7'",
")",
"net",
"=",
"layers",
".",
"conv2d",
"(",
"net",
",",
"num_classes",
",",
"[",
"1",
",",
"1",
"]",
",",
"activation_fn",
"=",
"None",
",",
"normalizer_fn",
"=",
"None",
",",
"biases_initializer",
"=",
"init_ops",
".",
"zeros_initializer",
"(",
")",
",",
"scope",
"=",
"'fc8'",
")",
"# Convert end_points_collection into a end_point dict.",
"end_points",
"=",
"utils",
".",
"convert_collection_to_dict",
"(",
"end_points_collection",
")",
"if",
"spatial_squeeze",
":",
"net",
"=",
"array_ops",
".",
"squeeze",
"(",
"net",
",",
"[",
"1",
",",
"2",
"]",
",",
"name",
"=",
"'fc8/squeezed'",
")",
"end_points",
"[",
"sc",
".",
"name",
"+",
"'/fc8'",
"]",
"=",
"net",
"return",
"net",
",",
"end_points"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/slim/python/slim/nets/alexnet.py#L63-L135 |
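A hedged usage sketch for the `alexnet_v2` record above; it assumes TensorFlow 1.x semantics and an importable `nets.alexnet` module (the import path is an assumption, not taken from the record):

```python
import tensorflow as tf  # assumes TensorFlow 1.x semantics
from nets import alexnet  # illustrative import path

# Classification mode expects 224x224 inputs, per the docstring.
images = tf.placeholder(tf.float32, [None, 224, 224, 3])
logits, end_points = alexnet.alexnet_v2(images, num_classes=1000,
                                        is_training=False)
print(logits.shape)  # (?, 1000) after the spatial squeeze
```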
||
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | tools/site_compare/drivers/win32/mouse.py | python | PressButton | (down, button='left') | Simulate a mouse button press or release at the current mouse location.
Args:
down: whether the button is pressed or released
button: which button is pressed
Returns:
None | Simulate a mouse button press or release at the current mouse location. | [
"Simulate",
"a",
"mouse",
"button",
"press",
"or",
"release",
"at",
"the",
"current",
"mouse",
"location",
"."
] | def PressButton(down, button='left'):
"""Simulate a mouse button press or release at the current mouse location.
Args:
down: whether the button is pressed or released
button: which button is pressed
Returns:
None
"""
# Put the mouse_event flags in a convenient dictionary by button
flags = {
'left': (win32con.MOUSEEVENTF_LEFTUP, win32con.MOUSEEVENTF_LEFTDOWN),
'middle': (win32con.MOUSEEVENTF_MIDDLEUP, win32con.MOUSEEVENTF_MIDDLEDOWN),
'right': (win32con.MOUSEEVENTF_RIGHTUP, win32con.MOUSEEVENTF_RIGHTDOWN)
}
# hit the button
win32api.mouse_event(flags[button][down], 0, 0) | [
"def",
"PressButton",
"(",
"down",
",",
"button",
"=",
"'left'",
")",
":",
"# Put the mouse_event flags in a convenient dictionary by button",
"flags",
"=",
"{",
"'left'",
":",
"(",
"win32con",
".",
"MOUSEEVENTF_LEFTUP",
",",
"win32con",
".",
"MOUSEEVENTF_LEFTDOWN",
")",
",",
"'middle'",
":",
"(",
"win32con",
".",
"MOUSEEVENTF_MIDDLEUP",
",",
"win32con",
".",
"MOUSEEVENTF_MIDDLEDOWN",
")",
",",
"'right'",
":",
"(",
"win32con",
".",
"MOUSEEVENTF_RIGHTUP",
",",
"win32con",
".",
"MOUSEEVENTF_RIGHTDOWN",
")",
"}",
"# hit the button",
"win32api",
".",
"mouse_event",
"(",
"flags",
"[",
"button",
"]",
"[",
"down",
"]",
",",
"0",
",",
"0",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/site_compare/drivers/win32/mouse.py#L47-L66 |
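`PressButton` issues a single transition, so a full click is a press followed by a release; a sketch assuming the function above and the `win32` modules are in scope on Windows (`click` is a hypothetical helper, not part of the driver):

```python
import time

def click(button='left'):
    # A click is a press (down=True selects the *DOWN flag) then a release.
    PressButton(True, button)
    time.sleep(0.05)   # brief hold so the target window registers the press
    PressButton(False, button)
```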
||
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/Maya_AnimationRiggingTools/ARTv1/MayaTools/General/Scripts/ART_skeletonBuilder_UI.py | python | SkeletonBuilder_UI.faceInfo_UI_Cancel | (self, *args) | Closes the faceInfo user interface
@author chrise | Closes the faceInfo user interface | [
"Closes",
"the",
"faceInfo",
"user",
"interface"
] | def faceInfo_UI_Cancel(self, *args):
'''
Closes the faceInfo user interface
@author chrise
'''
cmds.deleteUI("faceInfo_UI") | [
"def",
"faceInfo_UI_Cancel",
"(",
"self",
",",
"*",
"args",
")",
":",
"cmds",
".",
"deleteUI",
"(",
"\"faceInfo_UI\"",
")"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/Maya_AnimationRiggingTools/ARTv1/MayaTools/General/Scripts/ART_skeletonBuilder_UI.py#L2957-L2962 |
||
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/HFIR_4Circle_Reduction/reduce4circleControl.py | python | CWSCDReductionControl.add_k_shift_vector | (self, k_x, k_y, k_z) | return return_k_index | Add a k-shift vector
:param k_x:
:param k_y:
:param k_z:
:return: k_index of the (k_x, k_y, k_z) | Add a k-shift vector
:param k_x:
:param k_y:
:param k_z:
:return: k_index of the (k_x, k_y, k_z) | [
"Add",
"a",
"k",
"-",
"shift",
"vector",
":",
"param",
"k_x",
":",
":",
"param",
"k_y",
":",
":",
"param",
"k_z",
":",
":",
"return",
":",
"k_index",
"of",
"the",
"(",
"k_x",
"k_y",
"k_z",
")"
] | def add_k_shift_vector(self, k_x, k_y, k_z):
"""
Add a k-shift vector
:param k_x:
:param k_y:
:param k_z:
:return: k_index of the (k_x, k_y, k_z)
"""
# check
assert isinstance(k_x, float), 'Kx is wrong'
assert isinstance(k_y, float), 'Ky is wrong'
assert isinstance(k_z, float), 'Kz is wrong'
k_shift_vector = (k_x, k_y, k_z)
self._kShiftDict[self._kVectorIndex] = [k_shift_vector, []]
# make progress
return_k_index = self._kVectorIndex
self._kVectorIndex += 1
return return_k_index | [
"def",
"add_k_shift_vector",
"(",
"self",
",",
"k_x",
",",
"k_y",
",",
"k_z",
")",
":",
"# check",
"assert",
"isinstance",
"(",
"k_x",
",",
"float",
")",
",",
"'Kx is wrong'",
"assert",
"isinstance",
"(",
"k_y",
",",
"float",
")",
",",
"'Ky is wrong'",
"assert",
"isinstance",
"(",
"k_z",
",",
"float",
")",
",",
"'Kz is wrong'",
"k_shift_vector",
"=",
"(",
"k_x",
",",
"k_y",
",",
"k_z",
")",
"self",
".",
"_kShiftDict",
"[",
"self",
".",
"_kVectorIndex",
"]",
"=",
"[",
"k_shift_vector",
",",
"[",
"]",
"]",
"# make progress",
"return_k_index",
"=",
"self",
".",
"_kVectorIndex",
"self",
".",
"_kVectorIndex",
"+=",
"1",
"return",
"return_k_index"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/HFIR_4Circle_Reduction/reduce4circleControl.py#L293-L313 |
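The method registers the vector and hands back a fresh index; a standalone sketch of that bookkeeping with a toy stand-in for `CWSCDReductionControl` (the starting index of 1 is an assumption — Mantid seeds `_kVectorIndex` elsewhere):

```python
class KShiftRegistry:
    # Toy stand-in mirroring the index bookkeeping above.
    def __init__(self):
        self._k_shift_dict = {}
        self._k_vector_index = 1  # assumed seed value

    def add_k_shift_vector(self, k_x, k_y, k_z):
        # Store the vector with an empty run list, then advance the index.
        self._k_shift_dict[self._k_vector_index] = [(k_x, k_y, k_z), []]
        index = self._k_vector_index
        self._k_vector_index += 1
        return index

reg = KShiftRegistry()
assert reg.add_k_shift_vector(0.0, 0.0, 0.5) == 1
assert reg.add_k_shift_vector(0.0, 0.5, 0.0) == 2
```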
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/dataview.py | python | DataViewIndexListModel.RowChanged | (*args, **kwargs) | return _dataview.DataViewIndexListModel_RowChanged(*args, **kwargs) | RowChanged(self, unsigned int row)
Call this after a row has been changed. | RowChanged(self, unsigned int row) | [
"RowChanged",
"(",
"self",
"unsigned",
"int",
"row",
")"
] | def RowChanged(*args, **kwargs):
"""
RowChanged(self, unsigned int row)
Call this after a row has been changed.
"""
return _dataview.DataViewIndexListModel_RowChanged(*args, **kwargs) | [
"def",
"RowChanged",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_dataview",
".",
"DataViewIndexListModel_RowChanged",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/dataview.py#L871-L877 |
|
ricardoquesada/Spidermonkey | 4a75ea2543408bd1b2c515aa95901523eeef7858 | build/upload.py | python | OptionalEnvironmentVariable | (v) | return None | Return the value of the environment variable named v, or None
if it's unset (or empty). | Return the value of the environment variable named v, or None
if it's unset (or empty). | [
"Return",
"the",
"value",
"of",
"the",
"environment",
"variable",
"named",
"v",
"or",
"None",
"if",
"it",
"s",
"unset",
"(",
"or",
"empty",
")",
"."
] | def OptionalEnvironmentVariable(v):
"""Return the value of the environment variable named v, or None
if it's unset (or empty)."""
if v in os.environ and os.environ[v] != "":
return os.environ[v]
return None | [
"def",
"OptionalEnvironmentVariable",
"(",
"v",
")",
":",
"if",
"v",
"in",
"os",
".",
"environ",
"and",
"os",
".",
"environ",
"[",
"v",
"]",
"!=",
"\"\"",
":",
"return",
"os",
".",
"environ",
"[",
"v",
"]",
"return",
"None"
] | https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/build/upload.py#L37-L42 |
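Usage is direct, and an empty string counts as unset; assuming the function above is in scope, this runs on the stdlib alone:

```python
import os

os.environ['UPLOAD_HOST'] = ''
assert OptionalEnvironmentVariable('UPLOAD_HOST') is None  # empty == unset
os.environ['UPLOAD_HOST'] = 'stage.example.com'
assert OptionalEnvironmentVariable('UPLOAD_HOST') == 'stage.example.com'
```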
|
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/graphy/graphy/backends/google_chart_api/encoders.py | python | LineChartEncoder._GetLineStyles | (self, chart) | return util.JoinLists(line_style = styles) | Get LineStyle parameters. | Get LineStyle parameters. | [
"Get",
"LineStyle",
"parameters",
"."
] | def _GetLineStyles(self, chart):
"""Get LineStyle parameters."""
styles = []
for series in chart.data:
style = series.style
if style:
styles.append('%s,%s,%s' % (style.width, style.on, style.off))
else:
# If one style is missing, they must all be missing
# TODO: Add a test for this; throw a more meaningful exception
assert (not styles)
return util.JoinLists(line_style = styles) | [
"def",
"_GetLineStyles",
"(",
"self",
",",
"chart",
")",
":",
"styles",
"=",
"[",
"]",
"for",
"series",
"in",
"chart",
".",
"data",
":",
"style",
"=",
"series",
".",
"style",
"if",
"style",
":",
"styles",
".",
"append",
"(",
"'%s,%s,%s'",
"%",
"(",
"style",
".",
"width",
",",
"style",
".",
"on",
",",
"style",
".",
"off",
")",
")",
"else",
":",
"# If one style is missing, they must all be missing",
"# TODO: Add a test for this; throw a more meaningful exception",
"assert",
"(",
"not",
"styles",
")",
"return",
"util",
".",
"JoinLists",
"(",
"line_style",
"=",
"styles",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/graphy/graphy/backends/google_chart_api/encoders.py#L220-L231 |
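Each series style collapses to a `width,on,off` triple; a self-contained sketch of that encoding with a minimal stand-in for the style object (the `'|'` join mimics what `util.JoinLists` produces and is an assumption here):

```python
from collections import namedtuple

LineStyle = namedtuple('LineStyle', 'width on off')  # minimal stand-in

def encode(styles):
    # Mirrors the '%s,%s,%s' formatting used in _GetLineStyles above.
    return '|'.join('%s,%s,%s' % (s.width, s.on, s.off) for s in styles)

print(encode([LineStyle(2, 1, 0), LineStyle(1, 6, 3)]))  # 2,1,0|1,6,3
```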
|
google/filament | d21f092645b8e1e312307cbf89f1484891347c63 | third_party/spirv-tools/utils/generate_grammar_tables.py | python | generate_capability_arrays | (caps) | return '\n'.join(arrays) | Returns the arrays of capabilities.
Arguments:
- caps: a sequence of sequences of capability names | Returns the arrays of capabilities. | [
"Returns",
"the",
"arrays",
"of",
"capabilities",
"."
] | def generate_capability_arrays(caps):
"""Returns the arrays of capabilities.
Arguments:
- caps: a sequence of sequences of capability names
"""
caps = sorted(set([tuple(c) for c in caps if c]))
arrays = [
'static const SpvCapability {}[] = {};'.format(
get_capability_array_name(c), compose_capability_list(c))
for c in caps]
return '\n'.join(arrays) | [
"def",
"generate_capability_arrays",
"(",
"caps",
")",
":",
"caps",
"=",
"sorted",
"(",
"set",
"(",
"[",
"tuple",
"(",
"c",
")",
"for",
"c",
"in",
"caps",
"if",
"c",
"]",
")",
")",
"arrays",
"=",
"[",
"'static const SpvCapability {}[] = {};'",
".",
"format",
"(",
"get_capability_array_name",
"(",
"c",
")",
",",
"compose_capability_list",
"(",
"c",
")",
")",
"for",
"c",
"in",
"caps",
"]",
"return",
"'\\n'",
".",
"join",
"(",
"arrays",
")"
] | https://github.com/google/filament/blob/d21f092645b8e1e312307cbf89f1484891347c63/third_party/spirv-tools/utils/generate_grammar_tables.py#L95-L106 |
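The helper dedupes and sorts capability tuples, then emits one C array per tuple; a runnable sketch with simplified stand-ins for the two helpers it calls (the real ones live elsewhere in the generator script):

```python
def get_capability_array_name(caps):
    # Simplified stand-in for the real naming helper.
    return 'pygen_cap_' + '_'.join(caps).lower()

def compose_capability_list(caps):
    return '{' + ', '.join('SpvCapability' + c for c in caps) + '}'

caps = [['Shader'], ['Kernel', 'Shader'], ['Shader'], []]
caps = sorted(set(tuple(c) for c in caps if c))  # drop empties, dedupe, sort
for c in caps:
    print('static const SpvCapability {}[] = {};'.format(
        get_capability_array_name(c), compose_capability_list(c)))
```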
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/setuptools/msvc.py | python | EnvironmentInfo.return_env | (self, exists=True) | return env | Return environment dict.
Parameters
----------
exists: bool
If True, only return existing paths.
Return
------
dict
environment | Return environment dict. | [
"Return",
"environment",
"dict",
"."
] | def return_env(self, exists=True):
"""
Return environment dict.
Parameters
----------
exists: bool
If True, only return existing paths.
Return
------
dict
environment
"""
env = dict(
include=self._build_paths('include',
[self.VCIncludes,
self.OSIncludes,
self.UCRTIncludes,
self.NetFxSDKIncludes],
exists),
lib=self._build_paths('lib',
[self.VCLibraries,
self.OSLibraries,
self.FxTools,
self.UCRTLibraries,
self.NetFxSDKLibraries],
exists),
libpath=self._build_paths('libpath',
[self.VCLibraries,
self.FxTools,
self.VCStoreRefs,
self.OSLibpath],
exists),
path=self._build_paths('path',
[self.VCTools,
self.VSTools,
self.VsTDb,
self.SdkTools,
self.SdkSetup,
self.FxTools,
self.MSBuild,
self.HTMLHelpWorkshop,
self.FSharp],
exists),
)
if self.vs_ver >= 14 and isfile(self.VCRuntimeRedist):
env['py_vcruntime_redist'] = self.VCRuntimeRedist
return env | [
"def",
"return_env",
"(",
"self",
",",
"exists",
"=",
"True",
")",
":",
"env",
"=",
"dict",
"(",
"include",
"=",
"self",
".",
"_build_paths",
"(",
"'include'",
",",
"[",
"self",
".",
"VCIncludes",
",",
"self",
".",
"OSIncludes",
",",
"self",
".",
"UCRTIncludes",
",",
"self",
".",
"NetFxSDKIncludes",
"]",
",",
"exists",
")",
",",
"lib",
"=",
"self",
".",
"_build_paths",
"(",
"'lib'",
",",
"[",
"self",
".",
"VCLibraries",
",",
"self",
".",
"OSLibraries",
",",
"self",
".",
"FxTools",
",",
"self",
".",
"UCRTLibraries",
",",
"self",
".",
"NetFxSDKLibraries",
"]",
",",
"exists",
")",
",",
"libpath",
"=",
"self",
".",
"_build_paths",
"(",
"'libpath'",
",",
"[",
"self",
".",
"VCLibraries",
",",
"self",
".",
"FxTools",
",",
"self",
".",
"VCStoreRefs",
",",
"self",
".",
"OSLibpath",
"]",
",",
"exists",
")",
",",
"path",
"=",
"self",
".",
"_build_paths",
"(",
"'path'",
",",
"[",
"self",
".",
"VCTools",
",",
"self",
".",
"VSTools",
",",
"self",
".",
"VsTDb",
",",
"self",
".",
"SdkTools",
",",
"self",
".",
"SdkSetup",
",",
"self",
".",
"FxTools",
",",
"self",
".",
"MSBuild",
",",
"self",
".",
"HTMLHelpWorkshop",
",",
"self",
".",
"FSharp",
"]",
",",
"exists",
")",
",",
")",
"if",
"self",
".",
"vs_ver",
">=",
"14",
"and",
"isfile",
"(",
"self",
".",
"VCRuntimeRedist",
")",
":",
"env",
"[",
"'py_vcruntime_redist'",
"]",
"=",
"self",
".",
"VCRuntimeRedist",
"return",
"env"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/setuptools/msvc.py#L1720-L1768 |
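A hedged usage sketch; `EnvironmentInfo` needs a Windows host with Visual Studio registry state, so treat this as illustrative rather than portable:

```python
from setuptools.msvc import EnvironmentInfo  # Windows-only in practice

env = EnvironmentInfo('x86').return_env(exists=True)
# Each value is a pathsep-joined string suitable for os.environ.
for key in ('include', 'lib', 'libpath', 'path'):
    print(key, '->', env[key][:60], '...')
```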
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/uuid.py | python | _windll_getnode | () | Get the hardware address on Windows using ctypes. | Get the hardware address on Windows using ctypes. | [
"Get",
"the",
"hardware",
"address",
"on",
"Windows",
"using",
"ctypes",
"."
] | def _windll_getnode():
"""Get the hardware address on Windows using ctypes."""
import ctypes
_load_system_functions()
_buffer = ctypes.create_string_buffer(16)
if _UuidCreate(_buffer) == 0:
return UUID(bytes=bytes_(_buffer.raw)).node | [
"def",
"_windll_getnode",
"(",
")",
":",
"import",
"ctypes",
"_load_system_functions",
"(",
")",
"_buffer",
"=",
"ctypes",
".",
"create_string_buffer",
"(",
"16",
")",
"if",
"_UuidCreate",
"(",
"_buffer",
")",
"==",
"0",
":",
"return",
"UUID",
"(",
"bytes",
"=",
"bytes_",
"(",
"_buffer",
".",
"raw",
")",
")",
".",
"node"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/uuid.py#L654-L660 |
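Callers normally reach this through `uuid.getnode()`, which tries `_windll_getnode` among other strategies and falls back to a random node; runnable on any platform:

```python
import uuid

node = uuid.getnode()  # may come from _windll_getnode on Windows
print('MAC-style node: %012x' % node)
```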
||
vslavik/poedit | f7a9daa0a10037e090aa0a86f5ce0f24ececdf6a | deps/boost/tools/build/src/build/targets.py | python | BasicTarget.match | (self, property_set_, debug) | Returns the alternative condition for this alternative, if
the condition is satisfied by 'property_set'. | Returns the alternative condition for this alternative, if
the condition is satisfied by 'property_set'. | [
"Returns",
"the",
"alternative",
"condition",
"for",
"this",
"alternative",
"if",
"the",
"condition",
"is",
"satisfied",
"by",
"property_set",
"."
] | def match (self, property_set_, debug):
""" Returns the alternative condition for this alternative, if
the condition is satisfied by 'property_set'.
"""
# The condition is composed of all base non-conditional properties.
# It's not clear if we should expand 'self.requirements_' or not.
# For one thing, it would be nice to be able to put
# <toolset>msvc-6.0
# in requirements.
# On the other hand, if we have <variant>release in condition it
# does not make sense to require <optimization>full to be in
# build request just to select this variant.
assert isinstance(property_set_, property_set.PropertySet)
bcondition = self.requirements_.base ()
ccondition = self.requirements_.conditional ()
condition = b2.util.set.difference (bcondition, ccondition)
if debug:
print " next alternative: required properties:", [str(p) for p in condition]
if b2.util.set.contains (condition, property_set_.all()):
if debug:
print " matched"
return condition
else:
return None | [
"def",
"match",
"(",
"self",
",",
"property_set_",
",",
"debug",
")",
":",
"# The condition is composed of all base non-conditional properties.",
"# It's not clear if we should expand 'self.requirements_' or not.",
"# For one thing, it would be nice to be able to put",
"# <toolset>msvc-6.0",
"# in requirements.",
"# On the other hand, if we have <variant>release in condition it",
"# does not make sense to require <optimization>full to be in",
"# build request just to select this variant.",
"assert",
"isinstance",
"(",
"property_set_",
",",
"property_set",
".",
"PropertySet",
")",
"bcondition",
"=",
"self",
".",
"requirements_",
".",
"base",
"(",
")",
"ccondition",
"=",
"self",
".",
"requirements_",
".",
"conditional",
"(",
")",
"condition",
"=",
"b2",
".",
"util",
".",
"set",
".",
"difference",
"(",
"bcondition",
",",
"ccondition",
")",
"if",
"debug",
":",
"print",
"\" next alternative: required properties:\"",
",",
"[",
"str",
"(",
"p",
")",
"for",
"p",
"in",
"condition",
"]",
"if",
"b2",
".",
"util",
".",
"set",
".",
"contains",
"(",
"condition",
",",
"property_set_",
".",
"all",
"(",
")",
")",
":",
"if",
"debug",
":",
"print",
"\" matched\"",
"return",
"condition",
"else",
":",
"return",
"None"
] | https://github.com/vslavik/poedit/blob/f7a9daa0a10037e090aa0a86f5ce0f24ececdf6a/deps/boost/tools/build/src/build/targets.py#L1103-L1131 |
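An alternative matches when its base (non-conditional) requirements are a subset of the build request; a minimal sketch of that test with plain sets (the `b2.util.set` helpers reduce to difference and containment):

```python
def matches(requirements, conditional, request):
    # Condition = base requirements minus conditional ones;
    # it matches when every remaining property is in the request.
    condition = set(requirements) - set(conditional)
    return condition if condition <= set(request) else None

print(matches(['<variant>release', '<os>NT'], ['<os>NT'],
              ['<variant>release', '<optimization>full']))
# -> {'<variant>release'}
```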
||
Polidea/SiriusObfuscator | b0e590d8130e97856afe578869b83a209e2b19be | SymbolExtractorAndRenamer/swift/utils/gyb.py | python | strip_trailing_nl | (s) | return s[:-1] if s.endswith('\n') else s | If s ends with a newline, drop it; else return s intact | If s ends with a newline, drop it; else return s intact | [
"If",
"s",
"ends",
"with",
"a",
"newline",
"drop",
"it",
";",
"else",
"return",
"s",
"intact"
] | def strip_trailing_nl(s):
"""If s ends with a newline, drop it; else return s intact"""
return s[:-1] if s.endswith('\n') else s | [
"def",
"strip_trailing_nl",
"(",
"s",
")",
":",
"return",
"s",
"[",
":",
"-",
"1",
"]",
"if",
"s",
".",
"endswith",
"(",
"'\\n'",
")",
"else",
"s"
] | https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/swift/utils/gyb.py#L40-L42 |
|
miyosuda/TensorFlowAndroidDemo | 35903e0221aa5f109ea2dbef27f20b52e317f42d | jni-build/jni/include/tensorflow/python/framework/tensor_shape.py | python | as_dimension | (value) | Converts the given value to a Dimension.
A Dimension input will be returned unmodified.
An input of `None` will be converted to an unknown Dimension.
An integer input will be converted to a Dimension with that value.
Args:
value: The value to be converted.
Returns:
A Dimension corresponding to the given value. | Converts the given value to a Dimension. | [
"Converts",
"the",
"given",
"value",
"to",
"a",
"Dimension",
"."
] | def as_dimension(value):
"""Converts the given value to a Dimension.
A Dimension input will be returned unmodified.
An input of `None` will be converted to an unknown Dimension.
An integer input will be converted to a Dimension with that value.
Args:
value: The value to be converted.
Returns:
A Dimension corresponding to the given value.
"""
if isinstance(value, Dimension):
return value
else:
return Dimension(value) | [
"def",
"as_dimension",
"(",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"Dimension",
")",
":",
"return",
"value",
"else",
":",
"return",
"Dimension",
"(",
"value",
")"
] | https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/python/framework/tensor_shape.py#L358-L374 |
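A quick usage sketch against the TF1-era `tensor_shape` module (the import path follows the record's source file):

```python
from tensorflow.python.framework import tensor_shape

print(tensor_shape.as_dimension(8))     # Dimension with value 8
print(tensor_shape.as_dimension(None))  # unknown Dimension
dim = tensor_shape.Dimension(3)
assert tensor_shape.as_dimension(dim) is dim  # passed through unmodified
```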
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/_misc.py | python | Joystick.GetPollingMin | (*args, **kwargs) | return _misc_.Joystick_GetPollingMin(*args, **kwargs) | GetPollingMin(self) -> int | GetPollingMin(self) -> int | [
"GetPollingMin",
"(",
"self",
")",
"-",
">",
"int"
] | def GetPollingMin(*args, **kwargs):
"""GetPollingMin(self) -> int"""
return _misc_.Joystick_GetPollingMin(*args, **kwargs) | [
"def",
"GetPollingMin",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"Joystick_GetPollingMin",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_misc.py#L2226-L2228 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/propgrid.py | python | EditEnumProperty.__init__ | (self, *args) | __init__(self, String label, String name, wxChar labels, long values,
String value) -> EditEnumProperty
__init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL),
wxArrayString labels=wxArrayString(),
wxArrayInt values=wxArrayInt(),
String value=wxEmptyString) -> EditEnumProperty
__init__(self, String label, String name, PGChoices choices, String value=wxEmptyString) -> EditEnumProperty
__init__(self, String label, String name, wxChar labels, long values,
PGChoices choicesCache, String value) -> EditEnumProperty | __init__(self, String label, String name, wxChar labels, long values,
String value) -> EditEnumProperty
__init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL),
wxArrayString labels=wxArrayString(),
wxArrayInt values=wxArrayInt(),
String value=wxEmptyString) -> EditEnumProperty
__init__(self, String label, String name, PGChoices choices, String value=wxEmptyString) -> EditEnumProperty
__init__(self, String label, String name, wxChar labels, long values,
PGChoices choicesCache, String value) -> EditEnumProperty | [
"__init__",
"(",
"self",
"String",
"label",
"String",
"name",
"wxChar",
"labels",
"long",
"values",
"String",
"value",
")",
"-",
">",
"EditEnumProperty",
"__init__",
"(",
"self",
"String",
"label",
"=",
"(",
"*",
"wxPGProperty",
"::",
"sm_wxPG_LABEL",
")",
"String",
"name",
"=",
"(",
"*",
"wxPGProperty",
"::",
"sm_wxPG_LABEL",
")",
"wxArrayString",
"labels",
"=",
"wxArrayString",
"()",
"wxArrayInt",
"values",
"=",
"wxArrayInt",
"()",
"String",
"value",
"=",
"wxEmptyString",
")",
"-",
">",
"EditEnumProperty",
"__init__",
"(",
"self",
"String",
"label",
"String",
"name",
"PGChoices",
"choices",
"String",
"value",
"=",
"wxEmptyString",
")",
"-",
">",
"EditEnumProperty",
"__init__",
"(",
"self",
"String",
"label",
"String",
"name",
"wxChar",
"labels",
"long",
"values",
"PGChoices",
"choicesCache",
"String",
"value",
")",
"-",
">",
"EditEnumProperty"
] | def __init__(self, *args):
"""
__init__(self, String label, String name, wxChar labels, long values,
String value) -> EditEnumProperty
__init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL),
wxArrayString labels=wxArrayString(),
wxArrayInt values=wxArrayInt(),
String value=wxEmptyString) -> EditEnumProperty
__init__(self, String label, String name, PGChoices choices, String value=wxEmptyString) -> EditEnumProperty
__init__(self, String label, String name, wxChar labels, long values,
PGChoices choicesCache, String value) -> EditEnumProperty
"""
_propgrid.EditEnumProperty_swiginit(self,_propgrid.new_EditEnumProperty(*args)) | [
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
")",
":",
"_propgrid",
".",
"EditEnumProperty_swiginit",
"(",
"self",
",",
"_propgrid",
".",
"new_EditEnumProperty",
"(",
"*",
"args",
")",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/propgrid.py#L3015-L3027 |
||
htcondor/htcondor | 4829724575176d1d6c936e4693dfd78a728569b0 | src/condor_contrib/condor_pigeon/src/condor_pigeon_client/skype_linux_tools/twitter.py | python | Status.AsDict | (self) | return data | A dict representation of this twitter.Status instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.Status instance | A dict representation of this twitter.Status instance. | [
"A",
"dict",
"representation",
"of",
"this",
"twitter",
".",
"Status",
"instance",
"."
] | def AsDict(self):
'''A dict representation of this twitter.Status instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.Status instance
'''
data = {}
if self.created_at:
data['created_at'] = self.created_at
if self.id:
data['id'] = self.id
if self.text:
data['text'] = self.text
if self.user:
data['user'] = self.user.AsDict()
return data | [
"def",
"AsDict",
"(",
"self",
")",
":",
"data",
"=",
"{",
"}",
"if",
"self",
".",
"created_at",
":",
"data",
"[",
"'created_at'",
"]",
"=",
"self",
".",
"created_at",
"if",
"self",
".",
"id",
":",
"data",
"[",
"'id'",
"]",
"=",
"self",
".",
"id",
"if",
"self",
".",
"text",
":",
"data",
"[",
"'text'",
"]",
"=",
"self",
".",
"text",
"if",
"self",
".",
"user",
":",
"data",
"[",
"'user'",
"]",
"=",
"self",
".",
"user",
".",
"AsDict",
"(",
")",
"return",
"data"
] | https://github.com/htcondor/htcondor/blob/4829724575176d1d6c936e4693dfd78a728569b0/src/condor_contrib/condor_pigeon/src/condor_pigeon_client/skype_linux_tools/twitter.py#L249-L266 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/xrc.py | python | XmlSubclassFactory.__init__ | (self, *args, **kwargs) | __init__(self) -> XmlSubclassFactory | __init__(self) -> XmlSubclassFactory | [
"__init__",
"(",
"self",
")",
"-",
">",
"XmlSubclassFactory"
] | def __init__(self, *args, **kwargs):
"""__init__(self) -> XmlSubclassFactory"""
_xrc.XmlSubclassFactory_swiginit(self,_xrc.new_XmlSubclassFactory(*args, **kwargs))
XmlSubclassFactory._setCallbackInfo(self, self, XmlSubclassFactory) | [
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"_xrc",
".",
"XmlSubclassFactory_swiginit",
"(",
"self",
",",
"_xrc",
".",
"new_XmlSubclassFactory",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
")",
"XmlSubclassFactory",
".",
"_setCallbackInfo",
"(",
"self",
",",
"self",
",",
"XmlSubclassFactory",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/xrc.py#L278-L281 |
||
TGAC/KAT | e8870331de2b4bb0a1b3b91c6afb8fb9d59e9216 | deps/boost/tools/build/src/build/property.py | python | PropertyMap.find | (self, properties) | return self.find_replace (properties) | Return the value associated with properties
or any subset of it. If more than one
subset has value assigned to it, return the
value for the longest subset, if it's unique. | Return the value associated with properties
or any subset of it. If more than one
subset has value assigned to it, return the
value for the longest subset, if it's unique. | [
"Return",
"the",
"value",
"associated",
"with",
"properties",
"or",
"any",
"subset",
"of",
"it",
".",
"If",
"more",
"than",
"one",
"subset",
"has",
"value",
"assigned",
"to",
"it",
"return",
"the",
"value",
"for",
"the",
"longest",
"subset",
"if",
"it",
"s",
"unique",
"."
] | def find (self, properties):
""" Return the value associated with properties
or any subset of it. If more than one
subset has value assigned to it, return the
value for the longest subset, if it's unique.
"""
assert is_iterable_typed(properties, basestring)
return self.find_replace (properties) | [
"def",
"find",
"(",
"self",
",",
"properties",
")",
":",
"assert",
"is_iterable_typed",
"(",
"properties",
",",
"basestring",
")",
"return",
"self",
".",
"find_replace",
"(",
"properties",
")"
] | https://github.com/TGAC/KAT/blob/e8870331de2b4bb0a1b3b91c6afb8fb9d59e9216/deps/boost/tools/build/src/build/property.py#L598-L605 |
|
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqt/mantidqt/plotting/markers.py | python | VerticalMarker.mouse_move_stop | (self) | Stop moving. | Stop moving. | [
"Stop",
"moving",
"."
] | def mouse_move_stop(self):
"""
Stop moving.
"""
self.is_moving = False | [
"def",
"mouse_move_stop",
"(",
"self",
")",
":",
"self",
".",
"is_moving",
"=",
"False"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqt/mantidqt/plotting/markers.py#L339-L343 |
||
H-uru/Plasma | c2140ea046e82e9c199e257a7f2e7edb42602871 | Scripts/Python/xDialogClothingBB.py | python | xDialogClothingBB.IWhatShirtAmIWearing | (self,avatar) | return kNoShirtIdx | Find out what shirt we are already wearing - returns index | Find out what shirt we are already wearing - returns index | [
"Find",
"out",
"what",
"shirt",
"we",
"are",
"already",
"wearing",
"-",
"returns",
"index"
] | def IWhatShirtAmIWearing(self,avatar):
"Find out what shirt we are already wearing - returns index"
global ShirtNames
worn = avatar.avatar.getAvatarClothingList()
#PtDebugPrint("xDialogClothingBB: I am currently wearing ",worn)
for item in worn:
try:
shirtIdx = ShirtNames.index(item)
return shirtIdx
except ValueError:
# see if its a shirt... maybe they are wearing something that is not in their closet
if item[-len(kShirtIdentifier):] == kShirtIdentifier:
ShirtNames.append(item)
try:
shirtIdx = ShirtNames.index(item)
return shirtIdx
except ValueError:
pass
return kNoShirtIdx | [
"def",
"IWhatShirtAmIWearing",
"(",
"self",
",",
"avatar",
")",
":",
"global",
"ShirtNames",
"worn",
"=",
"avatar",
".",
"avatar",
".",
"getAvatarClothingList",
"(",
")",
"#PtDebugPrint(\"xDialogClothingBB: I am currently wearing \",worn)",
"for",
"item",
"in",
"worn",
":",
"try",
":",
"shirtIdx",
"=",
"ShirtNames",
".",
"index",
"(",
"item",
")",
"return",
"shirtIdx",
"except",
"ValueError",
":",
"# see if its a shirt... maybe they are wearing something that is not in their closet",
"if",
"item",
"[",
"-",
"len",
"(",
"kShirtIdentifier",
")",
":",
"]",
"==",
"kShirtIdentifier",
":",
"ShirtNames",
".",
"append",
"(",
"item",
")",
"try",
":",
"shirtIdx",
"=",
"ShirtNames",
".",
"index",
"(",
"item",
")",
"return",
"shirtIdx",
"except",
"ValueError",
":",
"pass",
"return",
"kNoShirtIdx"
] | https://github.com/H-uru/Plasma/blob/c2140ea046e82e9c199e257a7f2e7edb42602871/Scripts/Python/xDialogClothingBB.py#L195-L213 |
|
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/closure_linter/closure_linter/tokenutil.py | python | InsertTokenAfter | (new_token, token) | Insert new_token after token.
Args:
new_token: A token to be added to the stream
token: A token already in the stream | Insert new_token after token. | [
"Insert",
"new_token",
"after",
"token",
"."
] | def InsertTokenAfter(new_token, token):
"""Insert new_token after token.
Args:
new_token: A token to be added to the stream
token: A token already in the stream
"""
new_token.previous = token
new_token.next = token.next
new_token.metadata = copy.copy(token.metadata)
if token.IsCode():
new_token.metadata.last_code = token
if new_token.IsCode():
following_token = token.next
while following_token and following_token.metadata.last_code == token:
following_token.metadata.last_code = new_token
following_token = following_token.next
token.next = new_token
if new_token.next:
new_token.next.previous = new_token
if new_token.start_index is None:
if new_token.line_number == token.line_number:
new_token.start_index = token.start_index + len(token.string)
else:
new_token.start_index = 0
iterator = new_token.next
while iterator and iterator.line_number == new_token.line_number:
iterator.start_index += len(new_token.string)
iterator = iterator.next | [
"def",
"InsertTokenAfter",
"(",
"new_token",
",",
"token",
")",
":",
"new_token",
".",
"previous",
"=",
"token",
"new_token",
".",
"next",
"=",
"token",
".",
"next",
"new_token",
".",
"metadata",
"=",
"copy",
".",
"copy",
"(",
"token",
".",
"metadata",
")",
"if",
"token",
".",
"IsCode",
"(",
")",
":",
"new_token",
".",
"metadata",
".",
"last_code",
"=",
"token",
"if",
"new_token",
".",
"IsCode",
"(",
")",
":",
"following_token",
"=",
"token",
".",
"next",
"while",
"following_token",
"and",
"following_token",
".",
"metadata",
".",
"last_code",
"==",
"token",
":",
"following_token",
".",
"metadata",
".",
"last_code",
"=",
"new_token",
"following_token",
"=",
"following_token",
".",
"next",
"token",
".",
"next",
"=",
"new_token",
"if",
"new_token",
".",
"next",
":",
"new_token",
".",
"next",
".",
"previous",
"=",
"new_token",
"if",
"new_token",
".",
"start_index",
"is",
"None",
":",
"if",
"new_token",
".",
"line_number",
"==",
"token",
".",
"line_number",
":",
"new_token",
".",
"start_index",
"=",
"token",
".",
"start_index",
"+",
"len",
"(",
"token",
".",
"string",
")",
"else",
":",
"new_token",
".",
"start_index",
"=",
"0",
"iterator",
"=",
"new_token",
".",
"next",
"while",
"iterator",
"and",
"iterator",
".",
"line_number",
"==",
"new_token",
".",
"line_number",
":",
"iterator",
".",
"start_index",
"+=",
"len",
"(",
"new_token",
".",
"string",
")",
"iterator",
"=",
"iterator",
".",
"next"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/closure_linter/closure_linter/tokenutil.py#L290-L324 |
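The tail of `InsertTokenAfter` shifts the `start_index` of every later token on the same line; a toy, runnable sketch of just that reindexing rule (the `Tok` class is a minimal stand-in, not the linter's token type):

```python
class Tok:
    # Minimal stand-in with just the fields the reindexing loop touches.
    def __init__(self, string, start_index, line_number):
        self.string, self.start_index, self.line_number = string, start_index, line_number
        self.next = None

a = Tok('foo', 0, 1); b = Tok('bar', 3, 1); a.next = b
new = Tok('++', 3, 1); new.next = b  # pretend '++' was inserted after 'foo'
it = new.next
while it and it.line_number == new.line_number:
    it.start_index += len(new.string)  # same-line followers shift right
    it = it.next
print(b.start_index)  # 5
```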
||
HKUST-Aerial-Robotics/VINS-Mobile | ab1c4ffb8d3ea1fcc01c4f5c651abc0c390807f9 | VINS_ThirdPartyLib/ceres-solver/internal/ceres/generate_eliminator_specialization.py | python | Specialize | () | Generate specialization code and the conditionals to instantiate it. | Generate specialization code and the conditionals to instantiate it. | [
"Generate",
"specialization",
"code",
"and",
"the",
"conditionals",
"to",
"instantiate",
"it",
"."
] | def Specialize():
"""
Generate specialization code and the conditionals to instantiate it.
"""
f = open("schur_eliminator.cc", "w")
f.write(HEADER)
f.write(FACTORY_FILE_HEADER)
for row_block_size, e_block_size, f_block_size in SPECIALIZATIONS:
output = SpecializationFilename("generated/schur_eliminator",
row_block_size,
e_block_size,
f_block_size) + ".cc"
fptr = open(output, "w")
fptr.write(HEADER)
template = SPECIALIZATION_FILE
if (row_block_size == "Eigen::Dynamic" and
e_block_size == "Eigen::Dynamic" and
f_block_size == "Eigen::Dynamic"):
template = DYNAMIC_FILE
fptr.write(template % (row_block_size, e_block_size, f_block_size))
fptr.close()
f.write(FACTORY_CONDITIONAL % (row_block_size,
e_block_size,
f_block_size,
row_block_size,
e_block_size,
f_block_size))
f.write(FACTORY_FOOTER)
f.close() | [
"def",
"Specialize",
"(",
")",
":",
"f",
"=",
"open",
"(",
"\"schur_eliminator.cc\"",
",",
"\"w\"",
")",
"f",
".",
"write",
"(",
"HEADER",
")",
"f",
".",
"write",
"(",
"FACTORY_FILE_HEADER",
")",
"for",
"row_block_size",
",",
"e_block_size",
",",
"f_block_size",
"in",
"SPECIALIZATIONS",
":",
"output",
"=",
"SpecializationFilename",
"(",
"\"generated/schur_eliminator\"",
",",
"row_block_size",
",",
"e_block_size",
",",
"f_block_size",
")",
"+",
"\".cc\"",
"fptr",
"=",
"open",
"(",
"output",
",",
"\"w\"",
")",
"fptr",
".",
"write",
"(",
"HEADER",
")",
"template",
"=",
"SPECIALIZATION_FILE",
"if",
"(",
"row_block_size",
"==",
"\"Eigen::Dynamic\"",
"and",
"e_block_size",
"==",
"\"Eigen::Dynamic\"",
"and",
"f_block_size",
"==",
"\"Eigen::Dynamic\"",
")",
":",
"template",
"=",
"DYNAMIC_FILE",
"fptr",
".",
"write",
"(",
"template",
"%",
"(",
"row_block_size",
",",
"e_block_size",
",",
"f_block_size",
")",
")",
"fptr",
".",
"close",
"(",
")",
"f",
".",
"write",
"(",
"FACTORY_CONDITIONAL",
"%",
"(",
"row_block_size",
",",
"e_block_size",
",",
"f_block_size",
",",
"row_block_size",
",",
"e_block_size",
",",
"f_block_size",
")",
")",
"f",
".",
"write",
"(",
"FACTORY_FOOTER",
")",
"f",
".",
"close",
"(",
")"
] | https://github.com/HKUST-Aerial-Robotics/VINS-Mobile/blob/ab1c4ffb8d3ea1fcc01c4f5c651abc0c390807f9/VINS_ThirdPartyLib/ceres-solver/internal/ceres/generate_eliminator_specialization.py#L195-L227 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/setuptools/sandbox.py | python | override_temp | (replacement) | Monkey-patch tempfile.tempdir with replacement, ensuring it exists | Monkey-patch tempfile.tempdir with replacement, ensuring it exists | [
"Monkey",
"-",
"patch",
"tempfile",
".",
"tempdir",
"with",
"replacement",
"ensuring",
"it",
"exists"
] | def override_temp(replacement):
"""
Monkey-patch tempfile.tempdir with replacement, ensuring it exists
"""
pkg_resources.py31compat.makedirs(replacement, exist_ok=True)
saved = tempfile.tempdir
tempfile.tempdir = replacement
try:
yield
finally:
tempfile.tempdir = saved | [
"def",
"override_temp",
"(",
"replacement",
")",
":",
"pkg_resources",
".",
"py31compat",
".",
"makedirs",
"(",
"replacement",
",",
"exist_ok",
"=",
"True",
")",
"saved",
"=",
"tempfile",
".",
"tempdir",
"tempfile",
".",
"tempdir",
"=",
"replacement",
"try",
":",
"yield",
"finally",
":",
"tempfile",
".",
"tempdir",
"=",
"saved"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/setuptools/sandbox.py#L69-L82 |
||
apple/swift-lldb | d74be846ef3e62de946df343e8c234bde93a8912 | scripts/Python/static-binding/lldb.py | python | SBSection.GetPermissions | (self) | return _lldb.SBSection_GetPermissions(self) | GetPermissions(SBSection self) -> uint32_t | GetPermissions(SBSection self) -> uint32_t | [
"GetPermissions",
"(",
"SBSection",
"self",
")",
"-",
">",
"uint32_t"
] | def GetPermissions(self):
"""GetPermissions(SBSection self) -> uint32_t"""
return _lldb.SBSection_GetPermissions(self) | [
"def",
"GetPermissions",
"(",
"self",
")",
":",
"return",
"_lldb",
".",
"SBSection_GetPermissions",
"(",
"self",
")"
] | https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L9325-L9327 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/cuda/cudadrv/driver.py | python | Context.enable_peer_access | (self, peer_context, flags=0) | Enable peer access between the current context and the peer context | Enable peer access between the current context and the peer context | [
"Enable",
"peer",
"access",
"between",
"the",
"current",
"context",
"and",
"the",
"peer",
"context"
] | def enable_peer_access(self, peer_context, flags=0):
"""Enable peer access between the current context and the peer context
"""
assert flags == 0, '*flags* is reserved and MUST be zero'
driver.cuCtxEnablePeerAccess(peer_context, flags) | [
"def",
"enable_peer_access",
"(",
"self",
",",
"peer_context",
",",
"flags",
"=",
"0",
")",
":",
"assert",
"flags",
"==",
"0",
",",
"'*flags* is reserved and MUST be zero'",
"driver",
".",
"cuCtxEnablePeerAccess",
"(",
"peer_context",
",",
"flags",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/cuda/cudadrv/driver.py#L863-L867 |
||
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/MimeWriter.py | python | MimeWriter.nextpart | (self) | return self.__class__(self._fp) | Returns a new instance of MimeWriter which represents an
individual part in a multipart message.
This may be used to write the part as well as used for creating
recursively complex multipart messages. The message must first be
initialized with the startmultipartbody() method before using the
nextpart() method. | Returns a new instance of MimeWriter which represents an
individual part in a multipart message. | [
"Returns",
"a",
"new",
"instance",
"of",
"MimeWriter",
"which",
"represents",
"an",
"individual",
"part",
"in",
"a",
"multipart",
"message",
"."
] | def nextpart(self):
"""Returns a new instance of MimeWriter which represents an
individual part in a multipart message.
This may be used to write the part as well as used for creating
recursively complex multipart messages. The message must first be
initialized with the startmultipartbody() method before using the
nextpart() method.
"""
self._fp.write("\n--" + self._boundary + "\n")
return self.__class__(self._fp) | [
"def",
"nextpart",
"(",
"self",
")",
":",
"self",
".",
"_fp",
".",
"write",
"(",
"\"\\n--\"",
"+",
"self",
".",
"_boundary",
"+",
"\"\\n\"",
")",
"return",
"self",
".",
"__class__",
"(",
"self",
".",
"_fp",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/MimeWriter.py#L163-L174 |
|
libornovax/master_thesis_code | 6eca474ed3cae673afde010caef338cf7349f839 | caffe/scripts/cpp_lint.py | python | CheckCaffeRandom | (filename, clean_lines, linenum, error) | Checks for calls to C random functions (rand, rand_r, random, ...).
Caffe code should (almost) always use the caffe_rng_* functions rather
than these, as the internal state of these C functions is independent of the
native Caffe RNG system which should produce deterministic results for a
fixed Caffe seed set using Caffe::set_random_seed(...).
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found. | Checks for calls to C random functions (rand, rand_r, random, ...). | [
"Checks",
"for",
"calls",
"to",
"C",
"random",
"functions",
"(",
"rand",
"rand_r",
"random",
"...",
")",
"."
] | def CheckCaffeRandom(filename, clean_lines, linenum, error):
"""Checks for calls to C random functions (rand, rand_r, random, ...).
Caffe code should (almost) always use the caffe_rng_* functions rather
than these, as the internal state of these C functions is independent of the
native Caffe RNG system which should produce deterministic results for a
fixed Caffe seed set using Caffe::set_random_seed(...).
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
for function in c_random_function_list:
ix = line.find(function)
# Comparisons made explicit for clarity -- pylint: disable=g-explicit-bool-comparison
if ix >= 0 and (ix == 0 or (not line[ix - 1].isalnum() and
line[ix - 1] not in ('_', '.', '>'))):
error(filename, linenum, 'caffe/random_fn', 2,
'Use caffe_rng_rand() (or other caffe_rng_* function) instead of '
+ function +
') to ensure results are deterministic for a fixed Caffe seed.') | [
"def",
"CheckCaffeRandom",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"for",
"function",
"in",
"c_random_function_list",
":",
"ix",
"=",
"line",
".",
"find",
"(",
"function",
")",
"# Comparisons made explicit for clarity -- pylint: disable=g-explicit-bool-comparison",
"if",
"ix",
">=",
"0",
"and",
"(",
"ix",
"==",
"0",
"or",
"(",
"not",
"line",
"[",
"ix",
"-",
"1",
"]",
".",
"isalnum",
"(",
")",
"and",
"line",
"[",
"ix",
"-",
"1",
"]",
"not",
"in",
"(",
"'_'",
",",
"'.'",
",",
"'>'",
")",
")",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'caffe/random_fn'",
",",
"2",
",",
"'Use caffe_rng_rand() (or other caffe_rng_* function) instead of '",
"+",
"function",
"+",
"') to ensure results are deterministic for a fixed Caffe seed.'",
")"
] | https://github.com/libornovax/master_thesis_code/blob/6eca474ed3cae673afde010caef338cf7349f839/caffe/scripts/cpp_lint.py#L1640-L1663 |
||
Lavender105/DFF | 152397cec4a3dac2aa86e92a65cc27e6c8016ab9 | pytorch-encoding/encoding/nn/syncbn.py | python | _SyncBatchNorm._data_parallel_master | (self, intermediates) | return outputs | Reduce the sum and square-sum, compute the statistics, and broadcast it. | Reduce the sum and square-sum, compute the statistics, and broadcast it. | [
"Reduce",
"the",
"sum",
"and",
"square",
"-",
"sum",
"compute",
"the",
"statistics",
"and",
"broadcast",
"it",
"."
] | def _data_parallel_master(self, intermediates):
"""Reduce the sum and square-sum, compute the statistics, and broadcast it."""
# Always using same "device order" makes the ReduceAdd operation faster.
# Thanks to:: Tete Xiao (http://tetexiao.com/)
intermediates = sorted(intermediates, key=lambda i: i[1].sum.get_device())
to_reduce = [i[1][:2] for i in intermediates]
to_reduce = [j for i in to_reduce for j in i] # flatten
target_gpus = [i[1].sum.get_device() for i in intermediates]
sum_size = sum([i[1].sum_size for i in intermediates])
sum_, ssum = ReduceAddCoalesced.apply(target_gpus[0], 2, *to_reduce)
mean, inv_std = self._compute_mean_std(sum_, ssum, sum_size)
broadcasted = Broadcast.apply(target_gpus, mean, inv_std)
outputs = []
for i, rec in enumerate(intermediates):
outputs.append((rec[0], _MasterMessage(*broadcasted[i*2:i*2+2])))
return outputs | [
"def",
"_data_parallel_master",
"(",
"self",
",",
"intermediates",
")",
":",
"# Always using same \"device order\" makes the ReduceAdd operation faster.",
"# Thanks to:: Tete Xiao (http://tetexiao.com/)",
"intermediates",
"=",
"sorted",
"(",
"intermediates",
",",
"key",
"=",
"lambda",
"i",
":",
"i",
"[",
"1",
"]",
".",
"sum",
".",
"get_device",
"(",
")",
")",
"to_reduce",
"=",
"[",
"i",
"[",
"1",
"]",
"[",
":",
"2",
"]",
"for",
"i",
"in",
"intermediates",
"]",
"to_reduce",
"=",
"[",
"j",
"for",
"i",
"in",
"to_reduce",
"for",
"j",
"in",
"i",
"]",
"# flatten",
"target_gpus",
"=",
"[",
"i",
"[",
"1",
"]",
".",
"sum",
".",
"get_device",
"(",
")",
"for",
"i",
"in",
"intermediates",
"]",
"sum_size",
"=",
"sum",
"(",
"[",
"i",
"[",
"1",
"]",
".",
"sum_size",
"for",
"i",
"in",
"intermediates",
"]",
")",
"sum_",
",",
"ssum",
"=",
"ReduceAddCoalesced",
".",
"apply",
"(",
"target_gpus",
"[",
"0",
"]",
",",
"2",
",",
"*",
"to_reduce",
")",
"mean",
",",
"inv_std",
"=",
"self",
".",
"_compute_mean_std",
"(",
"sum_",
",",
"ssum",
",",
"sum_size",
")",
"broadcasted",
"=",
"Broadcast",
".",
"apply",
"(",
"target_gpus",
",",
"mean",
",",
"inv_std",
")",
"outputs",
"=",
"[",
"]",
"for",
"i",
",",
"rec",
"in",
"enumerate",
"(",
"intermediates",
")",
":",
"outputs",
".",
"append",
"(",
"(",
"rec",
"[",
"0",
"]",
",",
"_MasterMessage",
"(",
"*",
"broadcasted",
"[",
"i",
"*",
"2",
":",
"i",
"*",
"2",
"+",
"2",
"]",
")",
")",
")",
"return",
"outputs"
] | https://github.com/Lavender105/DFF/blob/152397cec4a3dac2aa86e92a65cc27e6c8016ab9/pytorch-encoding/encoding/nn/syncbn.py#L70-L91 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/requests/cookies.py | python | RequestsCookieJar.list_domains | (self) | return domains | Utility method to list all the domains in the jar. | Utility method to list all the domains in the jar. | [
"Utility",
"method",
"to",
"list",
"all",
"the",
"domains",
"in",
"the",
"jar",
"."
] | def list_domains(self):
"""Utility method to list all the domains in the jar."""
domains = []
for cookie in iter(self):
if cookie.domain not in domains:
domains.append(cookie.domain)
return domains | [
"def",
"list_domains",
"(",
"self",
")",
":",
"domains",
"=",
"[",
"]",
"for",
"cookie",
"in",
"iter",
"(",
"self",
")",
":",
"if",
"cookie",
".",
"domain",
"not",
"in",
"domains",
":",
"domains",
".",
"append",
"(",
"cookie",
".",
"domain",
")",
"return",
"domains"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/requests/cookies.py#L270-L276 |
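Runnable usage against `requests`' cookie jar; note the result is deduplicated in first-seen order:

```python
from requests.cookies import RequestsCookieJar

jar = RequestsCookieJar()
jar.set('a', '1', domain='example.com')
jar.set('b', '2', domain='api.example.com')
jar.set('c', '3', domain='example.com')
print(jar.list_domains())  # ['example.com', 'api.example.com']
```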
|
microsoft/CNTK | e9396480025b9ca457d26b6f33dd07c474c6aa04 | Examples/Image/Detection/utils/nms_wrapper.py | python | apply_nms_to_test_set_results | (all_boxes, nms_threshold, conf_threshold, use_gpu_nms, device_id) | return nms_boxes, nms_keepIndices | Applies nms to the results of multiple images.
Args:
all_boxes: shape of all_boxes: e.g. 21 classes x 4952 images x 58 rois x 5 coords+score
nms_threshold: the threshold for discarding overlapping ROIs in nms
conf_threshold: a minimum value for the score of an ROI. ROIs with lower score will be discarded
Returns:
nms_boxes - the reduced set of rois after nms
nmsKeepIndices - the indices of the ROIs to keep after nms | Applies nms to the results of multiple images. | [
"Applies",
"nms",
"to",
"the",
"results",
"of",
"multiple",
"images",
"."
] | def apply_nms_to_test_set_results(all_boxes, nms_threshold, conf_threshold, use_gpu_nms, device_id):
'''
Applies nms to the results of multiple images.
Args:
all_boxes: shape of all_boxes: e.g. 21 classes x 4952 images x 58 rois x 5 coords+score
nms_threshold: the threshold for discarding overlapping ROIs in nms
conf_threshold: a minimum value for the score of an ROI. ROIs with lower score will be discarded
Returns:
nms_boxes - the reduced set of rois after nms
nmsKeepIndices - the indices of the ROIs to keep after nms
'''
num_classes = len(all_boxes)
num_images = len(all_boxes[0])
nms_boxes = [[[] for _ in range(num_images)]
for _ in range(num_classes)]
nms_keepIndices = [[[] for _ in range(num_images)]
for _ in range(num_classes)]
for cls_ind in range(num_classes):
for im_ind in range(num_images):
dets = all_boxes[cls_ind][im_ind]
if len(dets) == 0:
continue
if len(dets) == 1:
keep = [0]
else:
keep = nms(dets.astype(np.float32), nms_threshold, use_gpu_nms, device_id)
# also filter out low confidences
if conf_threshold > 0:
keep_conf_idx = np.where(dets[:, -1] > conf_threshold)
keep = list(set(keep_conf_idx[0]).intersection(keep))
if len(keep) == 0:
continue
nms_boxes[cls_ind][im_ind] = dets[keep, :].copy()
nms_keepIndices[cls_ind][im_ind] = keep
return nms_boxes, nms_keepIndices | [
"def",
"apply_nms_to_test_set_results",
"(",
"all_boxes",
",",
"nms_threshold",
",",
"conf_threshold",
",",
"use_gpu_nms",
",",
"device_id",
")",
":",
"num_classes",
"=",
"len",
"(",
"all_boxes",
")",
"num_images",
"=",
"len",
"(",
"all_boxes",
"[",
"0",
"]",
")",
"nms_boxes",
"=",
"[",
"[",
"[",
"]",
"for",
"_",
"in",
"range",
"(",
"num_images",
")",
"]",
"for",
"_",
"in",
"range",
"(",
"num_classes",
")",
"]",
"nms_keepIndices",
"=",
"[",
"[",
"[",
"]",
"for",
"_",
"in",
"range",
"(",
"num_images",
")",
"]",
"for",
"_",
"in",
"range",
"(",
"num_classes",
")",
"]",
"for",
"cls_ind",
"in",
"range",
"(",
"num_classes",
")",
":",
"for",
"im_ind",
"in",
"range",
"(",
"num_images",
")",
":",
"dets",
"=",
"all_boxes",
"[",
"cls_ind",
"]",
"[",
"im_ind",
"]",
"if",
"len",
"(",
"dets",
")",
"==",
"0",
":",
"continue",
"if",
"len",
"(",
"dets",
")",
"==",
"1",
":",
"keep",
"=",
"[",
"0",
"]",
"else",
":",
"keep",
"=",
"nms",
"(",
"dets",
".",
"astype",
"(",
"np",
".",
"float32",
")",
",",
"nms_threshold",
",",
"use_gpu_nms",
",",
"device_id",
")",
"# also filter out low confidences",
"if",
"conf_threshold",
">",
"0",
":",
"keep_conf_idx",
"=",
"np",
".",
"where",
"(",
"dets",
"[",
":",
",",
"-",
"1",
"]",
">",
"conf_threshold",
")",
"keep",
"=",
"list",
"(",
"set",
"(",
"keep_conf_idx",
"[",
"0",
"]",
")",
".",
"intersection",
"(",
"keep",
")",
")",
"if",
"len",
"(",
"keep",
")",
"==",
"0",
":",
"continue",
"nms_boxes",
"[",
"cls_ind",
"]",
"[",
"im_ind",
"]",
"=",
"dets",
"[",
"keep",
",",
":",
"]",
".",
"copy",
"(",
")",
"nms_keepIndices",
"[",
"cls_ind",
"]",
"[",
"im_ind",
"]",
"=",
"keep",
"return",
"nms_boxes",
",",
"nms_keepIndices"
] | https://github.com/microsoft/CNTK/blob/e9396480025b9ca457d26b6f33dd07c474c6aa04/Examples/Image/Detection/utils/nms_wrapper.py#L61-L100 |
|
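A minimal sketch of the flow this record's wrapper drives. The greedy IoU suppression below is a pure-NumPy stand-in for the `nms()` call it dispatches to (CPU or GPU in the CNTK utils); the boxes, thresholds, and expected output are illustrative only.

```python
import numpy as np

def greedy_nms(dets, iou_thresh):
    """Pure-NumPy stand-in for the nms() used by the wrapper above."""
    x1, y1, x2, y2, scores = dets.T
    areas = (x2 - x1) * (y2 - y1)
    order = scores.argsort()[::-1]            # highest score first
    keep = []
    while order.size:
        i = order[0]
        keep.append(int(i))
        # IoU of the kept box against every remaining candidate
        xx1 = np.maximum(x1[i], x1[order[1:]])
        yy1 = np.maximum(y1[i], y1[order[1:]])
        xx2 = np.minimum(x2[i], x2[order[1:]])
        yy2 = np.minimum(y2[i], y2[order[1:]])
        inter = np.clip(xx2 - xx1, 0, None) * np.clip(yy2 - yy1, 0, None)
        iou = inter / (areas[i] + areas[order[1:]] - inter)
        order = order[1:][iou <= iou_thresh]  # drop overlapping boxes
    return keep

dets = np.array([[0., 0., 10., 10., 0.9],     # kept (highest score)
                 [1., 1., 11., 11., 0.8],     # suppressed: IoU ~0.68 with box 0
                 [50., 50., 60., 60., 0.2]])  # survives nms, fails conf filter
keep = greedy_nms(dets, iou_thresh=0.5)
conf_keep = np.where(dets[:, -1] > 0.3)[0]        # the conf_threshold step
keep = sorted(set(conf_keep).intersection(keep))  # mirrors the wrapper above
print(keep)  # [0]
```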
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/graphviz/py3/graphviz/dot.py | python | Dot.__iter__ | (self, subgraph=False) | Yield the DOT source code line by line (as graph or subgraph). | Yield the DOT source code line by line (as graph or subgraph). | [
"Yield",
"the",
"DOT",
"source",
"code",
"line",
"by",
"line",
"(",
"as",
"graph",
"or",
"subgraph",
")",
"."
] | def __iter__(self, subgraph=False):
"""Yield the DOT source code line by line (as graph or subgraph)."""
if self.comment:
yield self._comment % self.comment
if subgraph:
if self.strict:
raise ValueError('subgraphs cannot be strict')
head = self._subgraph if self.name else self._subgraph_plain
else:
head = self._head_strict if self.strict else self._head
yield head % (self._quote(self.name) + ' ' if self.name else '')
for kw in ('graph', 'node', 'edge'):
attrs = getattr(self, f'{kw}_attr')
if attrs:
yield self._attr % (kw, self._attr_list(None, attrs))
for line in self.body:
yield line
yield self._tail | [
"def",
"__iter__",
"(",
"self",
",",
"subgraph",
"=",
"False",
")",
":",
"if",
"self",
".",
"comment",
":",
"yield",
"self",
".",
"_comment",
"%",
"self",
".",
"comment",
"if",
"subgraph",
":",
"if",
"self",
".",
"strict",
":",
"raise",
"ValueError",
"(",
"'subgraphs cannot be strict'",
")",
"head",
"=",
"self",
".",
"_subgraph",
"if",
"self",
".",
"name",
"else",
"self",
".",
"_subgraph_plain",
"else",
":",
"head",
"=",
"self",
".",
"_head_strict",
"if",
"self",
".",
"strict",
"else",
"self",
".",
"_head",
"yield",
"head",
"%",
"(",
"self",
".",
"_quote",
"(",
"self",
".",
"name",
")",
"+",
"' '",
"if",
"self",
".",
"name",
"else",
"''",
")",
"for",
"kw",
"in",
"(",
"'graph'",
",",
"'node'",
",",
"'edge'",
")",
":",
"attrs",
"=",
"getattr",
"(",
"self",
",",
"f'{kw}_attr'",
")",
"if",
"attrs",
":",
"yield",
"self",
".",
"_attr",
"%",
"(",
"kw",
",",
"self",
".",
"_attr_list",
"(",
"None",
",",
"attrs",
")",
")",
"for",
"line",
"in",
"self",
".",
"body",
":",
"yield",
"line",
"yield",
"self",
".",
"_tail"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/graphviz/py3/graphviz/dot.py#L95-L116 |
||
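A short sketch of how this `__iter__` gets exercised through the public `graphviz` API (graph contents arbitrary): iterating a graph yields one DOT statement per step, in the order laid out above — comment, head, per-kind attribute lines, body, tail.

```python
from graphviz import Digraph  # assumes the graphviz package is installed

g = Digraph('G', comment='demo', graph_attr={'rankdir': 'LR'})
g.edge('a', 'b')

for line in g:   # drives Dot.__iter__ shown above
    print(line)
# Roughly (exact whitespace varies by version):
# // demo
# digraph G {
#     graph [rankdir=LR]
#     a -> b
# }
```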
epam/Indigo | 30e40b4b1eb9bae0207435a26cfcb81ddcc42be1 | api/python/indigo/__init__.py | python | IndigoObject.getBond | (self, idx) | return self.dispatcher.IndigoObject(
self.dispatcher,
self.dispatcher._checkResult(
Indigo._lib.indigoGetBond(self.id, idx)
),
) | Molecule method returns bond by index
Args:
idx (int): bond index
Returns:
IndigoObject: bond object | Molecule method returns bond by index | [
"Molecule",
"method",
"returns",
"bond",
"by",
"index"
] | def getBond(self, idx):
"""Molecule method returns bond by index
Args:
idx (int): bond index
Returns:
IndigoObject: bond object
"""
self.dispatcher._setSessionId()
return self.dispatcher.IndigoObject(
self.dispatcher,
self.dispatcher._checkResult(
Indigo._lib.indigoGetBond(self.id, idx)
),
) | [
"def",
"getBond",
"(",
"self",
",",
"idx",
")",
":",
"self",
".",
"dispatcher",
".",
"_setSessionId",
"(",
")",
"return",
"self",
".",
"dispatcher",
".",
"IndigoObject",
"(",
"self",
".",
"dispatcher",
",",
"self",
".",
"dispatcher",
".",
"_checkResult",
"(",
"Indigo",
".",
"_lib",
".",
"indigoGetBond",
"(",
"self",
".",
"id",
",",
"idx",
")",
")",
",",
")"
] | https://github.com/epam/Indigo/blob/30e40b4b1eb9bae0207435a26cfcb81ddcc42be1/api/python/indigo/__init__.py#L2620-L2635 |
|
apiaryio/snowcrash | b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3 | tools/gyp/pylib/gyp/xcode_emulation.py | python | XcodeSettings.GetInstallNameBase | (self) | return install_base | Return DYLIB_INSTALL_NAME_BASE for this target. | Return DYLIB_INSTALL_NAME_BASE for this target. | [
"Return",
"DYLIB_INSTALL_NAME_BASE",
"for",
"this",
"target",
"."
] | def GetInstallNameBase(self):
"""Return DYLIB_INSTALL_NAME_BASE for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
install_base = self.GetPerTargetSetting(
'DYLIB_INSTALL_NAME_BASE',
default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
return install_base | [
"def",
"GetInstallNameBase",
"(",
"self",
")",
":",
"# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.",
"if",
"(",
"self",
".",
"spec",
"[",
"'type'",
"]",
"!=",
"'shared_library'",
"and",
"(",
"self",
".",
"spec",
"[",
"'type'",
"]",
"!=",
"'loadable_module'",
"or",
"self",
".",
"_IsBundle",
"(",
")",
")",
")",
":",
"return",
"None",
"install_base",
"=",
"self",
".",
"GetPerTargetSetting",
"(",
"'DYLIB_INSTALL_NAME_BASE'",
",",
"default",
"=",
"'/Library/Frameworks'",
"if",
"self",
".",
"_IsBundle",
"(",
")",
"else",
"'/usr/local/lib'",
")",
"return",
"install_base"
] | https://github.com/apiaryio/snowcrash/blob/b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3/tools/gyp/pylib/gyp/xcode_emulation.py#L690-L699 |
|
natanielruiz/android-yolo | 1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f | jni-build/jni/include/tensorflow/python/training/server_lib.py | python | Server.__init__ | (self,
server_or_cluster_def,
job_name=None,
task_index=None,
protocol=None,
config=None,
start=True) | Creates a new server with the given definition.
The `job_name`, `task_index`, and `protocol` arguments are optional, and
override any information provided in `server_or_cluster_def`.
Args:
server_or_cluster_def: A `tf.train.ServerDef` or
`tf.train.ClusterDef` protocol buffer, or a
`tf.train.ClusterSpec` object, describing the server to be
created and/or the cluster of which it is a member.
job_name: (Optional.) Specifies the name of the job of which the server
is a member. Defaults to the value in `server_or_cluster_def`, if
specified.
task_index: (Optional.) Specifies the task index of the server in its
job. Defaults to the value in `server_or_cluster_def`, if specified.
Otherwise defaults to 0 if the server's job has only one task.
protocol: (Optional.) Specifies the protocol to be used by the server.
Acceptable values include `"grpc"`. Defaults to the value in
`server_or_cluster_def`, if specified. Otherwise defaults to `"grpc"`.
config: (Optional.) A `tf.ConfigProto` that specifies default
configuration options for all sessions that run on this server.
start: (Optional.) Boolean, indicating whether to start the server
after creating it. Defaults to `True`.
Raises:
tf.errors.OpError: Or one of its subclasses if an error occurs while
creating the TensorFlow server. | Creates a new server with the given definition. | [
"Creates",
"a",
"new",
"server",
"with",
"the",
"given",
"definition",
"."
] | def __init__(self,
server_or_cluster_def,
job_name=None,
task_index=None,
protocol=None,
config=None,
start=True):
"""Creates a new server with the given definition.
The `job_name`, `task_index`, and `protocol` arguments are optional, and
override any information provided in `server_or_cluster_def`.
Args:
server_or_cluster_def: A `tf.train.ServerDef` or
`tf.train.ClusterDef` protocol buffer, or a
`tf.train.ClusterSpec` object, describing the server to be
created and/or the cluster of which it is a member.
job_name: (Optional.) Specifies the name of the job of which the server
is a member. Defaults to the value in `server_or_cluster_def`, if
specified.
task_index: (Optional.) Specifies the task index of the server in its
job. Defaults to the value in `server_or_cluster_def`, if specified.
Otherwise defaults to 0 if the server's job has only one task.
protocol: (Optional.) Specifies the protocol to be used by the server.
Acceptable values include `"grpc"`. Defaults to the value in
`server_or_cluster_def`, if specified. Otherwise defaults to `"grpc"`.
config: (Optional.) A `tf.ConfigProto` that specifies default
configuration options for all sessions that run on this server.
start: (Optional.) Boolean, indicating whether to start the server
after creating it. Defaults to `True`.
Raises:
tf.errors.OpError: Or one of its subclasses if an error occurs while
creating the TensorFlow server.
"""
self._server_def = _make_server_def(server_or_cluster_def,
job_name, task_index, protocol, config)
with errors.raise_exception_on_not_ok_status() as status:
self._server = pywrap_tensorflow.PyServer_New(
self._server_def.SerializeToString(), status)
if start:
self.start() | [
"def",
"__init__",
"(",
"self",
",",
"server_or_cluster_def",
",",
"job_name",
"=",
"None",
",",
"task_index",
"=",
"None",
",",
"protocol",
"=",
"None",
",",
"config",
"=",
"None",
",",
"start",
"=",
"True",
")",
":",
"self",
".",
"_server_def",
"=",
"_make_server_def",
"(",
"server_or_cluster_def",
",",
"job_name",
",",
"task_index",
",",
"protocol",
",",
"config",
")",
"with",
"errors",
".",
"raise_exception_on_not_ok_status",
"(",
")",
"as",
"status",
":",
"self",
".",
"_server",
"=",
"pywrap_tensorflow",
".",
"PyServer_New",
"(",
"self",
".",
"_server_def",
".",
"SerializeToString",
"(",
")",
",",
"status",
")",
"if",
"start",
":",
"self",
".",
"start",
"(",
")"
] | https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/python/training/server_lib.py#L114-L155 |
||
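A hedged usage sketch for the constructor (this is TF1-style API, so it needs the compat shims under TF 2.x); the single-task, in-process server is the smallest configuration the docstring describes.

```python
import tensorflow.compat.v1 as tf

tf.disable_eager_execution()  # graph-mode sessions, as in TF 1.x

# Simplest case: an in-process, single-task server on a free local port.
server = tf.train.Server.create_local_server()

# Explicit form, spelling out cluster membership as in the docstring:
# cluster = tf.train.ClusterSpec({'local': ['localhost:2222']})
# server = tf.train.Server(cluster, job_name='local', task_index=0)

with tf.Session(server.target) as sess:
    print(sess.run(tf.constant(42)))  # executed by the in-process server
```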
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/richtext.py | python | RichTextBuffer.EndBold | (*args, **kwargs) | return _richtext.RichTextBuffer_EndBold(*args, **kwargs) | EndBold(self) -> bool | EndBold(self) -> bool | [
"EndBold",
"(",
"self",
")",
"-",
">",
"bool"
] | def EndBold(*args, **kwargs):
"""EndBold(self) -> bool"""
return _richtext.RichTextBuffer_EndBold(*args, **kwargs) | [
"def",
"EndBold",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextBuffer_EndBold",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/richtext.py#L2337-L2339 |
|
apache/incubator-mxnet | f03fb23f1d103fec9541b5ae59ee06b1734a51d9 | python/mxnet/image/detection.py | python | DetRandomCropAug.__call__ | (self, src, label) | return (src, label) | Augmenter implementation body | Augmenter implementation body | [
"Augmenter",
"implementation",
"body"
] | def __call__(self, src, label):
"""Augmenter implementation body"""
crop = self._random_crop_proposal(label, src.shape[0], src.shape[1])
if crop:
x, y, w, h, label = crop
src = fixed_crop(src, x, y, w, h, None)
return (src, label) | [
"def",
"__call__",
"(",
"self",
",",
"src",
",",
"label",
")",
":",
"crop",
"=",
"self",
".",
"_random_crop_proposal",
"(",
"label",
",",
"src",
".",
"shape",
"[",
"0",
"]",
",",
"src",
".",
"shape",
"[",
"1",
"]",
")",
"if",
"crop",
":",
"x",
",",
"y",
",",
"w",
",",
"h",
",",
"label",
"=",
"crop",
"src",
"=",
"fixed_crop",
"(",
"src",
",",
"x",
",",
"y",
",",
"w",
",",
"h",
",",
"None",
")",
"return",
"(",
"src",
",",
"label",
")"
] | https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/image/detection.py#L206-L212 |
|
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/distribute/cross_device_ops.py | python | _ConcatAndSplitPacker.pack | (self, grouped_grads_and_vars) | return device_grad_packs | Pack tensors. | Pack tensors. | [
"Pack",
"tensors",
"."
] | def pack(self, grouped_grads_and_vars):
"""Pack tensors."""
self.grouped_grads_and_vars = grouped_grads_and_vars
self.all_device_shapes = []
self.all_device_sizes = []
device_grad_packs = []
for device_grads_and_vars in grouped_grads_and_vars:
with ops.colocate_with(device_grads_and_vars[0][0]):
# Flatten all the grads.
flat_grads = [
array_ops.reshape(g, [-1]) for g, _ in device_grads_and_vars
]
# Remember the original shape of all the grads.
device_shapes = [array_ops.shape(g) for g, _ in device_grads_and_vars]
# Remember the original sizes of all the grads.
device_sizes = [array_ops.size(g) for g, _ in device_grads_and_vars]
# Concat all the flat grads into a big flat tensor.
concat_grads = array_ops.concat(flat_grads, 0)
# Split the big tensor into num_splits packs. In cases where the
      # total size is not divisible by num_splits, the last pack gets
# more elements.
# TODO(zhengxq): it is also possible to optimize away all the concat
# as well.
num_splits = self.num_packs
# The array_ops.size function will sometimes remove static shapes. So if
# all gradient shapes are defined, we use another method to get the
# total size.
# TODO(yuefengz): move this logic to array_ops.size.
if all(g.shape.is_fully_defined() for g, _ in device_grads_and_vars):
total_grad_size = sum(
[g.shape.num_elements() for g, _ in device_grads_and_vars])
else:
total_grad_size = array_ops.size(concat_grads)
split_size = total_grad_size // num_splits
split_size_last = total_grad_size - split_size * (num_splits - 1)
split_sizes = [split_size] * (num_splits - 1) + [split_size_last]
grad_packs = array_ops.split(concat_grads, split_sizes)
# Ready to aggregate the repacked gradients, with fake variables.
# TODO(zhengxq): It is hacky to have to use fake variables.
# We should remove the need for variables in
# aggregate_gradients_using*.
device_grad_packs.append(zip(grad_packs, [None] * num_splits))
self.all_device_shapes.append(device_shapes)
self.all_device_sizes.append(device_sizes)
return device_grad_packs | [
"def",
"pack",
"(",
"self",
",",
"grouped_grads_and_vars",
")",
":",
"self",
".",
"grouped_grads_and_vars",
"=",
"grouped_grads_and_vars",
"self",
".",
"all_device_shapes",
"=",
"[",
"]",
"self",
".",
"all_device_sizes",
"=",
"[",
"]",
"device_grad_packs",
"=",
"[",
"]",
"for",
"device_grads_and_vars",
"in",
"grouped_grads_and_vars",
":",
"with",
"ops",
".",
"colocate_with",
"(",
"device_grads_and_vars",
"[",
"0",
"]",
"[",
"0",
"]",
")",
":",
"# Flatten all the grads.",
"flat_grads",
"=",
"[",
"array_ops",
".",
"reshape",
"(",
"g",
",",
"[",
"-",
"1",
"]",
")",
"for",
"g",
",",
"_",
"in",
"device_grads_and_vars",
"]",
"# Remember the original shape of all the grads.",
"device_shapes",
"=",
"[",
"array_ops",
".",
"shape",
"(",
"g",
")",
"for",
"g",
",",
"_",
"in",
"device_grads_and_vars",
"]",
"# Remember the original sizes of all the grads.",
"device_sizes",
"=",
"[",
"array_ops",
".",
"size",
"(",
"g",
")",
"for",
"g",
",",
"_",
"in",
"device_grads_and_vars",
"]",
"# Concat all the flat grads into a big flat tensor.",
"concat_grads",
"=",
"array_ops",
".",
"concat",
"(",
"flat_grads",
",",
"0",
")",
"# Split the big tensor into num_splits packs. In cases where the",
"# total size is not divisible num_splits, the last pack gets",
"# more elements.",
"# TODO(zhengxq): it is also possible to optimize away all the concat",
"# as well.",
"num_splits",
"=",
"self",
".",
"num_packs",
"# The array_ops.size function will sometimes remove static shapes. So if",
"# all gradient shapes are defined, we use another method to get the",
"# total size.",
"# TODO(yuefengz): move this logic to array_ops.size.",
"if",
"all",
"(",
"g",
".",
"shape",
".",
"is_fully_defined",
"(",
")",
"for",
"g",
",",
"_",
"in",
"device_grads_and_vars",
")",
":",
"total_grad_size",
"=",
"sum",
"(",
"[",
"g",
".",
"shape",
".",
"num_elements",
"(",
")",
"for",
"g",
",",
"_",
"in",
"device_grads_and_vars",
"]",
")",
"else",
":",
"total_grad_size",
"=",
"array_ops",
".",
"size",
"(",
"concat_grads",
")",
"split_size",
"=",
"total_grad_size",
"//",
"num_splits",
"split_size_last",
"=",
"total_grad_size",
"-",
"split_size",
"*",
"(",
"num_splits",
"-",
"1",
")",
"split_sizes",
"=",
"[",
"split_size",
"]",
"*",
"(",
"num_splits",
"-",
"1",
")",
"+",
"[",
"split_size_last",
"]",
"grad_packs",
"=",
"array_ops",
".",
"split",
"(",
"concat_grads",
",",
"split_sizes",
")",
"# Ready to aggregate the repacked gradients, with fake variables.",
"# TODO(zhengxq): It is hacky to have to use fake variables.",
"# We should remove the need for variables in",
"# aggregate_gradients_using*.",
"device_grad_packs",
".",
"append",
"(",
"zip",
"(",
"grad_packs",
",",
"[",
"None",
"]",
"*",
"num_splits",
")",
")",
"self",
".",
"all_device_shapes",
".",
"append",
"(",
"device_shapes",
")",
"self",
".",
"all_device_sizes",
".",
"append",
"(",
"device_sizes",
")",
"return",
"device_grad_packs"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/distribute/cross_device_ops.py#L726-L776 |
|
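The concat/split bookkeeping in `pack` is easiest to see with plain NumPy and made-up sizes; the `split_size_last` line is where the remainder ends up when the total is not divisible by `num_splits`.

```python
import numpy as np

grads = [np.arange(3.0), np.ones((2, 2)), np.arange(6.0)]   # toy per-device grads
num_splits = 4

flat = np.concatenate([g.reshape(-1) for g in grads])       # 3 + 4 + 6 = 13 elems
total = flat.size
split_size = total // num_splits                            # 3
split_size_last = total - split_size * (num_splits - 1)     # 4: last pack is bigger
sizes = [split_size] * (num_splits - 1) + [split_size_last]
packs = np.split(flat, np.cumsum(sizes)[:-1])               # analogue of array_ops.split

# Unpacking reverses it using the remembered per-tensor shapes and sizes:
bounds = np.cumsum([g.size for g in grads])[:-1]
restored = [r.reshape(g.shape)
            for r, g in zip(np.split(np.concatenate(packs), bounds), grads)]
assert all(np.array_equal(a, b) for a, b in zip(restored, grads))
```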
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib2to3/pytree.py | python | WildcardPattern.__init__ | (self, content=None, min=0, max=HUGE, name=None) | Initializer.
Args:
content: optional sequence of subsequences of patterns;
if absent, matches one node;
if present, each subsequence is an alternative [*]
min: optional minimum number of times to match, default 0
max: optional maximum number of times to match, default HUGE
name: optional name assigned to this match
[*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is
equivalent to (a b c | d e | f g h); if content is None,
this is equivalent to '.' in regular expression terms.
The min and max parameters work as follows:
min=0, max=maxint: .*
min=1, max=maxint: .+
min=0, max=1: .?
min=1, max=1: .
If content is not None, replace the dot with the parenthesized
list of alternatives, e.g. (a b c | d e | f g h)* | Initializer. | [
"Initializer",
"."
] | def __init__(self, content=None, min=0, max=HUGE, name=None):
"""
Initializer.
Args:
content: optional sequence of subsequences of patterns;
if absent, matches one node;
if present, each subsequence is an alternative [*]
min: optional minimum number of times to match, default 0
max: optional maximum number of times to match, default HUGE
name: optional name assigned to this match
[*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is
equivalent to (a b c | d e | f g h); if content is None,
this is equivalent to '.' in regular expression terms.
The min and max parameters work as follows:
min=0, max=maxint: .*
min=1, max=maxint: .+
min=0, max=1: .?
min=1, max=1: .
If content is not None, replace the dot with the parenthesized
list of alternatives, e.g. (a b c | d e | f g h)*
"""
assert 0 <= min <= max <= HUGE, (min, max)
if content is not None:
content = tuple(map(tuple, content)) # Protect against alterations
# Check sanity of alternatives
assert len(content), repr(content) # Can't have zero alternatives
for alt in content:
assert len(alt), repr(alt) # Can't have empty alternatives
self.content = content
self.min = min
self.max = max
self.name = name | [
"def",
"__init__",
"(",
"self",
",",
"content",
"=",
"None",
",",
"min",
"=",
"0",
",",
"max",
"=",
"HUGE",
",",
"name",
"=",
"None",
")",
":",
"assert",
"0",
"<=",
"min",
"<=",
"max",
"<=",
"HUGE",
",",
"(",
"min",
",",
"max",
")",
"if",
"content",
"is",
"not",
"None",
":",
"content",
"=",
"tuple",
"(",
"map",
"(",
"tuple",
",",
"content",
")",
")",
"# Protect against alterations",
"# Check sanity of alternatives",
"assert",
"len",
"(",
"content",
")",
",",
"repr",
"(",
"content",
")",
"# Can't have zero alternatives",
"for",
"alt",
"in",
"content",
":",
"assert",
"len",
"(",
"alt",
")",
",",
"repr",
"(",
"alt",
")",
"# Can have empty alternatives",
"self",
".",
"content",
"=",
"content",
"self",
".",
"min",
"=",
"min",
"self",
".",
"max",
"=",
"max",
"self",
".",
"name",
"=",
"name"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib2to3/pytree.py#L653-L686 |
||
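The regex analogies in the docstring map one-to-one onto constructor arguments. A small sketch (lib2to3 is deprecated but still importable in current CPython; the patterns and leaves below are arbitrary):

```python
from lib2to3 import pytree
from lib2to3.pgen2 import token

any_one = pytree.WildcardPattern(min=1, max=1)              # '.'
any_run = pytree.WildcardPattern(min=0, max=pytree.HUGE)    # '.*'

# (NAME | NUMBER)+ : one or more nodes, each matching one alternative.
name_or_num = pytree.WildcardPattern(
    content=[[pytree.LeafPattern(token.NAME)],
             [pytree.LeafPattern(token.NUMBER)]],
    min=1, max=pytree.HUGE)

leaves = [pytree.Leaf(token.NAME, 'x'), pytree.Leaf(token.NUMBER, '1')]
print(name_or_num.match_seq(leaves))  # True: the repetition covers both nodes
```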
jiaxiang-wu/quantized-cnn | 4d020e17026df90e40111d219e3eb74e0afb1588 | cpplint.py | python | NestingState.InExternC | (self) | return self.stack and isinstance(self.stack[-1], _ExternCInfo) | Check if we are currently one level inside an 'extern "C"' block.
Returns:
True if top of the stack is an extern block, False otherwise. | Check if we are currently one level inside an 'extern "C"' block. | [
"Check",
"if",
"we",
"are",
"currently",
"one",
"level",
"inside",
"an",
"extern",
"C",
"block",
"."
] | def InExternC(self):
"""Check if we are currently one level inside an 'extern "C"' block.
Returns:
True if top of the stack is an extern block, False otherwise.
"""
return self.stack and isinstance(self.stack[-1], _ExternCInfo) | [
"def",
"InExternC",
"(",
"self",
")",
":",
"return",
"self",
".",
"stack",
"and",
"isinstance",
"(",
"self",
".",
"stack",
"[",
"-",
"1",
"]",
",",
"_ExternCInfo",
")"
] | https://github.com/jiaxiang-wu/quantized-cnn/blob/4d020e17026df90e40111d219e3eb74e0afb1588/cpplint.py#L2242-L2248 |
|
PlatformLab/Arachne | e67391471007174dd4002dc2c160628e19c284e8 | scripts/cpplint.py | python | ParseNolintSuppressions | (filename, raw_line, linenum, error) | Updates the global list of line error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler. | Updates the global list of line error-suppressions. | [
"Updates",
"the",
"global",
"list",
"of",
"line",
"error",
"-",
"suppressions",
"."
] | def ParseNolintSuppressions(filename, raw_line, linenum, error):
"""Updates the global list of line error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler.
"""
matched = Search(r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?', raw_line)
if matched:
if matched.group(1):
suppressed_line = linenum + 1
else:
suppressed_line = linenum
category = matched.group(2)
if category in (None, '(*)'): # => "suppress all"
_error_suppressions.setdefault(None, set()).add(suppressed_line)
else:
if category.startswith('(') and category.endswith(')'):
category = category[1:-1]
if category in _ERROR_CATEGORIES:
_error_suppressions.setdefault(category, set()).add(suppressed_line)
elif category not in _LEGACY_ERROR_CATEGORIES:
error(filename, linenum, 'readability/nolint', 5,
'Unknown NOLINT error category: %s' % category) | [
"def",
"ParseNolintSuppressions",
"(",
"filename",
",",
"raw_line",
",",
"linenum",
",",
"error",
")",
":",
"matched",
"=",
"Search",
"(",
"r'\\bNOLINT(NEXTLINE)?\\b(\\([^)]+\\))?'",
",",
"raw_line",
")",
"if",
"matched",
":",
"if",
"matched",
".",
"group",
"(",
"1",
")",
":",
"suppressed_line",
"=",
"linenum",
"+",
"1",
"else",
":",
"suppressed_line",
"=",
"linenum",
"category",
"=",
"matched",
".",
"group",
"(",
"2",
")",
"if",
"category",
"in",
"(",
"None",
",",
"'(*)'",
")",
":",
"# => \"suppress all\"",
"_error_suppressions",
".",
"setdefault",
"(",
"None",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"suppressed_line",
")",
"else",
":",
"if",
"category",
".",
"startswith",
"(",
"'('",
")",
"and",
"category",
".",
"endswith",
"(",
"')'",
")",
":",
"category",
"=",
"category",
"[",
"1",
":",
"-",
"1",
"]",
"if",
"category",
"in",
"_ERROR_CATEGORIES",
":",
"_error_suppressions",
".",
"setdefault",
"(",
"category",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"suppressed_line",
")",
"elif",
"category",
"not",
"in",
"_LEGACY_ERROR_CATEGORIES",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'readability/nolint'",
",",
"5",
",",
"'Unknown NOLINT error category: %s'",
"%",
"category",
")"
] | https://github.com/PlatformLab/Arachne/blob/e67391471007174dd4002dc2c160628e19c284e8/scripts/cpplint.py#L571-L600 |
||
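What the parse amounts to, on hypothetical source lines, using the same pattern: a bare NOLINT suppresses every category on its own line, a parenthesized one suppresses a single category, and the NEXTLINE variant shifts the target down one.

```python
import re

lines = ['int x;  // NOLINT',                          # all categories, this line
         'int y;  // NOLINT(runtime/int)',             # one category, this line
         'int z;  // NOLINTNEXTLINE(whitespace/tab)']  # one category, next line

pat = re.compile(r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?')  # same regex as above
for linenum, raw_line in enumerate(lines):
    m = pat.search(raw_line)
    if m:
        suppressed_line = linenum + 1 if m.group(1) else linenum
        print(suppressed_line, m.group(2))
# 0 None
# 1 (runtime/int)
# 3 (whitespace/tab)
```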
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py2/scipy/stats/_continuous_distns.py | python | levy_stable_gen._pdf_from_cf_with_fft | (cf, h=0.01, q=9) | return (x, density) | Calculates pdf from cf using fft. Using region around 0 with N=2**q points
separated by distance h. As suggested by [MS]. | Calculates pdf from cf using fft. Using region around 0 with N=2**q points
separated by distance h. As suggested by [MS]. | [
"Calculates",
"pdf",
"from",
"cf",
"using",
"fft",
".",
"Using",
"region",
"around",
"0",
"with",
"N",
"=",
"2",
"**",
"q",
"points",
"separated",
"by",
"distance",
"h",
".",
"As",
"suggested",
"by",
"[",
"MS",
"]",
"."
] | def _pdf_from_cf_with_fft(cf, h=0.01, q=9):
"""Calculates pdf from cf using fft. Using region around 0 with N=2**q points
separated by distance h. As suggested by [MS].
"""
N = 2**q
n = np.arange(1,N+1)
density = ((-1)**(n-1-N/2))*np.fft.fft(((-1)**(n-1))*cf(2*np.pi*(n-1-N/2)/h/N))/h/N
x = (n-1-N/2)*h
return (x, density) | [
"def",
"_pdf_from_cf_with_fft",
"(",
"cf",
",",
"h",
"=",
"0.01",
",",
"q",
"=",
"9",
")",
":",
"N",
"=",
"2",
"**",
"q",
"n",
"=",
"np",
".",
"arange",
"(",
"1",
",",
"N",
"+",
"1",
")",
"density",
"=",
"(",
"(",
"-",
"1",
")",
"**",
"(",
"n",
"-",
"1",
"-",
"N",
"/",
"2",
")",
")",
"*",
"np",
".",
"fft",
".",
"fft",
"(",
"(",
"(",
"-",
"1",
")",
"**",
"(",
"n",
"-",
"1",
")",
")",
"*",
"cf",
"(",
"2",
"*",
"np",
".",
"pi",
"*",
"(",
"n",
"-",
"1",
"-",
"N",
"/",
"2",
")",
"/",
"h",
"/",
"N",
")",
")",
"/",
"h",
"/",
"N",
"x",
"=",
"(",
"n",
"-",
"1",
"-",
"N",
"/",
"2",
")",
"*",
"h",
"return",
"(",
"x",
",",
"density",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/stats/_continuous_distns.py#L3804-L3812 |
|
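A sanity check of the construction against a case with a known answer: the standard normal has characteristic function exp(-t^2/2) and density 1/sqrt(2*pi) ≈ 0.3989 at zero. The helper is restated so the snippet runs standalone.

```python
import numpy as np

def pdf_from_cf_with_fft(cf, h=0.01, q=9):
    # same construction as the scipy helper above
    N = 2**q
    n = np.arange(1, N + 1)
    density = ((-1)**(n - 1 - N/2)) * np.fft.fft(
        ((-1)**(n - 1)) * cf(2 * np.pi * (n - 1 - N/2) / h / N)) / h / N
    x = (n - 1 - N/2) * h
    return x, density

x, dens = pdf_from_cf_with_fft(lambda t: np.exp(-t**2 / 2))
i = int(np.argmin(np.abs(x)))
print(x[i], dens.real[i])  # ~0.0  ~0.39894 (= 1/sqrt(2*pi))
```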
SmingHub/Sming | cde389ed030905694983121a32f9028976b57194 | Sming/Components/Storage/Tools/hwconfig/common.py | python | fixpath | (path) | return path | Paths in Windows can get a little weird | Paths in Windows can get a little weird | [
"Paths",
"in",
"Windows",
"can",
"get",
"a",
"little",
"weird"
] | def fixpath(path):
"""Paths in Windows can get a little weird """
if len(path) > 2 and path[1] != ':' and platform.system() == 'Windows' and path[2] == '/':
return path[1] + ':' + path[2:]
return path | [
"def",
"fixpath",
"(",
"path",
")",
":",
"if",
"len",
"(",
"path",
")",
">",
"2",
"and",
"path",
"[",
"1",
"]",
"!=",
"':'",
"and",
"platform",
".",
"system",
"(",
")",
"==",
"'Windows'",
"and",
"path",
"[",
"2",
"]",
"==",
"'/'",
":",
"return",
"path",
"[",
"1",
"]",
"+",
"':'",
"+",
"path",
"[",
"2",
":",
"]",
"return",
"path"
] | https://github.com/SmingHub/Sming/blob/cde389ed030905694983121a32f9028976b57194/Sming/Components/Storage/Tools/hwconfig/common.py#L25-L29 |
|
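The rewrite is clearest on a concrete, made-up path: on Windows an MSYS/Git-Bash style `/C/...` prefix gets turned back into a drive letter (the platform check is skipped in this sketch).

```python
path = '/C/Users/me/project'             # MSYS-style drive prefix
if len(path) > 2 and path[1] != ':' and path[2] == '/':
    path = path[1] + ':' + path[2:]      # the same rewrite fixpath() applies
print(path)  # C:/Users/me/project
```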
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/turtle.py | python | RawTurtle.begin_poly | (self) | Start recording the vertices of a polygon.
No argument.
Start recording the vertices of a polygon. Current turtle position
is first point of polygon.
Example (for a Turtle instance named turtle):
>>> turtle.begin_poly() | Start recording the vertices of a polygon. | [
"Start",
"recording",
"the",
"vertices",
"of",
"a",
"polygon",
"."
] | def begin_poly(self):
"""Start recording the vertices of a polygon.
No argument.
Start recording the vertices of a polygon. Current turtle position
is first point of polygon.
Example (for a Turtle instance named turtle):
>>> turtle.begin_poly()
"""
self._poly = [self._position]
self._creatingPoly = True | [
"def",
"begin_poly",
"(",
"self",
")",
":",
"self",
".",
"_poly",
"=",
"[",
"self",
".",
"_position",
"]",
"self",
".",
"_creatingPoly",
"=",
"True"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/lib-tk/turtle.py#L3305-L3317 |
||
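The begin/end/get trio in action (this opens a Tk window, so it will not run headless); the square is arbitrary.

```python
import turtle

t = turtle.Turtle()
t.begin_poly()          # current position becomes the first vertex
for _ in range(4):      # trace a square
    t.forward(50)
    t.left(90)
t.end_poly()            # stop recording
print(t.get_poly())     # tuple of the recorded (x, y) vertices
turtle.bye()
```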
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/site-packages/requests/utils.py | python | unquote_header_value | (value, is_filename=False) | return value | r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
This does not use the real unquoting but what browsers are actually
using for quoting.
:param value: the header value to unquote.
:rtype: str | r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
This does not use the real unquoting but what browsers are actually
using for quoting. | [
"r",
"Unquotes",
"a",
"header",
"value",
".",
"(",
"Reversal",
"of",
":",
"func",
":",
"quote_header_value",
")",
".",
"This",
"does",
"not",
"use",
"the",
"real",
"unquoting",
"but",
"what",
"browsers",
"are",
"actually",
"using",
"for",
"quoting",
"."
] | def unquote_header_value(value, is_filename=False):
r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
This does not use the real unquoting but what browsers are actually
using for quoting.
:param value: the header value to unquote.
:rtype: str
"""
if value and value[0] == value[-1] == '"':
# this is not the real unquoting, but fixing this so that the
# RFC is met will result in bugs with internet explorer and
# probably some other browsers as well. IE for example is
# uploading files with "C:\foo\bar.txt" as filename
value = value[1:-1]
# if this is a filename and the starting characters look like
# a UNC path, then just return the value without quotes. Using the
# replace sequence below on a UNC path has the effect of turning
# the leading double slash into a single slash and then
# _fix_ie_filename() doesn't work correctly. See #458.
if not is_filename or value[:2] != '\\\\':
return value.replace('\\\\', '\\').replace('\\"', '"')
return value | [
"def",
"unquote_header_value",
"(",
"value",
",",
"is_filename",
"=",
"False",
")",
":",
"if",
"value",
"and",
"value",
"[",
"0",
"]",
"==",
"value",
"[",
"-",
"1",
"]",
"==",
"'\"'",
":",
"# this is not the real unquoting, but fixing this so that the",
"# RFC is met will result in bugs with internet explorer and",
"# probably some other browsers as well. IE for example is",
"# uploading files with \"C:\\foo\\bar.txt\" as filename",
"value",
"=",
"value",
"[",
"1",
":",
"-",
"1",
"]",
"# if this is a filename and the starting characters look like",
"# a UNC path, then just return the value without quotes. Using the",
"# replace sequence below on a UNC path has the effect of turning",
"# the leading double slash into a single slash and then",
"# _fix_ie_filename() doesn't work correctly. See #458.",
"if",
"not",
"is_filename",
"or",
"value",
"[",
":",
"2",
"]",
"!=",
"'\\\\\\\\'",
":",
"return",
"value",
".",
"replace",
"(",
"'\\\\\\\\'",
",",
"'\\\\'",
")",
".",
"replace",
"(",
"'\\\\\"'",
",",
"'\"'",
")",
"return",
"value"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/requests/utils.py#L384-L406 |
|
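A few illustrative calls (the header fragments are made up); note the UNC special case called out in the comments above.

```python
from requests.utils import unquote_header_value

print(unquote_header_value('"token"'))                 # token
print(unquote_header_value(r'"C:\\temp\\file.txt"'))
# C:\temp\file.txt   (browser-style doubled backslashes collapsed)
print(unquote_header_value(r'"\\server\share"', is_filename=True))
# \\server\share     (UNC prefix detected: returned without un-escaping)
```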
hpi-xnor/BMXNet-v2 | af2b1859eafc5c721b1397cef02f946aaf2ce20d | python/mxnet/gluon/model_zoo/vision/densenet.py | python | densenet161 | (**kwargs) | return get_densenet(161, **kwargs) | r"""Densenet-BC 161-layer model from the
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '$MXNET_HOME/models'
Location for keeping the model parameters. | r"""Densenet-BC 161-layer model from the
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_ paper. | [
"r",
"Densenet",
"-",
"BC",
"161",
"-",
"layer",
"model",
"from",
"the",
"Densely",
"Connected",
"Convolutional",
"Networks",
"<https",
":",
"//",
"arxiv",
".",
"org",
"/",
"pdf",
"/",
"1608",
".",
"06993",
".",
"pdf",
">",
"_",
"paper",
"."
] | def densenet161(**kwargs):
r"""Densenet-BC 161-layer model from the
`"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '$MXNET_HOME/models'
Location for keeping the model parameters.
"""
return get_densenet(161, **kwargs) | [
"def",
"densenet161",
"(",
"*",
"*",
"kwargs",
")",
":",
"return",
"get_densenet",
"(",
"161",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/gluon/model_zoo/vision/densenet.py#L163-L176 |
|
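Typical use, assuming network access for the pretrained weights (the first call downloads them to the model root from the docstring):

```python
import mxnet as mx
from mxnet.gluon.model_zoo import vision

net = vision.densenet161(pretrained=True)
x = mx.nd.random.uniform(shape=(1, 3, 224, 224))  # one fake RGB image
print(net(x).shape)  # (1, 1000) ImageNet logits
```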
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/scipy/stats/stats.py | python | tmin | (a, lowerlimit=None, axis=0, inclusive=True, nan_policy='propagate') | return res | Compute the trimmed minimum
This function finds the minimum value of an array `a` along the
specified axis, but only considering values greater than a specified
lower limit.
Parameters
----------
a : array_like
array of values
lowerlimit : None or float, optional
Values in the input array less than the given limit will be ignored.
When lowerlimit is None, then all values are used. The default value
is None.
axis : int or None, optional
Axis along which to operate. Default is 0. If None, compute over the
whole array `a`.
inclusive : {True, False}, optional
This flag determines whether values exactly equal to the lower limit
are included. The default value is True.
nan_policy : {'propagate', 'raise', 'omit'}, optional
Defines how to handle when input contains nan. 'propagate' returns nan,
'raise' throws an error, 'omit' performs the calculations ignoring nan
values. Default is 'propagate'.
Returns
-------
tmin : float, int or ndarray
Examples
--------
>>> from scipy import stats
>>> x = np.arange(20)
>>> stats.tmin(x)
0
>>> stats.tmin(x, 13)
13
>>> stats.tmin(x, 13, inclusive=False)
14 | Compute the trimmed minimum | [
"Compute",
"the",
"trimmed",
"minimum"
] | def tmin(a, lowerlimit=None, axis=0, inclusive=True, nan_policy='propagate'):
"""
Compute the trimmed minimum
This function finds the minimum value of an array `a` along the
specified axis, but only considering values greater than a specified
lower limit.
Parameters
----------
a : array_like
array of values
lowerlimit : None or float, optional
Values in the input array less than the given limit will be ignored.
When lowerlimit is None, then all values are used. The default value
is None.
axis : int or None, optional
Axis along which to operate. Default is 0. If None, compute over the
whole array `a`.
inclusive : {True, False}, optional
This flag determines whether values exactly equal to the lower limit
are included. The default value is True.
nan_policy : {'propagate', 'raise', 'omit'}, optional
Defines how to handle when input contains nan. 'propagate' returns nan,
'raise' throws an error, 'omit' performs the calculations ignoring nan
values. Default is 'propagate'.
Returns
-------
tmin : float, int or ndarray
Examples
--------
>>> from scipy import stats
>>> x = np.arange(20)
>>> stats.tmin(x)
0
>>> stats.tmin(x, 13)
13
>>> stats.tmin(x, 13, inclusive=False)
14
"""
a, axis = _chk_asarray(a, axis)
am = _mask_to_limits(a, (lowerlimit, None), (inclusive, False))
contains_nan, nan_policy = _contains_nan(am, nan_policy)
if contains_nan and nan_policy == 'omit':
am = ma.masked_invalid(am)
res = ma.minimum.reduce(am, axis).data
if res.ndim == 0:
return res[()]
return res | [
"def",
"tmin",
"(",
"a",
",",
"lowerlimit",
"=",
"None",
",",
"axis",
"=",
"0",
",",
"inclusive",
"=",
"True",
",",
"nan_policy",
"=",
"'propagate'",
")",
":",
"a",
",",
"axis",
"=",
"_chk_asarray",
"(",
"a",
",",
"axis",
")",
"am",
"=",
"_mask_to_limits",
"(",
"a",
",",
"(",
"lowerlimit",
",",
"None",
")",
",",
"(",
"inclusive",
",",
"False",
")",
")",
"contains_nan",
",",
"nan_policy",
"=",
"_contains_nan",
"(",
"am",
",",
"nan_policy",
")",
"if",
"contains_nan",
"and",
"nan_policy",
"==",
"'omit'",
":",
"am",
"=",
"ma",
".",
"masked_invalid",
"(",
"am",
")",
"res",
"=",
"ma",
".",
"minimum",
".",
"reduce",
"(",
"am",
",",
"axis",
")",
".",
"data",
"if",
"res",
".",
"ndim",
"==",
"0",
":",
"return",
"res",
"[",
"(",
")",
"]",
"return",
"res"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/stats/stats.py#L594-L650 |
|
eclipse/omr | 056e7c9ce9d503649190bc5bd9931fac30b4e4bc | jitbuilder/apigen/genutils.py | python | APIService.sets_allocators | (self) | return "sets-allocators" in self.__flags() | Returns whether the service sets class allocators. | Returns whether the service sets class allocators. | [
"Returns",
"whether",
"the",
"service",
"sets",
"class",
"allocators",
"."
] | def sets_allocators(self):
"""Returns whether the service sets class allocators."""
return "sets-allocators" in self.__flags() | [
"def",
"sets_allocators",
"(",
"self",
")",
":",
"return",
"\"sets-allocators\"",
"in",
"self",
".",
"__flags",
"(",
")"
] | https://github.com/eclipse/omr/blob/056e7c9ce9d503649190bc5bd9931fac30b4e4bc/jitbuilder/apigen/genutils.py#L196-L198 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/numpy/py2/numpy/core/setup.py | python | is_npy_no_signal | () | return sys.platform == 'win32' | Return True if the NPY_NO_SIGNAL symbol must be defined in configuration
header. | Return True if the NPY_NO_SIGNAL symbol must be defined in configuration
header. | [
"Return",
"True",
"if",
"the",
"NPY_NO_SIGNAL",
"symbol",
"must",
"be",
"defined",
"in",
"configuration",
"header",
"."
] | def is_npy_no_signal():
"""Return True if the NPY_NO_SIGNAL symbol must be defined in configuration
header."""
return sys.platform == 'win32' | [
"def",
"is_npy_no_signal",
"(",
")",
":",
"return",
"sys",
".",
"platform",
"==",
"'win32'"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py2/numpy/core/setup.py#L76-L79 |
|
mhammond/pywin32 | 44afd86ba8485194df93234639243252deeb40d5 | com/win32com/servers/interp.py | python | Interpreter.Exec | (self, exp) | Execute a statement. | Execute a statement. | [
"Execute",
"a",
"statement",
"."
] | def Exec(self, exp):
"""Execute a statement."""
if type(exp) != str:
raise Exception(desc="Must be a string", scode=winerror.DISP_E_TYPEMISMATCH)
exec(str(exp), self.dict) | [
"def",
"Exec",
"(",
"self",
",",
"exp",
")",
":",
"if",
"type",
"(",
"exp",
")",
"!=",
"str",
":",
"raise",
"Exception",
"(",
"desc",
"=",
"\"Must be a string\"",
",",
"scode",
"=",
"winerror",
".",
"DISP_E_TYPEMISMATCH",
")",
"exec",
"(",
"str",
"(",
"exp",
")",
",",
"self",
".",
"dict",
")"
] | https://github.com/mhammond/pywin32/blob/44afd86ba8485194df93234639243252deeb40d5/com/win32com/servers/interp.py#L39-L43 |
||
microsoft/checkedc-clang | a173fefde5d7877b7750e7ce96dd08cf18baebf2 | lldb/third_party/Python/module/ptyprocess-0.6.0/ptyprocess/ptyprocess.py | python | PtyProcessUnicode.write | (self, s) | return super(PtyProcessUnicode, self).write(b) | Write the unicode string ``s`` to the pseudoterminal.
Returns the number of bytes written. | Write the unicode string ``s`` to the pseudoterminal. | [
"Write",
"the",
"unicode",
"string",
"s",
"to",
"the",
"pseudoterminal",
"."
] | def write(self, s):
"""Write the unicode string ``s`` to the pseudoterminal.
Returns the number of bytes written.
"""
b = s.encode(self.encoding)
return super(PtyProcessUnicode, self).write(b) | [
"def",
"write",
"(",
"self",
",",
"s",
")",
":",
"b",
"=",
"s",
".",
"encode",
"(",
"self",
".",
"encoding",
")",
"return",
"super",
"(",
"PtyProcessUnicode",
",",
"self",
")",
".",
"write",
"(",
"b",
")"
] | https://github.com/microsoft/checkedc-clang/blob/a173fefde5d7877b7750e7ce96dd08cf18baebf2/lldb/third_party/Python/module/ptyprocess-0.6.0/ptyprocess/ptyprocess.py#L830-L836 |
|
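A small POSIX-only sketch (like the class itself); `cat` just echoes what it is fed, and the point is that the return value counts encoded bytes rather than characters.

```python
from ptyprocess import PtyProcessUnicode

p = PtyProcessUnicode.spawn(['cat'])
n = p.write('héllo\n')   # str in; returns the count of *encoded* bytes
print(n)                 # 7 under UTF-8: 'é' encodes to two bytes
p.sendeof()
p.wait()
```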
ChromiumWebApps/chromium | c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7 | tools/cr/cr/actions/installer.py | python | Installer.Install | (self, context, targets, arguments) | Installs a target somewhere so that it is ready to run. | Installs a target somewhere so that it is ready to run. | [
"Installs",
"a",
"target",
"somewhere",
"so",
"that",
"it",
"is",
"ready",
"to",
"run",
"."
] | def Install(self, context, targets, arguments):
"""Installs a target somewhere so that it is ready to run."""
raise NotImplementedError('Must be overridden.') | [
"def",
"Install",
"(",
"self",
",",
"context",
",",
"targets",
",",
"arguments",
")",
":",
"raise",
"NotImplementedError",
"(",
"'Must be overridden.'",
")"
] | https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/cr/cr/actions/installer.py#L29-L31 |
||
bh107/bohrium | 5b83e7117285fefc7779ed0e9acb0f8e74c7e068 | thirdparty/pyratemp/pyratemp.py | python | Parser.parse | (self, template) | return self._parse(template) | Parse a template.
:Parameters:
- `template`: template-unicode-string
:Returns: the resulting parse-tree
:Exceptions:
- `TemplateSyntaxError`: for template-syntax-errors
- `TemplateIncludeError`: if template-inclusion failed
- `TemplateException` | Parse a template. | [
"Parse",
"a",
"template",
"."
] | def parse(self, template):
"""Parse a template.
:Parameters:
- `template`: template-unicode-string
:Returns: the resulting parse-tree
:Exceptions:
- `TemplateSyntaxError`: for template-syntax-errors
- `TemplateIncludeError`: if template-inclusion failed
- `TemplateException`
"""
self._includestack = [(None, template)] # for error-messages (_errpos)
return self._parse(template) | [
"def",
"parse",
"(",
"self",
",",
"template",
")",
":",
"self",
".",
"_includestack",
"=",
"[",
"(",
"None",
",",
"template",
")",
"]",
"# for error-messages (_errpos)",
"return",
"self",
".",
"_parse",
"(",
"template",
")"
] | https://github.com/bh107/bohrium/blob/5b83e7117285fefc7779ed0e9acb0f8e74c7e068/thirdparty/pyratemp/pyratemp.py#L541-L553 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/richtext.py | python | RichTextCtrl.GetCaretPositionForDefaultStyle | (*args, **kwargs) | return _richtext.RichTextCtrl_GetCaretPositionForDefaultStyle(*args, **kwargs) | GetCaretPositionForDefaultStyle(self) -> long | GetCaretPositionForDefaultStyle(self) -> long | [
"GetCaretPositionForDefaultStyle",
"(",
"self",
")",
"-",
">",
"long"
] | def GetCaretPositionForDefaultStyle(*args, **kwargs):
"""GetCaretPositionForDefaultStyle(self) -> long"""
return _richtext.RichTextCtrl_GetCaretPositionForDefaultStyle(*args, **kwargs) | [
"def",
"GetCaretPositionForDefaultStyle",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextCtrl_GetCaretPositionForDefaultStyle",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/richtext.py#L4124-L4126 |
|
gem5/gem5 | 141cc37c2d4b93959d4c249b8f7e6a8b2ef75338 | ext/ply/example/unicalc/calc.py | python | p_expression_binop | (p) | expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression | expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression | [
"expression",
":",
"expression",
"PLUS",
"expression",
"|",
"expression",
"MINUS",
"expression",
"|",
"expression",
"TIMES",
"expression",
"|",
"expression",
"DIVIDE",
"expression"
] | def p_expression_binop(p):
'''expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression'''
if p[2] == u'+' : p[0] = p[1] + p[3]
elif p[2] == u'-': p[0] = p[1] - p[3]
elif p[2] == u'*': p[0] = p[1] * p[3]
elif p[2] == u'/': p[0] = p[1] / p[3] | [
"def",
"p_expression_binop",
"(",
"p",
")",
":",
"if",
"p",
"[",
"2",
"]",
"==",
"u'+'",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"1",
"]",
"+",
"p",
"[",
"3",
"]",
"elif",
"p",
"[",
"2",
"]",
"==",
"u'-'",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"1",
"]",
"-",
"p",
"[",
"3",
"]",
"elif",
"p",
"[",
"2",
"]",
"==",
"u'*'",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"1",
"]",
"*",
"p",
"[",
"3",
"]",
"elif",
"p",
"[",
"2",
"]",
"==",
"u'/'",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"1",
"]",
"/",
"p",
"[",
"3",
"]"
] | https://github.com/gem5/gem5/blob/141cc37c2d4b93959d4c249b8f7e6a8b2ef75338/ext/ply/example/unicalc/calc.py#L72-L80 |
||
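In a PLY action, `p` indexes the symbols of the production being reduced, and since the four rules above are mutually ambiguous, the unicalc example relies on a precedence table; a sketch with the usual PLY names:

```python
# expression : expression PLUS expression
#   p[0]  result slot (the left-hand 'expression')
#   p[1]  value of the left 'expression'
#   p[2]  the operator token text, e.g. u'+'
#   p[3]  value of the right 'expression'
#
# Precedence declarations resolve the shift/reduce conflicts
# (lowest precedence first, all left-associative):
precedence = (
    ('left', 'PLUS', 'MINUS'),
    ('left', 'TIMES', 'DIVIDE'),
)
# With these, u'2 + 3 * 4' reduces TIMES first and yields 14.
```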
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/_misc.py | python | DateTime.ToGMT | (*args, **kwargs) | return _misc_.DateTime_ToGMT(*args, **kwargs) | ToGMT(self, bool noDST=False) -> DateTime | ToGMT(self, bool noDST=False) -> DateTime | [
"ToGMT",
"(",
"self",
"bool",
"noDST",
"=",
"False",
")",
"-",
">",
"DateTime"
] | def ToGMT(*args, **kwargs):
"""ToGMT(self, bool noDST=False) -> DateTime"""
return _misc_.DateTime_ToGMT(*args, **kwargs) | [
"def",
"ToGMT",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"DateTime_ToGMT",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_misc.py#L3946-L3948 |
|
deepmind/reverb | ef3c8f0be1b720a741d2dee335e15e44668c291a | configure.py | python | reset_configure_bazelrc | () | Reset file that contains customized config settings. | Reset file that contains customized config settings. | [
"Reset",
"file",
"that",
"contains",
"customized",
"config",
"settings",
"."
] | def reset_configure_bazelrc():
"""Reset file that contains customized config settings."""
open(_REVERB_BAZELRC, 'w').close() | [
"def",
"reset_configure_bazelrc",
"(",
")",
":",
"open",
"(",
"_REVERB_BAZELRC",
",",
"'w'",
")",
".",
"close",
"(",
")"
] | https://github.com/deepmind/reverb/blob/ef3c8f0be1b720a741d2dee335e15e44668c291a/configure.py#L229-L231 |
||
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/gsutil/third_party/boto/boto/beanstalk/layer1.py | python | Layer1.create_application_version | (self, application_name, version_label,
description=None, s3_bucket=None,
s3_key=None, auto_create_application=None) | return self._get_response('CreateApplicationVersion', params) | Creates an application version for the specified application.
:type application_name: string
:param application_name: The name of the application. If no
application is found with this name, and AutoCreateApplication is
false, returns an InvalidParameterValue error.
:type version_label: string
:param version_label: A label identifying this version. Constraint:
Must be unique per application. If an application version already
exists with this label for the specified application, AWS Elastic
Beanstalk returns an InvalidParameterValue error.
:type description: string
:param description: Describes this version.
:type s3_bucket: string
:param s3_bucket: The Amazon S3 bucket where the data is located.
:type s3_key: string
:param s3_key: The Amazon S3 key where the data is located. Both
s3_bucket and s3_key must be specified in order to use a specific
source bundle. If both of these values are not specified the
sample application will be used.
:type auto_create_application: boolean
:param auto_create_application: Determines how the system behaves if
the specified application for this version does not already exist:
true: Automatically creates the specified application for this
version if it does not already exist. false: Returns an
InvalidParameterValue if the specified application for this version
does not already exist. Default: false Valid Values: true | false
:raises: TooManyApplicationsException,
TooManyApplicationVersionsException,
InsufficientPrivilegesException,
S3LocationNotInServiceRegionException | Creates an application version for the specified application. | [
"Creates",
"an",
"application",
"version",
"for",
"the",
"specified",
"application",
"."
] | def create_application_version(self, application_name, version_label,
description=None, s3_bucket=None,
s3_key=None, auto_create_application=None):
"""Creates an application version for the specified application.
:type application_name: string
:param application_name: The name of the application. If no
application is found with this name, and AutoCreateApplication is
false, returns an InvalidParameterValue error.
:type version_label: string
:param version_label: A label identifying this version. Constraint:
Must be unique per application. If an application version already
exists with this label for the specified application, AWS Elastic
Beanstalk returns an InvalidParameterValue error.
:type description: string
:param description: Describes this version.
:type s3_bucket: string
:param s3_bucket: The Amazon S3 bucket where the data is located.
:type s3_key: string
:param s3_key: The Amazon S3 key where the data is located. Both
s3_bucket and s3_key must be specified in order to use a specific
source bundle. If both of these values are not specified the
sample application will be used.
:type auto_create_application: boolean
:param auto_create_application: Determines how the system behaves if
the specified application for this version does not already exist:
true: Automatically creates the specified application for this
version if it does not already exist. false: Returns an
InvalidParameterValue if the specified application for this version
does not already exist. Default: false Valid Values: true | false
:raises: TooManyApplicationsException,
TooManyApplicationVersionsException,
InsufficientPrivilegesException,
S3LocationNotInServiceRegionException
"""
params = {'ApplicationName': application_name,
'VersionLabel': version_label}
if description:
params['Description'] = description
if s3_bucket and s3_key:
params['SourceBundle.S3Bucket'] = s3_bucket
params['SourceBundle.S3Key'] = s3_key
if auto_create_application:
params['AutoCreateApplication'] = self._encode_bool(
auto_create_application)
return self._get_response('CreateApplicationVersion', params) | [
"def",
"create_application_version",
"(",
"self",
",",
"application_name",
",",
"version_label",
",",
"description",
"=",
"None",
",",
"s3_bucket",
"=",
"None",
",",
"s3_key",
"=",
"None",
",",
"auto_create_application",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'ApplicationName'",
":",
"application_name",
",",
"'VersionLabel'",
":",
"version_label",
"}",
"if",
"description",
":",
"params",
"[",
"'Description'",
"]",
"=",
"description",
"if",
"s3_bucket",
"and",
"s3_key",
":",
"params",
"[",
"'SourceBundle.S3Bucket'",
"]",
"=",
"s3_bucket",
"params",
"[",
"'SourceBundle.S3Key'",
"]",
"=",
"s3_key",
"if",
"auto_create_application",
":",
"params",
"[",
"'AutoCreateApplication'",
"]",
"=",
"self",
".",
"_encode_bool",
"(",
"auto_create_application",
")",
"return",
"self",
".",
"_get_response",
"(",
"'CreateApplicationVersion'",
",",
"params",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/beanstalk/layer1.py#L104-L156 |
|
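A hedged call sketch; every identifier below (region, names, bucket, key) is a placeholder, and credentials come from the usual boto configuration.

```python
import boto.beanstalk

conn = boto.beanstalk.connect_to_region('us-east-1')
resp = conn.create_application_version(
    application_name='my-app',
    version_label='v1',
    description='first cut',
    s3_bucket='my-bucket',            # both bucket and key, per the docstring
    s3_key='bundles/my-app-v1.zip',
    auto_create_application=True)
```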
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python3/src/Lib/calendar.py | python | isleap | (year) | return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0) | Return True for leap years, False for non-leap years. | Return True for leap years, False for non-leap years. | [
"Return",
"True",
"for",
"leap",
"years",
"False",
"for",
"non",
"-",
"leap",
"years",
"."
] | def isleap(year):
"""Return True for leap years, False for non-leap years."""
return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0) | [
"def",
"isleap",
"(",
"year",
")",
":",
"return",
"year",
"%",
"4",
"==",
"0",
"and",
"(",
"year",
"%",
"100",
"!=",
"0",
"or",
"year",
"%",
"400",
"==",
"0",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/calendar.py#L100-L102 |
|
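The rule worked through on the usual boundary cases:

```python
for year in (1900, 2000, 2024):
    print(year, year % 4 == 0 and (year % 100 != 0 or year % 400 == 0))
# 1900 False  (century year not divisible by 400)
# 2000 True   (divisible by 400)
# 2024 True   (divisible by 4, not a century year)
```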
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/data/ops/dataset_ops.py | python | from_variant | (variant, structure) | return _VariantDataset(variant, structure) | Constructs a dataset from the given variant and (nested) structure.
Args:
variant: A scalar `tf.variant` tensor representing a dataset.
structure: A (nested) structure of `tf.TypeSpec` objects representing the
structure of each element in the dataset.
Returns:
A `tf.data.Dataset` instance. | Constructs a dataset from the given variant and (nested) structure. | [
"Constructs",
"a",
"dataset",
"from",
"the",
"given",
"variant",
"and",
"(",
"nested",
")",
"structure",
"."
] | def from_variant(variant, structure):
"""Constructs a dataset from the given variant and (nested) structure.
Args:
variant: A scalar `tf.variant` tensor representing a dataset.
structure: A (nested) structure of `tf.TypeSpec` objects representing the
structure of each element in the dataset.
Returns:
A `tf.data.Dataset` instance.
"""
return _VariantDataset(variant, structure) | [
"def",
"from_variant",
"(",
"variant",
",",
"structure",
")",
":",
"return",
"_VariantDataset",
"(",
"variant",
",",
"structure",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/data/ops/dataset_ops.py#L4277-L4288 |
|
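This helper backs the public `tf.data.experimental.from_variant`; round-tripping through its `to_variant` counterpart shows the structure argument in use.

```python
import tensorflow as tf

ds = tf.data.Dataset.range(3)
variant = tf.data.experimental.to_variant(ds)  # dataset -> scalar variant tensor
spec = ds.element_spec                         # (nested) structure to rebuild with

rebuilt = tf.data.experimental.from_variant(variant, structure=spec)
print(list(rebuilt.as_numpy_iterator()))       # [0, 1, 2]
```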
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/mapreduce/mapreduce/datastore_range_iterators.py | python | AbstractKeyRangeIterator.__iter__ | (self) | Iter. | Iter. | [
"Iter",
"."
] | def __iter__(self):
"""Iter."""
raise NotImplementedError() | [
"def",
"__iter__",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/mapreduce/mapreduce/datastore_range_iterators.py#L384-L386 |
||
maidsafe-archive/MaidSafe | defd65e1c8cfb6a1cbdeaaa0eee31d065421792d | tools/cpplint.py | python | ParseNolintSuppressions | (filename, raw_line, linenum, error) | Updates the global list of error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler. | Updates the global list of error-suppressions. | [
"Updates",
"the",
"global",
"list",
"of",
"error",
"-",
"suppressions",
"."
] | def ParseNolintSuppressions(filename, raw_line, linenum, error):
"""Updates the global list of error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler.
"""
# FIXME(adonovan): "NOLINT(" is misparsed as NOLINT(*).
matched = _RE_SUPPRESSION.search(raw_line)
if matched:
category = matched.group(1)
if category in (None, '(*)'): # => "suppress all"
_error_suppressions.setdefault(None, set()).add(linenum)
else:
if category.startswith('(') and category.endswith(')'):
category = category[1:-1]
if category in _ERROR_CATEGORIES:
_error_suppressions.setdefault(category, set()).add(linenum)
else:
error(filename, linenum, 'readability/nolint', 5,
'Unknown NOLINT error category: %s' % category) | [
"def",
"ParseNolintSuppressions",
"(",
"filename",
",",
"raw_line",
",",
"linenum",
",",
"error",
")",
":",
"# FIXME(adonovan): \"NOLINT(\" is misparsed as NOLINT(*).",
"matched",
"=",
"_RE_SUPPRESSION",
".",
"search",
"(",
"raw_line",
")",
"if",
"matched",
":",
"category",
"=",
"matched",
".",
"group",
"(",
"1",
")",
"if",
"category",
"in",
"(",
"None",
",",
"'(*)'",
")",
":",
"# => \"suppress all\"",
"_error_suppressions",
".",
"setdefault",
"(",
"None",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"linenum",
")",
"else",
":",
"if",
"category",
".",
"startswith",
"(",
"'('",
")",
"and",
"category",
".",
"endswith",
"(",
"')'",
")",
":",
"category",
"=",
"category",
"[",
"1",
":",
"-",
"1",
"]",
"if",
"category",
"in",
"_ERROR_CATEGORIES",
":",
"_error_suppressions",
".",
"setdefault",
"(",
"category",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"linenum",
")",
"else",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'readability/nolint'",
",",
"5",
",",
"'Unknown NOLINT error category: %s'",
"%",
"category",
")"
] | https://github.com/maidsafe-archive/MaidSafe/blob/defd65e1c8cfb6a1cbdeaaa0eee31d065421792d/tools/cpplint.py#L363-L389 |
||
pytorch/pytorch | 7176c92687d3cc847cc046bf002269c6949a21c2 | benchmarks/distributed/rpc/parameter_server/server/server.py | python | ParameterServerBase.record_straggler_start | (self, key, cuda=True) | r"""
A helper method that records a straggler metric
for the given key. A user should call this when
the first gradient for the param location is received.
Args:
key (str): unique id for metric within a group
cuda (bool): indicator to determine if this is a CUDA metric | r"""
A helper method that records a straggler metric
for the given key. A user should call this when
the first gradient for the param location is received.
Args:
key (str): unique id for metric within a group
cuda (bool): indicator to determine if this is a CUDA metric | [
"r",
"A",
"helper",
"method",
"that",
"records",
"a",
"straggler",
"metric",
"for",
"the",
"given",
"key",
".",
"A",
"user",
"should",
"call",
"this",
"when",
"the",
"first",
"gradient",
"for",
"the",
"param",
"location",
"is",
"received",
".",
"Args",
":",
"key",
"(",
"str",
")",
":",
"unique",
"id",
"for",
"metric",
"within",
"a",
"group",
"cuda",
"(",
"bool",
")",
":",
"indicator",
"to",
"determine",
"if",
"this",
"is",
"a",
"CUDA",
"metric"
] | def record_straggler_start(self, key, cuda=True):
r"""
A helper method that records a straggler metric
for the given key. A user should call this when
the first gradient for the param location is received.
Args:
key (str): unique id for metric within a group
cuda (bool): indicator to determine if this is a CUDA metric
"""
self.__metrics_logger.record_start(
self.PARAMETER_SERVER_STRAGGLER_METRIC,
key,
self.PARAM_INDEX_STRAGGLER,
cuda
) | [
"def",
"record_straggler_start",
"(",
"self",
",",
"key",
",",
"cuda",
"=",
"True",
")",
":",
"self",
".",
"__metrics_logger",
".",
"record_start",
"(",
"self",
".",
"PARAMETER_SERVER_STRAGGLER_METRIC",
",",
"key",
",",
"self",
".",
"PARAM_INDEX_STRAGGLER",
",",
"cuda",
")"
] | https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/benchmarks/distributed/rpc/parameter_server/server/server.py#L82-L96 |
||
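Shape of the call, with a hypothetical in-memory logger in place of the benchmark's real MetricsLogger (which records CUDA events); the metric-name constants below are illustrative, not the real values:

import time

class FakeMetricsLogger(object):
    """Hypothetical stand-in: records wall-clock start times instead of CUDA events."""
    def __init__(self):
        self.starts = {}

    def record_start(self, metric_name, key, user_label, cuda):
        self.starts[(metric_name, key)] = time.time()

class Server(object):
    PARAMETER_SERVER_STRAGGLER_METRIC = "straggler_metric"  # illustrative value
    PARAM_INDEX_STRAGGLER = "straggler"                     # illustrative value

    def __init__(self, metrics_logger):
        self.__metrics_logger = metrics_logger

    def record_straggler_start(self, key, cuda=True):
        # Same shape as the record above: delegate to the logger with a
        # fixed metric name and label.
        self.__metrics_logger.record_start(
            self.PARAMETER_SERVER_STRAGGLER_METRIC,
            key,
            self.PARAM_INDEX_STRAGGLER,
            cuda,
        )

logger = FakeMetricsLogger()
Server(logger).record_straggler_start("param_0", cuda=False)
print(sorted(logger.starts))  # [('straggler_metric', 'param_0')]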
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib-tk/Tkinter.py | python | CallWrapper.__init__ | (self, func, subst, widget) | Store FUNC, SUBST and WIDGET as members. | Store FUNC, SUBST and WIDGET as members. | [
"Store",
"FUNC",
"SUBST",
"and",
"WIDGET",
"as",
"members",
"."
] | def __init__(self, func, subst, widget):
"""Store FUNC, SUBST and WIDGET as members."""
self.func = func
self.subst = subst
self.widget = widget | [
"def",
"__init__",
"(",
"self",
",",
"func",
",",
"subst",
",",
"widget",
")",
":",
"self",
".",
"func",
"=",
"func",
"self",
".",
"subst",
"=",
"subst",
"self",
".",
"widget",
"=",
"widget"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib-tk/Tkinter.py#L1460-L1464 |
||
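The three members stored here are consumed by CallWrapper.__call__, defined just below this method in the Tkinter source: it substitutes the raw Tcl event arguments and then invokes the Python callback. A trimmed sketch that omits the real class's exception routing through widget._report_exception():

class CallWrapper(object):
    def __init__(self, func, subst, widget):
        """Store FUNC, SUBST and WIDGET as members."""
        self.func = func
        self.subst = subst
        self.widget = widget

    def __call__(self, *args):
        # Simplified __call__: apply the substitution, then the callback.
        if self.subst:
            args = self.subst(*args)
        return self.func(*args)

w = CallWrapper(print, lambda *a: tuple(s.upper() for s in a), widget=None)
w("hello", "tk")  # prints: HELLO TK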
eclipse/omr | 056e7c9ce9d503649190bc5bd9931fac30b4e4bc | jitbuilder/apigen/cppgen.py | python | CppGenerator.get_impl_type | (self, c) | return "{} *".format(self.get_impl_class_name(c.as_class())) if c.is_class() else self.builtin_type_map[c.name()] | Returns the C++ type to be used in the JitBuilder implementation
for a given type name, prefixing with a given namespace if needed. | Returns the C++ type to be used in the JitBuilder implementation
for a given type name, prefixing with a given namespace if needed. | [
"Returns",
"the",
"C",
"++",
"type",
"to",
"be",
"used",
"in",
"the",
"JitBuilder",
"implementation",
"for",
"a",
"given",
"type",
"name",
"prefixing",
"with",
"a",
"given",
"namespace",
"if",
"needed",
"."
] | def get_impl_type(self, c):
"""
Returns the C++ type to be used in the JitBuilder implementation
for a given type name, prefixing with a given namespace if needed.
"""
return "{} *".format(self.get_impl_class_name(c.as_class())) if c.is_class() else self.builtin_type_map[c.name()] | [
"def",
"get_impl_type",
"(",
"self",
",",
"c",
")",
":",
"return",
"\"{} *\"",
".",
"format",
"(",
"self",
".",
"get_impl_class_name",
"(",
"c",
".",
"as_class",
"(",
")",
")",
")",
"if",
"c",
".",
"is_class",
"(",
")",
"else",
"self",
".",
"builtin_type_map",
"[",
"c",
".",
"name",
"(",
")",
"]"
] | https://github.com/eclipse/omr/blob/056e7c9ce9d503649190bc5bd9931fac30b4e4bc/jitbuilder/apigen/cppgen.py#L154-L159 |
|
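Dispatch sketch with hypothetical stand-ins for the API-description objects cppgen.py walks; the builtin_type_map contents and the TR:: prefix are illustrative only, not the generator's real tables:

builtin_type_map = {"none": "void", "int32": "int32_t", "boolean": "bool"}

class BuiltinType(object):
    """Hypothetical minimal type description."""
    def __init__(self, name):
        self._name = name
    def is_class(self):
        return False
    def name(self):
        return self._name

def get_impl_class_name(cls_name):
    return "TR::" + cls_name  # illustrative namespace prefix

def get_impl_type(c):
    # Class descriptions become a pointer to the implementation class;
    # builtin names go through the lookup table.
    if c.is_class():
        return "{} *".format(get_impl_class_name(c.name()))
    return builtin_type_map[c.name()]

print(get_impl_type(BuiltinType("int32")))  # int32_t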
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/python/ops/data_flow_ops.py | python | SparseConditionalAccumulator.take_indexed_slices_grad | (self, num_required, name=None) | return ops.IndexedSlices(
indices=return_val.indices,
values=return_val.values,
dense_shape=return_val.shape) | Attempts to extract the average gradient from the accumulator.
The operation blocks until sufficient number of gradients have been
successfully applied to the accumulator.
Once successful, the following actions are also triggered:
- Counter of accumulated gradients is reset to 0.
- Aggregated gradient is reset to 0 tensor.
- Accumulator's internal time step is incremented by 1.
Args:
num_required: Number of gradients that needs to have been aggregated
name: Optional name for the operation
Returns:
An IndexedSlices holding the value of the average gradient.
Raises:
InvalidArgumentError: If num_required < 1 | Attempts to extract the average gradient from the accumulator. | [
"Attempts",
"to",
"extract",
"the",
"average",
"gradient",
"from",
"the",
"accumulator",
"."
] | def take_indexed_slices_grad(self, num_required, name=None):
"""Attempts to extract the average gradient from the accumulator.
The operation blocks until sufficient number of gradients have been
successfully applied to the accumulator.
Once successful, the following actions are also triggered:
- Counter of accumulated gradients is reset to 0.
- Aggregated gradient is reset to 0 tensor.
- Accumulator's internal time step is incremented by 1.
Args:
num_required: Number of gradients that needs to have been aggregated
name: Optional name for the operation
Returns:
An IndexedSlices holding the value of the average gradient.
Raises:
InvalidArgumentError: If num_required < 1
"""
return_val = gen_data_flow_ops.sparse_accumulator_take_gradient(
self._accumulator_ref, num_required, dtype=self._dtype, name=name)
return ops.IndexedSlices(
indices=return_val.indices,
values=return_val.values,
dense_shape=return_val.shape) | [
"def",
"take_indexed_slices_grad",
"(",
"self",
",",
"num_required",
",",
"name",
"=",
"None",
")",
":",
"return_val",
"=",
"gen_data_flow_ops",
".",
"sparse_accumulator_take_gradient",
"(",
"self",
".",
"_accumulator_ref",
",",
"num_required",
",",
"dtype",
"=",
"self",
".",
"_dtype",
",",
"name",
"=",
"name",
")",
"return",
"ops",
".",
"IndexedSlices",
"(",
"indices",
"=",
"return_val",
".",
"indices",
",",
"values",
"=",
"return_val",
".",
"values",
",",
"dense_shape",
"=",
"return_val",
".",
"shape",
")"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/ops/data_flow_ops.py#L1365-L1391 |
|
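Usage shape, as an untested sketch against the TF1-era graph API (these symbols were exported as tf.SparseConditionalAccumulator etc. in 1.x): apply at least num_required sparse gradients, then take the average, which also resets the counter and aggregate:

import tensorflow as tf  # TF1-era API

accum = tf.SparseConditionalAccumulator(dtype=tf.float32)
grad = tf.IndexedSlices(
    values=tf.constant([[1.0], [3.0]]),
    indices=tf.constant([0, 2], dtype=tf.int64),
    dense_shape=tf.constant([4, 1], dtype=tf.int64))
apply_op = accum.apply_indexed_slices_grad(grad)
take = accum.take_indexed_slices_grad(num_required=1)

with tf.Session() as sess:
    sess.run(apply_op)
    avg = sess.run(take)  # blocks until num_required grads were applied
    print(avg.indices, avg.values)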
gem5/gem5 | 141cc37c2d4b93959d4c249b8f7e6a8b2ef75338 | ext/ply/example/ansic/cparse.py | python | p_direct_declarator_5 | (t) | direct_declarator : direct_declarator LPAREN identifier_list RPAREN | direct_declarator : direct_declarator LPAREN identifier_list RPAREN | [
"direct_declarator",
":",
"direct_declarator",
"LPAREN",
"identifier_list",
"RPAREN"
] | def p_direct_declarator_5(t):
'direct_declarator : direct_declarator LPAREN identifier_list RPAREN '
pass | [
"def",
"p_direct_declarator_5",
"(",
"t",
")",
":",
"pass"
] | https://github.com/gem5/gem5/blob/141cc37c2d4b93959d4c249b8f7e6a8b2ef75338/ext/ply/example/ansic/cparse.py#L289-L291 |
||
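The pass body is deliberate: in PLY the production lives in the docstring, yacc.yacc() harvests it by introspection, and a rule without an action merely recognizes the form. A tiny self-contained grammar written in the same style:

import ply.lex as lex
import ply.yacc as yacc

tokens = ('NUMBER', 'PLUS')
t_PLUS = r'\+'
t_ignore = ' '

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)

def p_expr_plus(p):
    'expr : expr PLUS NUMBER'
    p[0] = p[1] + p[3]

def p_expr_number(p):
    'expr : NUMBER'
    p[0] = p[1]

def p_error(p):
    pass

lex.lex()
parser = yacc.yacc(write_tables=False)
print(parser.parse('1 + 2 + 3'))  # 6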
rapidsai/cudf | d5b2448fc69f17509304d594f029d0df56984962 | python/cudf/cudf/core/dataframe.py | python | DataFrame.isin | (self, values) | Whether each element in the DataFrame is contained in values.
Parameters
----------
values : iterable, Series, DataFrame or dict
The result will only be true at a location if all
the labels match. If values is a Series, that’s the index.
If values is a dict, the keys must be the column names,
which must match. If values is a DataFrame, then both the
index and column labels must match.
Returns
-------
DataFrame:
DataFrame of booleans showing whether each element in
the DataFrame is contained in values.
Examples
--------
>>> import cudf
>>> df = cudf.DataFrame({'num_legs': [2, 4], 'num_wings': [2, 0]},
... index=['falcon', 'dog'])
>>> df
num_legs num_wings
falcon 2 2
dog 4 0
When ``values`` is a list check whether every value in the DataFrame
is present in the list (which animals have 0 or 2 legs or wings)
>>> df.isin([0, 2])
num_legs num_wings
falcon True True
dog False True
When ``values`` is a dict, we can pass values to check for each
column separately:
>>> df.isin({'num_wings': [0, 3]})
num_legs num_wings
falcon False False
dog False True
When ``values`` is a Series or DataFrame the index and column must
match. Note that 'falcon' does not match based on the number of legs
in other.
>>> other = cudf.DataFrame({'num_legs': [8, 2], 'num_wings': [0, 2]},
... index=['spider', 'falcon'])
>>> df.isin(other)
num_legs num_wings
falcon True True
dog False False | Whether each element in the DataFrame is contained in values. | [
"Whether",
"each",
"element",
"in",
"the",
"DataFrame",
"is",
"contained",
"in",
"values",
"."
] | def isin(self, values):
"""
Whether each element in the DataFrame is contained in values.
Parameters
----------
values : iterable, Series, DataFrame or dict
The result will only be true at a location if all
the labels match. If values is a Series, that’s the index.
If values is a dict, the keys must be the column names,
which must match. If values is a DataFrame, then both the
index and column labels must match.
Returns
-------
DataFrame:
DataFrame of booleans showing whether each element in
the DataFrame is contained in values.
Examples
--------
>>> import cudf
>>> df = cudf.DataFrame({'num_legs': [2, 4], 'num_wings': [2, 0]},
... index=['falcon', 'dog'])
>>> df
num_legs num_wings
falcon 2 2
dog 4 0
When ``values`` is a list check whether every value in the DataFrame
is present in the list (which animals have 0 or 2 legs or wings)
>>> df.isin([0, 2])
num_legs num_wings
falcon True True
dog False True
When ``values`` is a dict, we can pass values to check for each
column separately:
>>> df.isin({'num_wings': [0, 3]})
num_legs num_wings
falcon False False
dog False True
When ``values`` is a Series or DataFrame the index and column must
match. Note that 'falcon' does not match based on the number of legs
in other.
>>> other = cudf.DataFrame({'num_legs': [8, 2], 'num_wings': [0, 2]},
... index=['spider', 'falcon'])
>>> df.isin(other)
num_legs num_wings
falcon True True
dog False False
"""
if isinstance(values, dict):
result_df = DataFrame()
for col in self._data.names:
if col in values:
val = values[col]
result_df[col] = self._data[col].isin(val)
else:
result_df[col] = column.full(
size=len(self), fill_value=False, dtype="bool"
)
result_df.index = self.index
return result_df
elif isinstance(values, Series):
values = values.reindex(self.index)
result = DataFrame()
# TODO: propagate nulls through isin
# https://github.com/rapidsai/cudf/issues/7556
for col in self._data.names:
if isinstance(
self[col]._column, cudf.core.column.CategoricalColumn
) and isinstance(
values._column, cudf.core.column.CategoricalColumn
):
res = (self._data[col] == values._column).fillna(False)
result[col] = res
elif (
isinstance(
self[col]._column, cudf.core.column.CategoricalColumn
)
or np.issubdtype(self[col].dtype, cudf.dtype("object"))
) or (
isinstance(
values._column, cudf.core.column.CategoricalColumn
)
or np.issubdtype(values.dtype, cudf.dtype("object"))
):
result[col] = utils.scalar_broadcast_to(False, len(self))
else:
result[col] = (self._data[col] == values._column).fillna(
False
)
result.index = self.index
return result
elif isinstance(values, DataFrame):
values = values.reindex(self.index)
result = DataFrame()
for col in self._data.names:
if col in values.columns:
result[col] = (
self._data[col] == values[col]._column
).fillna(False)
else:
result[col] = utils.scalar_broadcast_to(False, len(self))
result.index = self.index
return result
else:
if not is_list_like(values):
raise TypeError(
f"only list-like or dict-like objects are "
f"allowed to be passed to DataFrame.isin(), "
f"you passed a "
f"'{type(values).__name__}'"
)
result_df = DataFrame()
for col in self._data.names:
result_df[col] = self._data[col].isin(values)
result_df.index = self.index
return result_df | [
"def",
"isin",
"(",
"self",
",",
"values",
")",
":",
"if",
"isinstance",
"(",
"values",
",",
"dict",
")",
":",
"result_df",
"=",
"DataFrame",
"(",
")",
"for",
"col",
"in",
"self",
".",
"_data",
".",
"names",
":",
"if",
"col",
"in",
"values",
":",
"val",
"=",
"values",
"[",
"col",
"]",
"result_df",
"[",
"col",
"]",
"=",
"self",
".",
"_data",
"[",
"col",
"]",
".",
"isin",
"(",
"val",
")",
"else",
":",
"result_df",
"[",
"col",
"]",
"=",
"column",
".",
"full",
"(",
"size",
"=",
"len",
"(",
"self",
")",
",",
"fill_value",
"=",
"False",
",",
"dtype",
"=",
"\"bool\"",
")",
"result_df",
".",
"index",
"=",
"self",
".",
"index",
"return",
"result_df",
"elif",
"isinstance",
"(",
"values",
",",
"Series",
")",
":",
"values",
"=",
"values",
".",
"reindex",
"(",
"self",
".",
"index",
")",
"result",
"=",
"DataFrame",
"(",
")",
"# TODO: propagate nulls through isin",
"# https://github.com/rapidsai/cudf/issues/7556",
"for",
"col",
"in",
"self",
".",
"_data",
".",
"names",
":",
"if",
"isinstance",
"(",
"self",
"[",
"col",
"]",
".",
"_column",
",",
"cudf",
".",
"core",
".",
"column",
".",
"CategoricalColumn",
")",
"and",
"isinstance",
"(",
"values",
".",
"_column",
",",
"cudf",
".",
"core",
".",
"column",
".",
"CategoricalColumn",
")",
":",
"res",
"=",
"(",
"self",
".",
"_data",
"[",
"col",
"]",
"==",
"values",
".",
"_column",
")",
".",
"fillna",
"(",
"False",
")",
"result",
"[",
"col",
"]",
"=",
"res",
"elif",
"(",
"isinstance",
"(",
"self",
"[",
"col",
"]",
".",
"_column",
",",
"cudf",
".",
"core",
".",
"column",
".",
"CategoricalColumn",
")",
"or",
"np",
".",
"issubdtype",
"(",
"self",
"[",
"col",
"]",
".",
"dtype",
",",
"cudf",
".",
"dtype",
"(",
"\"object\"",
")",
")",
")",
"or",
"(",
"isinstance",
"(",
"values",
".",
"_column",
",",
"cudf",
".",
"core",
".",
"column",
".",
"CategoricalColumn",
")",
"or",
"np",
".",
"issubdtype",
"(",
"values",
".",
"dtype",
",",
"cudf",
".",
"dtype",
"(",
"\"object\"",
")",
")",
")",
":",
"result",
"[",
"col",
"]",
"=",
"utils",
".",
"scalar_broadcast_to",
"(",
"False",
",",
"len",
"(",
"self",
")",
")",
"else",
":",
"result",
"[",
"col",
"]",
"=",
"(",
"self",
".",
"_data",
"[",
"col",
"]",
"==",
"values",
".",
"_column",
")",
".",
"fillna",
"(",
"False",
")",
"result",
".",
"index",
"=",
"self",
".",
"index",
"return",
"result",
"elif",
"isinstance",
"(",
"values",
",",
"DataFrame",
")",
":",
"values",
"=",
"values",
".",
"reindex",
"(",
"self",
".",
"index",
")",
"result",
"=",
"DataFrame",
"(",
")",
"for",
"col",
"in",
"self",
".",
"_data",
".",
"names",
":",
"if",
"col",
"in",
"values",
".",
"columns",
":",
"result",
"[",
"col",
"]",
"=",
"(",
"self",
".",
"_data",
"[",
"col",
"]",
"==",
"values",
"[",
"col",
"]",
".",
"_column",
")",
".",
"fillna",
"(",
"False",
")",
"else",
":",
"result",
"[",
"col",
"]",
"=",
"utils",
".",
"scalar_broadcast_to",
"(",
"False",
",",
"len",
"(",
"self",
")",
")",
"result",
".",
"index",
"=",
"self",
".",
"index",
"return",
"result",
"else",
":",
"if",
"not",
"is_list_like",
"(",
"values",
")",
":",
"raise",
"TypeError",
"(",
"f\"only list-like or dict-like objects are \"",
"f\"allowed to be passed to DataFrame.isin(), \"",
"f\"you passed a \"",
"f\"'{type(values).__name__}'\"",
")",
"result_df",
"=",
"DataFrame",
"(",
")",
"for",
"col",
"in",
"self",
".",
"_data",
".",
"names",
":",
"result_df",
"[",
"col",
"]",
"=",
"self",
".",
"_data",
"[",
"col",
"]",
".",
"isin",
"(",
"values",
")",
"result_df",
".",
"index",
"=",
"self",
".",
"index",
"return",
"result_df"
] | https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/cudf/cudf/core/dataframe.py#L4993-L5125 |
||
trilinos/Trilinos | 6168be6dd51e35e1cd681e9c4b24433e709df140 | packages/seacas/libraries/ioss/src/visualization/catalyst/phactori/PhactoriDriver.py | python | GetXyzForNodeOrElementParallelOneBlock | (inInputCsData, inIdIsNode,
inGlobalId, outXyz) | check for inGlobalId and set outXyz if present\n\n utility function called by GetXyzForNodeOrElementParallelRecurse1, this\n takes one unstructured grid as input, and sees if it has the node or element\n with id inGlobalId. If it does, the method sets outXyz to the geometric\n location of the node (xyz) or center of the element bounding box (xyz) and\n returns true, otherwise it returns false without changing outXyz\n | check for inGlobalId and set outXyz if present\n\n utility function called by GetXyzForNodeOrElementParallelRecurse1, this\n takes one unstructured grid as input, and sees if it has the node or element\n with id inGlobalId. If it does, the method sets outXyz to the geometric\n location of the node (xyz) or center of the element bounding box (xyz) and\n returns true, otherwise it returns false without changing outXyz\n | [
"check",
"for",
"inGlobalId",
"and",
"set",
"outXyz",
"if",
"present",
"\\",
"n",
"\\",
"n",
"utility",
"function",
"called",
"by",
"GetXyzForNodeOrElementParallelRecurse1",
"this",
"\\",
"n",
"takes",
"one",
"unstructured",
"grid",
"as",
"input",
"and",
"sees",
"if",
"it",
"has",
"the",
"node",
"or",
"element",
"\\",
"n",
"with",
"id",
"inGlobalId",
".",
"If",
"it",
"does",
"the",
"method",
"sets",
"outXyz",
"to",
"the",
"geometric",
"\\",
"n",
"location",
"of",
"the",
"node",
"(",
"xyz",
")",
"or",
"center",
"of",
"the",
"element",
"bounding",
"box",
"(",
"xyz",
")",
"and",
"\\",
"n",
"returns",
"true",
"otherwise",
"it",
"returns",
"false",
"without",
"changing",
"outXyz",
"\\",
"n"
] | def GetXyzForNodeOrElementParallelOneBlock(inInputCsData, inIdIsNode,
inGlobalId, outXyz):
"check for inGlobalId and set outXyz if present\n\n utility function called by GetXyzForNodeOrElementParallelRecurse1, this\n takes one unstructured grid as input, and sees if it has the node or element\n with id inGlobalId. If it does, the method sets outXyz to the geometric\n location of the node (xyz) or center of the element bounding box (xyz) and\n returns true, otherwise it returns false without changing outXyz\n "
if PhactoriDbg(100):
myDebugPrint3('GetXyzForNodeOrElementParallelOneBlock entered\n', 100)
globalIdArray = None
if inIdIsNode:
ptOrElData = inInputCsData.GetPointData()
globalIdArray = ptOrElData.GetArray('GlobalNodeId')
else:
ptOrElData = inInputCsData.GetCellData()
globalIdArray = ptOrElData.GetArray('GlobalElementId')
if globalIdArray == None:
if PhactoriDbg():
myDebugPrint3(" this process/block has no Global Node or Element Id array to contain " + str(inGlobalId) + "\n")
return False
numTuples = globalIdArray.GetNumberOfTuples()
thisProcessHasTheId = False
idIndex = -1
for ii in range(0, numTuples):
#myDebugPrint3(" testing " + str(ii) + " against " + str(inGlobalNodeId) + "\n")
#myDebugPrint3(" type array: " + str(type(globalNodeIdArray)) + " type ii:" + str(type(ii)) + "\n")
vv = globalIdArray.GetTuple1(ii)
if vv == inGlobalId:
thisProcessHasTheId = True
idIndex = ii
break
if not thisProcessHasTheId:
if PhactoriDbg():
myDebugPrint3(" this process/block doesn't contain id" + \
str(inGlobalId) + "\n")
return False
if PhactoriDbg():
myDebugPrint3(" this process/block contains id " + str(inGlobalId) + "\n")
if inIdIsNode:
pointsArray = inInputCsData.GetPoints()
numPoints = pointsArray.GetNumberOfPoints()
if idIndex >= numPoints:
if PhactoriDbg():
myDebugPrint3(" this process/block has problem with index, setting xyz 0\n")
outXyz[0] = 0.0
outXyz[1] = 0.0
outXyz[2] = 0.0
return False
thePoint = pointsArray.GetPoint(idIndex, outXyz)
if PhactoriDbg():
myDebugPrint3(" outXyz set to: " + str(outXyz) + "\n")
return True
else:
myBounds = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
#myCells = inInputCsData.GetCells()
#oneCell = myCells.GetCell(idIndex)
oneCell = inInputCsData.GetCell(idIndex)
oneCell.GetBounds(myBounds)
#myCells.GetCellBounds(idIndex, myBounds)
#ptOrElData.GetCellBounds(idIndex, myBounds)
outXyz[0] = 0.5 * (myBounds[0] + myBounds[1])
outXyz[1] = 0.5 * (myBounds[2] + myBounds[3])
outXyz[2] = 0.5 * (myBounds[4] + myBounds[5])
#xmin, xmax, ymin, ymax, zmin, zmax = myCells.GetCellBounds(idIndex)
#outXyz[0] = 0.5 * (xmin + xmax)
#outXyz[1] = 0.5 * (ymin + ymax)
#outXyz[2] = 0.5 * (zmin + zmax)
return True | [
"def",
"GetXyzForNodeOrElementParallelOneBlock",
"(",
"inInputCsData",
",",
"inIdIsNode",
",",
"inGlobalId",
",",
"outXyz",
")",
":",
"if",
"PhactoriDbg",
"(",
"100",
")",
":",
"myDebugPrint3",
"(",
"'GetXyzForNodeOrElementParallelOneBlock entered\\n'",
",",
"100",
")",
"globalIdArray",
"=",
"None",
"if",
"inIdIsNode",
":",
"ptOrElData",
"=",
"inInputCsData",
".",
"GetPointData",
"(",
")",
"globalIdArray",
"=",
"ptOrElData",
".",
"GetArray",
"(",
"'GlobalNodeId'",
")",
"else",
":",
"ptOrElData",
"=",
"inInputCsData",
".",
"GetCellData",
"(",
")",
"globalIdArray",
"=",
"ptOrElData",
".",
"GetArray",
"(",
"'GlobalElementId'",
")",
"if",
"globalIdArray",
"==",
"None",
":",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\" this process/block has no Global Node or Element Id array to contain \"",
"+",
"str",
"(",
"inGlobalId",
")",
"+",
"\"\\n\"",
")",
"return",
"False",
"numTuples",
"=",
"globalIdArray",
".",
"GetNumberOfTuples",
"(",
")",
"thisProcessHasTheId",
"=",
"False",
"idIndex",
"=",
"-",
"1",
"for",
"ii",
"in",
"range",
"(",
"0",
",",
"numTuples",
")",
":",
"#myDebugPrint3(\" testing \" + str(ii) + \" against \" + str(inGlobalNodeId) + \"\\n\")",
"#myDebugPrint3(\" type array: \" + str(type(globalNodeIdArray)) + \" type ii:\" + str(type(ii)) + \"\\n\")",
"vv",
"=",
"globalIdArray",
".",
"GetTuple1",
"(",
"ii",
")",
"if",
"vv",
"==",
"inGlobalId",
":",
"thisProcessHasTheId",
"=",
"True",
"idIndex",
"=",
"ii",
"break",
"if",
"not",
"thisProcessHasTheId",
":",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\" this process/block doesn't contain id\"",
"+",
"str",
"(",
"inGlobalId",
")",
"+",
"\"\\n\"",
")",
"return",
"False",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\" this process/block contains id \"",
"+",
"str",
"(",
"inGlobalId",
")",
"+",
"\"\\n\"",
")",
"if",
"inIdIsNode",
":",
"pointsArray",
"=",
"inInputCsData",
".",
"GetPoints",
"(",
")",
"numPoints",
"=",
"pointsArray",
".",
"GetNumberOfPoints",
"(",
")",
"if",
"idIndex",
">=",
"numPoints",
":",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\" this process/block has problem with index, setting xyz 0\\n\"",
")",
"outXyz",
"[",
"0",
"]",
"=",
"0.0",
"outXyz",
"[",
"1",
"]",
"=",
"0.0",
"outXyz",
"[",
"2",
"]",
"=",
"0.0",
"return",
"False",
"thePoint",
"=",
"pointsArray",
".",
"GetPoint",
"(",
"idIndex",
",",
"outXyz",
")",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\" outXyz set to: \"",
"+",
"str",
"(",
"outXyz",
")",
"+",
"\"\\n\"",
")",
"return",
"True",
"else",
":",
"myBounds",
"=",
"[",
"0.0",
",",
"0.0",
",",
"0.0",
",",
"0.0",
",",
"0.0",
",",
"0.0",
"]",
"#myCells = inInputCsData.GetCells()",
"#oneCell = myCells.GetCell(idIndex)",
"oneCell",
"=",
"inInputCsData",
".",
"GetCell",
"(",
"idIndex",
")",
"oneCell",
".",
"GetBounds",
"(",
"myBounds",
")",
"#myCells.GetCellBounds(idIndex, myBounds)",
"#ptOrElData.GetCellBounds(idIndex, myBounds)",
"outXyz",
"[",
"0",
"]",
"=",
"0.5",
"*",
"(",
"myBounds",
"[",
"0",
"]",
"+",
"myBounds",
"[",
"1",
"]",
")",
"outXyz",
"[",
"1",
"]",
"=",
"0.5",
"*",
"(",
"myBounds",
"[",
"2",
"]",
"+",
"myBounds",
"[",
"3",
"]",
")",
"outXyz",
"[",
"2",
"]",
"=",
"0.5",
"*",
"(",
"myBounds",
"[",
"4",
"]",
"+",
"myBounds",
"[",
"5",
"]",
")",
"#xmin, xmax, ymin, ymax, zmin, zmax = myCells.GetCellBounds(idIndex)",
"#outXyz[0] = 0.5 * (xmin + xmax)",
"#outXyz[1] = 0.5 * (ymin + ymax)",
"#outXyz[2] = 0.5 * (zmin + zmax)",
"return",
"True"
] | https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/libraries/ioss/src/visualization/catalyst/phactori/PhactoriDriver.py#L3999-L4071 |
||
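Stripped of the id lookup and debug logging, the element branch reduces to the midpoint of the VTK cell's axis-aligned bounding box, which GetBounds reports as [xmin, xmax, ymin, ymax, zmin, zmax]:

def bounds_center(bounds):
    # Midpoint along each axis of a VTK-style bounds sextuple.
    xmin, xmax, ymin, ymax, zmin, zmax = bounds
    return [0.5 * (xmin + xmax), 0.5 * (ymin + ymax), 0.5 * (zmin + zmax)]

print(bounds_center([0.0, 2.0, -1.0, 1.0, 4.0, 6.0]))  # [1.0, 0.0, 5.0]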
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/grid.py | python | Grid.GetDefaultCellTextColour | (*args, **kwargs) | return _grid.Grid_GetDefaultCellTextColour(*args, **kwargs) | GetDefaultCellTextColour(self) -> Colour | GetDefaultCellTextColour(self) -> Colour | [
"GetDefaultCellTextColour",
"(",
"self",
")",
"-",
">",
"Colour"
] | def GetDefaultCellTextColour(*args, **kwargs):
"""GetDefaultCellTextColour(self) -> Colour"""
return _grid.Grid_GetDefaultCellTextColour(*args, **kwargs) | [
"def",
"GetDefaultCellTextColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_grid",
".",
"Grid_GetDefaultCellTextColour",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/grid.py#L1778-L1780 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py2/scipy/signal/ltisys.py | python | _order_complex_poles | (poles) | return ordered_poles | Check we have complex conjugates pairs and reorder P according to YT, ie
real_poles, complex_i, conjugate complex_i, ....
The lexicographic sort on the complex poles is added to help the user to
compare sets of poles. | Check we have complex conjugates pairs and reorder P according to YT, ie
real_poles, complex_i, conjugate complex_i, ....
The lexicographic sort on the complex poles is added to help the user to
compare sets of poles. | [
"Check",
"we",
"have",
"complex",
"conjugates",
"pairs",
"and",
"reorder",
"P",
"according",
"to",
"YT",
"ie",
"real_poles",
"complex_i",
"conjugate",
"complex_i",
"....",
"The",
"lexicographic",
"sort",
"on",
"the",
"complex",
"poles",
"is",
"added",
"to",
"help",
"the",
"user",
"to",
"compare",
"sets",
"of",
"poles",
"."
] | def _order_complex_poles(poles):
"""
Check we have complex conjugates pairs and reorder P according to YT, ie
real_poles, complex_i, conjugate complex_i, ....
The lexicographic sort on the complex poles is added to help the user to
compare sets of poles.
"""
ordered_poles = np.sort(poles[np.isreal(poles)])
im_poles = []
for p in np.sort(poles[np.imag(poles) < 0]):
if np.conj(p) in poles:
im_poles.extend((p, np.conj(p)))
ordered_poles = np.hstack((ordered_poles, im_poles))
if poles.shape[0] != len(ordered_poles):
raise ValueError("Complex poles must come with their conjugates")
return ordered_poles | [
"def",
"_order_complex_poles",
"(",
"poles",
")",
":",
"ordered_poles",
"=",
"np",
".",
"sort",
"(",
"poles",
"[",
"np",
".",
"isreal",
"(",
"poles",
")",
"]",
")",
"im_poles",
"=",
"[",
"]",
"for",
"p",
"in",
"np",
".",
"sort",
"(",
"poles",
"[",
"np",
".",
"imag",
"(",
"poles",
")",
"<",
"0",
"]",
")",
":",
"if",
"np",
".",
"conj",
"(",
"p",
")",
"in",
"poles",
":",
"im_poles",
".",
"extend",
"(",
"(",
"p",
",",
"np",
".",
"conj",
"(",
"p",
")",
")",
")",
"ordered_poles",
"=",
"np",
".",
"hstack",
"(",
"(",
"ordered_poles",
",",
"im_poles",
")",
")",
"if",
"poles",
".",
"shape",
"[",
"0",
"]",
"!=",
"len",
"(",
"ordered_poles",
")",
":",
"raise",
"ValueError",
"(",
"\"Complex poles must come with their conjugates\"",
")",
"return",
"ordered_poles"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/signal/ltisys.py#L2548-L2565 |
|
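A worked example of the ordering contract, with the body restated as a free function for a quick check: real poles first in sorted order, then each negative-imaginary pole immediately followed by its conjugate:

import numpy as np

def order_complex_poles(poles):
    ordered = np.sort(poles[np.isreal(poles)])
    im_poles = []
    for p in np.sort(poles[np.imag(poles) < 0]):
        if np.conj(p) in poles:
            im_poles.extend((p, np.conj(p)))
    ordered = np.hstack((ordered, im_poles))
    if poles.shape[0] != len(ordered):
        raise ValueError("Complex poles must come with their conjugates")
    return ordered

poles = np.array([1.0 + 0j, -2.0 + 0j, 3.0 - 1.0j, 3.0 + 1.0j])
print(order_complex_poles(poles))  # [-2.+0.j  1.+0.j  3.-1.j  3.+1.j]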
nasa/astrobee | 9241e67e6692810d6e275abb3165b6d02f4ca5ef | scripts/git/cpplint.py | python | CheckRValueReference | (filename, clean_lines, linenum, nesting_state, error) | Check for rvalue references.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
nesting_state: A NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: The function to call with any errors found. | Check for rvalue references. | [
"Check",
"for",
"rvalue",
"references",
"."
] | def CheckRValueReference(filename, clean_lines, linenum, nesting_state, error):
"""Check for rvalue references.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
nesting_state: A NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: The function to call with any errors found.
"""
# Find lines missing spaces around &&.
# TODO(unknown): currently we don't check for rvalue references
# with spaces surrounding the && to avoid false positives with
# boolean expressions.
line = clean_lines.elided[linenum]
match = Match(r"^(.*\S)&&", line)
if not match:
match = Match(r"(.*)&&\S", line)
if (not match) or "(&&)" in line or Search(r"\boperator\s*$", match.group(1)):
return
# Either poorly formed && or an rvalue reference, check the context
# to get a more accurate error message. Mostly we want to determine
# if what's to the left of "&&" is a type or not.
and_pos = len(match.group(1))
if IsRValueType(clean_lines, nesting_state, linenum, and_pos):
if not IsRValueAllowed(clean_lines, linenum):
error(
filename,
linenum,
"build/c++11",
3,
"RValue references are an unapproved C++ feature.",
)
else:
error(filename, linenum, "whitespace/operators", 3, "Missing spaces around &&") | [
"def",
"CheckRValueReference",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"nesting_state",
",",
"error",
")",
":",
"# Find lines missing spaces around &&.",
"# TODO(unknown): currently we don't check for rvalue references",
"# with spaces surrounding the && to avoid false positives with",
"# boolean expressions.",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"match",
"=",
"Match",
"(",
"r\"^(.*\\S)&&\"",
",",
"line",
")",
"if",
"not",
"match",
":",
"match",
"=",
"Match",
"(",
"r\"(.*)&&\\S\"",
",",
"line",
")",
"if",
"(",
"not",
"match",
")",
"or",
"\"(&&)\"",
"in",
"line",
"or",
"Search",
"(",
"r\"\\boperator\\s*$\"",
",",
"match",
".",
"group",
"(",
"1",
")",
")",
":",
"return",
"# Either poorly formed && or an rvalue reference, check the context",
"# to get a more accurate error message. Mostly we want to determine",
"# if what's to the left of \"&&\" is a type or not.",
"and_pos",
"=",
"len",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
"if",
"IsRValueType",
"(",
"clean_lines",
",",
"nesting_state",
",",
"linenum",
",",
"and_pos",
")",
":",
"if",
"not",
"IsRValueAllowed",
"(",
"clean_lines",
",",
"linenum",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"\"build/c++11\"",
",",
"3",
",",
"\"RValue references are an unapproved C++ feature.\"",
",",
")",
"else",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"\"whitespace/operators\"",
",",
"3",
",",
"\"Missing spaces around &&\"",
")"
] | https://github.com/nasa/astrobee/blob/9241e67e6692810d6e275abb3165b6d02f4ca5ef/scripts/git/cpplint.py#L4049-L4085 |
||
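The detection step is the interesting part: cpplint's Match anchors at the start of the line while Search does not, so re.match and re.search reproduce them exactly. A toy version of just that step on two sample lines:

import re

def flags_ampamp(line):
    m = re.match(r'^(.*\S)&&', line)     # non-space immediately before &&
    if not m:
        m = re.match(r'(.*)&&\S', line)  # non-space immediately after &&
    if not m or '(&&)' in line or re.search(r'\boperator\s*$', m.group(1)):
        return None
    return len(m.group(1))               # and_pos handed to IsRValueType

print(flags_ampamp('DoStuff(Foo&& arg);'))  # 11 -> rvalue-reference candidate
print(flags_ampamp('if (a && b) {'))        # None -> spaced &&, ignored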
InsightSoftwareConsortium/ITK | 87acfce9a93d928311c38bc371b666b515b9f19d | Modules/ThirdParty/pygccxml/src/pygccxml/parser/directory_cache.py | python | filename_repository_t.release_filename | (self, id_) | Release a file name. | Release a file name. | [
"Release",
"a",
"file",
"name",
"."
] | def release_filename(self, id_):
"""Release a file name.
"""
entry = self.__entries.get(id_)
if entry is None:
raise ValueError("Invalid filename id (%d)" % id_)
# Decrease reference count and check if the entry has to be removed...
if entry.dec_ref_count() == 0:
del self.__entries[id_]
del self.__id_lut[entry.filename] | [
"def",
"release_filename",
"(",
"self",
",",
"id_",
")",
":",
"entry",
"=",
"self",
".",
"__entries",
".",
"get",
"(",
"id_",
")",
"if",
"entry",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Invalid filename id (%d)\"",
"%",
"id_",
")",
"# Decrease reference count and check if the entry has to be removed...",
"if",
"entry",
".",
"dec_ref_count",
"(",
")",
"==",
"0",
":",
"del",
"self",
".",
"__entries",
"[",
"id_",
"]",
"del",
"self",
".",
"__id_lut",
"[",
"entry",
".",
"filename",
"]"
] | https://github.com/InsightSoftwareConsortium/ITK/blob/87acfce9a93d928311c38bc371b666b515b9f19d/Modules/ThirdParty/pygccxml/src/pygccxml/parser/directory_cache.py#L486-L497 |
||
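The surrounding filename_repository_t (not shown in the record) keeps two maps in lockstep, id -> entry and filename -> id. A sketch with a hypothetical minimal entry class makes the invariant visible: dropping the last reference removes both mappings together:

class _Entry(object):
    """Hypothetical minimal entry: filename plus a reference count."""
    def __init__(self, filename):
        self.filename = filename
        self._refcount = 1

    def dec_ref_count(self):
        self._refcount -= 1
        return self._refcount

entries = {1: _Entry("a.hpp")}   # id -> entry
id_lut = {"a.hpp": 1}            # filename -> id

entry = entries.get(1)
if entry.dec_ref_count() == 0:   # last reference released
    del entries[1]
    del id_lut[entry.filename]

print(entries, id_lut)  # {} {}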
carla-simulator/carla | 8854804f4d7748e14d937ec763a2912823a7e5f5 | PythonAPI/carla/agents/navigation/local_planner.py | python | LocalPlanner.reset_vehicle | (self) | Reset the ego-vehicle | Reset the ego-vehicle | [
"Reset",
"the",
"ego",
"-",
"vehicle"
] | def reset_vehicle(self):
"""Reset the ego-vehicle"""
self._vehicle = None | [
"def",
"reset_vehicle",
"(",
"self",
")",
":",
"self",
".",
"_vehicle",
"=",
"None"
] | https://github.com/carla-simulator/carla/blob/8854804f4d7748e14d937ec763a2912823a7e5f5/PythonAPI/carla/agents/navigation/local_planner.py#L110-L112 |
||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/tools/python/src/Lib/mailbox.py | python | Mailbox.get_message | (self, key) | Return a Message representation or raise a KeyError. | Return a Message representation or raise a KeyError. | [
"Return",
"a",
"Message",
"representation",
"or",
"raise",
"a",
"KeyError",
"."
] | def get_message(self, key):
"""Return a Message representation or raise a KeyError."""
raise NotImplementedError('Method must be implemented by subclass') | [
"def",
"get_message",
"(",
"self",
",",
"key",
")",
":",
"raise",
"NotImplementedError",
"(",
"'Method must be implemented by subclass'",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/mailbox.py#L84-L86 |
||
Xilinx/Vitis-AI | fc74d404563d9951b57245443c73bef389f3657f | tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/keras/layers/recurrent.py | python | _generate_zero_filled_state | (batch_size_tensor, state_size, dtype) | Generate a zero filled tensor with shape [batch_size, state_size]. | Generate a zero filled tensor with shape [batch_size, state_size]. | [
"Generate",
"a",
"zero",
"filled",
"tensor",
"with",
"shape",
"[",
"batch_size",
"state_size",
"]",
"."
] | def _generate_zero_filled_state(batch_size_tensor, state_size, dtype):
"""Generate a zero filled tensor with shape [batch_size, state_size]."""
if batch_size_tensor is None or dtype is None:
raise ValueError(
'batch_size and dtype cannot be None while constructing initial state: '
'batch_size={}, dtype={}'.format(batch_size_tensor, dtype))
def create_zeros(unnested_state_size):
flat_dims = tensor_shape.as_shape(unnested_state_size).as_list()
init_state_size = [batch_size_tensor] + flat_dims
return array_ops.zeros(init_state_size, dtype=dtype)
if nest.is_sequence(state_size):
return nest.map_structure(create_zeros, state_size)
else:
return create_zeros(state_size) | [
"def",
"_generate_zero_filled_state",
"(",
"batch_size_tensor",
",",
"state_size",
",",
"dtype",
")",
":",
"if",
"batch_size_tensor",
"is",
"None",
"or",
"dtype",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'batch_size and dtype cannot be None while constructing initial state: '",
"'batch_size={}, dtype={}'",
".",
"format",
"(",
"batch_size_tensor",
",",
"dtype",
")",
")",
"def",
"create_zeros",
"(",
"unnested_state_size",
")",
":",
"flat_dims",
"=",
"tensor_shape",
".",
"as_shape",
"(",
"unnested_state_size",
")",
".",
"as_list",
"(",
")",
"init_state_size",
"=",
"[",
"batch_size_tensor",
"]",
"+",
"flat_dims",
"return",
"array_ops",
".",
"zeros",
"(",
"init_state_size",
",",
"dtype",
"=",
"dtype",
")",
"if",
"nest",
".",
"is_sequence",
"(",
"state_size",
")",
":",
"return",
"nest",
".",
"map_structure",
"(",
"create_zeros",
",",
"state_size",
")",
"else",
":",
"return",
"create_zeros",
"(",
"state_size",
")"
] | https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/keras/layers/recurrent.py#L2755-L2770 |
||
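Behavior sketch in plain numpy, a hypothetical stand-in for the TF version with isinstance(..., (list, tuple)) in place of nest.is_sequence: a nested state_size produces a matching structure of [batch_size] + dims zero arrays, which is how an LSTM gets its [h, c] pair:

import numpy as np

def generate_zero_filled_state(batch_size, state_size, dtype=np.float32):
    def create_zeros(unnested):
        dims = [unnested] if np.isscalar(unnested) else list(unnested)
        return np.zeros([batch_size] + dims, dtype=dtype)
    if isinstance(state_size, (list, tuple)):  # stands in for nest.is_sequence
        return type(state_size)(create_zeros(s) for s in state_size)
    return create_zeros(state_size)

h, c = generate_zero_filled_state(2, (4, 4))  # LSTM-style [h, c] state
print(h.shape, c.shape)  # (2, 4) (2, 4)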
mindspore-ai/mindspore | fb8fd3338605bb34fa5cea054e535a8b1d753fab | mindspore/python/mindspore/dataset/core/validator_helpers.py | python | check_gnn_list_of_pair_or_ndarray | (param, param_name) | Check if the input parameter is a list of tuple or numpy.ndarray.
Args:
param (Union[list[tuple], nd.ndarray]): param.
param_name (str): param_name.
Returns:
Exception: TypeError if error. | Check if the input parameter is a list of tuple or numpy.ndarray. | [
"Check",
"if",
"the",
"input",
"parameter",
"is",
"a",
"list",
"of",
"tuple",
"or",
"numpy",
".",
"ndarray",
"."
] | def check_gnn_list_of_pair_or_ndarray(param, param_name):
"""
Check if the input parameter is a list of tuple or numpy.ndarray.
Args:
param (Union[list[tuple], nd.ndarray]): param.
param_name (str): param_name.
Returns:
Exception: TypeError if error.
"""
type_check(param, (list, np.ndarray), param_name)
if isinstance(param, list):
param_names = ["node_list[{0}]".format(i) for i in range(len(param))]
type_check_list(param, (tuple,), param_names)
for idx, pair in enumerate(param):
if not len(pair) == 2:
raise ValueError("Each member in {0} must be a pair which means length == 2. Got length {1}".format(
param_names[idx], len(pair)))
column_names = ["node_list[{0}], number #{1} element".format(idx, i+1) for i in range(len(pair))]
type_check_list(pair, (int,), column_names)
elif isinstance(param, np.ndarray):
if param.ndim != 2:
raise ValueError("Input ndarray must be in dimension 2. Got {0}".format(param.ndim))
if param.shape[1] != 2:
raise ValueError("Each member in {0} must be a pair which means length == 2. Got length {1}".format(
param_name, param.shape[1]))
if not param.dtype == np.int32:
raise TypeError("Each member in {0} should be of type int32. Got {1}.".format(
param_name, param.dtype)) | [
"def",
"check_gnn_list_of_pair_or_ndarray",
"(",
"param",
",",
"param_name",
")",
":",
"type_check",
"(",
"param",
",",
"(",
"list",
",",
"np",
".",
"ndarray",
")",
",",
"param_name",
")",
"if",
"isinstance",
"(",
"param",
",",
"list",
")",
":",
"param_names",
"=",
"[",
"\"node_list[{0}]\"",
".",
"format",
"(",
"i",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"param",
")",
")",
"]",
"type_check_list",
"(",
"param",
",",
"(",
"tuple",
",",
")",
",",
"param_names",
")",
"for",
"idx",
",",
"pair",
"in",
"enumerate",
"(",
"param",
")",
":",
"if",
"not",
"len",
"(",
"pair",
")",
"==",
"2",
":",
"raise",
"ValueError",
"(",
"\"Each member in {0} must be a pair which means length == 2. Got length {1}\"",
".",
"format",
"(",
"param_names",
"[",
"idx",
"]",
",",
"len",
"(",
"pair",
")",
")",
")",
"column_names",
"=",
"[",
"\"node_list[{0}], number #{1} element\"",
".",
"format",
"(",
"idx",
",",
"i",
"+",
"1",
")",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"pair",
")",
")",
"]",
"type_check_list",
"(",
"pair",
",",
"(",
"int",
",",
")",
",",
"column_names",
")",
"elif",
"isinstance",
"(",
"param",
",",
"np",
".",
"ndarray",
")",
":",
"if",
"param",
".",
"ndim",
"!=",
"2",
":",
"raise",
"ValueError",
"(",
"\"Input ndarray must be in dimension 2. Got {0}\"",
".",
"format",
"(",
"param",
".",
"ndim",
")",
")",
"if",
"param",
".",
"shape",
"[",
"1",
"]",
"!=",
"2",
":",
"raise",
"ValueError",
"(",
"\"Each member in {0} must be a pair which means length == 2. Got length {1}\"",
".",
"format",
"(",
"param_name",
",",
"param",
".",
"shape",
"[",
"1",
"]",
")",
")",
"if",
"not",
"param",
".",
"dtype",
"==",
"np",
".",
"int32",
":",
"raise",
"TypeError",
"(",
"\"Each member in {0} should be of type int32. Got {1}.\"",
".",
"format",
"(",
"param_name",
",",
"param",
".",
"dtype",
")",
")"
] | https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/dataset/core/validator_helpers.py#L682-L711 |
||
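The ndarray branch standalone, reusing the record's own error messages; the pair constraint is just shape (N, 2) plus dtype int32:

import numpy as np

def check_pairs_ndarray(param, param_name="node_list"):
    # ndarray branch of the validator above, extracted for illustration.
    if param.ndim != 2:
        raise ValueError("Input ndarray must be in dimension 2. "
                         "Got {0}".format(param.ndim))
    if param.shape[1] != 2:
        raise ValueError("Each member in {0} must be a pair which means "
                         "length == 2. Got length {1}".format(param_name, param.shape[1]))
    if not param.dtype == np.int32:
        raise TypeError("Each member in {0} should be of type int32. "
                        "Got {1}.".format(param_name, param.dtype))

check_pairs_ndarray(np.array([[1, 2], [3, 4]], dtype=np.int32))  # passes
try:
    check_pairs_ndarray(np.array([[1, 2, 3]], dtype=np.int32))
except ValueError as e:
    print(e)  # pair check fires: length 3 != 2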
microsoft/TSS.MSR | 0f2516fca2cd9929c31d5450e39301c9bde43688 | TSS.Py/src/TpmTypes.py | python | _PRIVATE.toTpm | (self, buf) | TpmMarshaller method | TpmMarshaller method | [
"TpmMarshaller",
"method"
] | def toTpm(self, buf):
""" TpmMarshaller method """
buf.writeSizedByteBuf(self.integrityOuter)
buf.writeSizedByteBuf(self.integrityInner)
buf.writeSizedObj(self.sensitive) | [
"def",
"toTpm",
"(",
"self",
",",
"buf",
")",
":",
"buf",
".",
"writeSizedByteBuf",
"(",
"self",
".",
"integrityOuter",
")",
"buf",
".",
"writeSizedByteBuf",
"(",
"self",
".",
"integrityInner",
")",
"buf",
".",
"writeSizedObj",
"(",
"self",
".",
"sensitive",
")"
] | https://github.com/microsoft/TSS.MSR/blob/0f2516fca2cd9929c31d5450e39301c9bde43688/TSS.Py/src/TpmTypes.py#L8459-L8463 |
||
jackaudio/jack2 | 21b293dbc37d42446141a08922cdec0d2550c6a0 | waflib/Utils.py | python | h_fun | (fun) | Hash functions
:param fun: function to hash
:type fun: function
:return: hash of the function
:rtype: string or bytes | Hash functions | [
"Hash",
"functions"
] | def h_fun(fun):
"""
Hash functions
:param fun: function to hash
:type fun: function
:return: hash of the function
:rtype: string or bytes
"""
try:
return fun.code
except AttributeError:
if isinstance(fun, functools.partial):
code = list(fun.args)
# The method items() provides a sequence of tuples where the first element
# represents an optional argument of the partial function application
#
# The sorting result outcome will be consistent because:
# 1. tuples are compared in order of their elements
# 2. optional argument namess are unique
code.extend(sorted(fun.keywords.items()))
code.append(h_fun(fun.func))
fun.code = h_list(code)
return fun.code
try:
h = inspect.getsource(fun)
except EnvironmentError:
h = 'nocode'
try:
fun.code = h
except AttributeError:
pass
return h | [
"def",
"h_fun",
"(",
"fun",
")",
":",
"try",
":",
"return",
"fun",
".",
"code",
"except",
"AttributeError",
":",
"if",
"isinstance",
"(",
"fun",
",",
"functools",
".",
"partial",
")",
":",
"code",
"=",
"list",
"(",
"fun",
".",
"args",
")",
"# The method items() provides a sequence of tuples where the first element",
"# represents an optional argument of the partial function application",
"#",
"# The sorting result outcome will be consistent because:",
"# 1. tuples are compared in order of their elements",
"# 2. optional argument namess are unique",
"code",
".",
"extend",
"(",
"sorted",
"(",
"fun",
".",
"keywords",
".",
"items",
"(",
")",
")",
")",
"code",
".",
"append",
"(",
"h_fun",
"(",
"fun",
".",
"func",
")",
")",
"fun",
".",
"code",
"=",
"h_list",
"(",
"code",
")",
"return",
"fun",
".",
"code",
"try",
":",
"h",
"=",
"inspect",
".",
"getsource",
"(",
"fun",
")",
"except",
"EnvironmentError",
":",
"h",
"=",
"'nocode'",
"try",
":",
"fun",
".",
"code",
"=",
"h",
"except",
"AttributeError",
":",
"pass",
"return",
"h"
] | https://github.com/jackaudio/jack2/blob/21b293dbc37d42446141a08922cdec0d2550c6a0/waflib/Utils.py#L599-L631 |
||
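Why the partial branch sorts its keywords: two partials built with the same arguments must hash identically even though keyword ordering is incidental. A simplified sketch, with md5 over a repr standing in for waf's h_list and the function name standing in for the recursive h_fun call:

import functools
from hashlib import md5

def h_list(lst):
    return md5(repr(lst).encode()).hexdigest()  # stand-in for waflib's h_list

def h_partial(fun):
    code = list(fun.args)
    code.extend(sorted(fun.keywords.items()))  # normalize keyword order
    code.append(fun.func.__name__)             # stand-in for h_fun(fun.func)
    return h_list(code)

p1 = functools.partial(print, 1, sep=',', end='!')
p2 = functools.partial(print, 1, end='!', sep=',')
print(h_partial(p1) == h_partial(p2))  # True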
rsummers11/CADLab | 976ed959a0b5208bb4173127a7ef732ac73a9b6f | panreas_hnn/hed-globalweight/scripts/cpp_lint.py | python | _Filters | () | return _cpplint_state.filters | Returns the module's list of output filters, as a list. | Returns the module's list of output filters, as a list. | [
"Returns",
"the",
"module",
"s",
"list",
"of",
"output",
"filters",
"as",
"a",
"list",
"."
] | def _Filters():
"""Returns the module's list of output filters, as a list."""
return _cpplint_state.filters | [
"def",
"_Filters",
"(",
")",
":",
"return",
"_cpplint_state",
".",
"filters"
] | https://github.com/rsummers11/CADLab/blob/976ed959a0b5208bb4173127a7ef732ac73a9b6f/panreas_hnn/hed-globalweight/scripts/cpp_lint.py#L792-L794 |