nwo (string, 5–86 chars) | sha (string, 40 chars) | path (string, 4–189 chars) | language (string, 1 class) | identifier (string, 1–94 chars) | parameters (string, 2–4.03k chars) | argument_list (string, 1 class) | return_statement (string, 0–11.5k chars) | docstring (string, 1–33.2k chars) | docstring_summary (string, 0–5.15k chars) | docstring_tokens (sequence) | function (string, 34–151k chars) | function_tokens (sequence) | url (string, 90–278 chars)
---|---|---|---|---|---|---|---|---|---|---|---|---|---
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/secrets.py | python | token_urlsafe | (nbytes=None) | return base64.urlsafe_b64encode(tok).rstrip(b'=').decode('ascii') | Return a random URL-safe text string, in Base64 encoding.
The string has *nbytes* random bytes. If *nbytes* is ``None``
or not supplied, a reasonable default is used.
>>> token_urlsafe(16) #doctest:+SKIP
'Drmhze6EPcv0fN_81Bj-nA' | Return a random URL-safe text string, in Base64 encoding. | [
"Return",
"a",
"random",
"URL",
"-",
"safe",
"text",
"string",
"in",
"Base64",
"encoding",
"."
] | def token_urlsafe(nbytes=None):
"""Return a random URL-safe text string, in Base64 encoding.
The string has *nbytes* random bytes. If *nbytes* is ``None``
or not supplied, a reasonable default is used.
>>> token_urlsafe(16) #doctest:+SKIP
'Drmhze6EPcv0fN_81Bj-nA'
"""
tok = token_bytes(nbytes)
return base64.urlsafe_b64encode(tok).rstrip(b'=').decode('ascii') | [
"def",
"token_urlsafe",
"(",
"nbytes",
"=",
"None",
")",
":",
"tok",
"=",
"token_bytes",
"(",
"nbytes",
")",
"return",
"base64",
".",
"urlsafe_b64encode",
"(",
"tok",
")",
".",
"rstrip",
"(",
"b'='",
")",
".",
"decode",
"(",
"'ascii'",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/secrets.py#L62-L73 |
|
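
A quick sanity check of the length relation implied by the `token_urlsafe` row above: stripping the `=` padding from the Base64 encoding of *nbytes* random bytes leaves ceil(4·nbytes/3) characters. The byte count below is an arbitrary choice.

```python
import secrets

# 16 random bytes -> 22 URL-safe characters once '=' padding is stripped:
# ceil(4 * 16 / 3) == 22, matching the 'Drmhze6EPcv0fN_81Bj-nA' doctest above.
tok = secrets.token_urlsafe(16)
assert len(tok) == (4 * 16 + 2) // 3  # == 22
```
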
kamyu104/LeetCode-Solutions | 77605708a927ea3b85aee5a479db733938c7c211 | Python/logger-rate-limiter.py | python | Logger.__init__ | (self) | Initialize your data structure here. | Initialize your data structure here. | [
"Initialize",
"your",
"data",
"structure",
"here",
"."
] | def __init__(self):
"""
Initialize your data structure here.
"""
self.__dq = collections.deque()
self.__printed = set() | [
"def",
"__init__",
"(",
"self",
")",
":",
"self",
".",
"__dq",
"=",
"collections",
".",
"deque",
"(",
")",
"self",
".",
"__printed",
"=",
"set",
"(",
")"
] | https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/logger-rate-limiter.py#L9-L14 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/agw/aui/auibook.py | python | TabFrame.SetTabCtrlHeight | (self, h) | Sets the tab control height.
:param integer `h`: the tab area height. | Sets the tab control height. | [
"Sets",
"the",
"tab",
"control",
"height",
"."
] | def SetTabCtrlHeight(self, h):
"""
Sets the tab control height.
:param integer `h`: the tab area height.
"""
self._tab_ctrl_height = h | [
"def",
"SetTabCtrlHeight",
"(",
"self",
",",
"h",
")",
":",
"self",
".",
"_tab_ctrl_height",
"=",
"h"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/aui/auibook.py#L2653-L2660 |
||
KratosMultiphysics/Kratos | 0000833054ed0503424eb28205d6508d9ca6cbbc | applications/MultilevelMonteCarloApplication/external_libraries/XMC/xmc/methodDefs_hierarchyOptimiser/optimalSampleNumbers.py | python | multiLevelDoubleAllSamples | (inputDict, newLevels) | return new_samples | Returns a list of sample numbers of same length as the number of entries
in newLevels. Doubles the number of samples from oldHierarchy if an
entry of newLevels exists in oldHierarchy. If not, allocate a default
newSampleNumber to the entry. | Returns a list of sample numbers of same length as the number of entries
in newLevels. Doubles the number of samples from oldHierarchy if an
entry of newLevels exists in oldHierarchy. If not, allocate a default
newSampleNumber to the entry. | [
"Returns",
"a",
"list",
"of",
"sample",
"numbers",
"of",
"same",
"length",
"as",
"the",
"number",
"of",
"entries",
"in",
"newLevels",
".",
"Doubles",
"the",
"number",
"of",
"samples",
"from",
"oldHierarchy",
"if",
"an",
"entry",
"of",
"newLevels",
"exists",
"in",
"oldHierarchy",
".",
"If",
"not",
"allocate",
"a",
"default",
"newSampleNumber",
"to",
"the",
"entry",
"."
] | def multiLevelDoubleAllSamples(inputDict, newLevels):
"""
Returns a list of sample numbers of same length as the number of entries
in newLevels. Doubles the number of samples from oldHierarchy if an
entry of newLevels exists in oldHierarchy. If not, allocate a default
newSampleNumber to the entry.
"""
oldHierarchy = inputDict["oldHierarchy"]
newSampleNumber = inputDict["newSampleNumber"]
new_samples = []
for newLevel in newLevels:
is_level_found = False
for oldElement in oldHierarchy:
if newLevel == oldElement[0]:
new_samples.append(2 * oldElement[1])
is_level_found = True
break
if is_level_found is False:
new_samples.append(newSampleNumber)
return new_samples | [
"def",
"multiLevelDoubleAllSamples",
"(",
"inputDict",
",",
"newLevels",
")",
":",
"oldHierarchy",
"=",
"inputDict",
"[",
"\"oldHierarchy\"",
"]",
"newSampleNumber",
"=",
"inputDict",
"[",
"\"newSampleNumber\"",
"]",
"new_samples",
"=",
"[",
"]",
"for",
"newLevel",
"in",
"newLevels",
":",
"is_level_found",
"=",
"False",
"for",
"oldElement",
"in",
"oldHierarchy",
":",
"if",
"newLevel",
"==",
"oldElement",
"[",
"0",
"]",
":",
"new_samples",
".",
"append",
"(",
"2",
"*",
"oldElement",
"[",
"1",
"]",
")",
"is_level_found",
"=",
"True",
"break",
"if",
"is_level_found",
"is",
"False",
":",
"new_samples",
".",
"append",
"(",
"newSampleNumber",
")",
"return",
"new_samples"
] | https://github.com/KratosMultiphysics/Kratos/blob/0000833054ed0503424eb28205d6508d9ca6cbbc/applications/MultilevelMonteCarloApplication/external_libraries/XMC/xmc/methodDefs_hierarchyOptimiser/optimalSampleNumbers.py#L25-L44 |
|
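
A worked call for the `multiLevelDoubleAllSamples` row above, traced directly from its source; the hierarchy values are hypothetical and chosen only for illustration.

```python
# Hypothetical MLMC hierarchy: (level index, sample count) pairs.
input_dict = {
    "oldHierarchy": [[0, 10], [1, 5]],  # levels already sampled
    "newSampleNumber": 15,              # default for unseen levels
}

print(multiLevelDoubleAllSamples(input_dict, [0, 1, 2]))
# [20, 10, 15] -- existing levels are doubled, level 2 gets the default
```
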
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/_controls.py | python | StaticLine_GetDefaultSize | (*args) | return _controls_.StaticLine_GetDefaultSize(*args) | StaticLine_GetDefaultSize() -> int | StaticLine_GetDefaultSize() -> int | [
"StaticLine_GetDefaultSize",
"()",
"-",
">",
"int"
] | def StaticLine_GetDefaultSize(*args):
"""StaticLine_GetDefaultSize() -> int"""
return _controls_.StaticLine_GetDefaultSize(*args) | [
"def",
"StaticLine_GetDefaultSize",
"(",
"*",
"args",
")",
":",
"return",
"_controls_",
".",
"StaticLine_GetDefaultSize",
"(",
"*",
"args",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_controls.py#L960-L962 |
|
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/corrections_tab_widget/background_corrections_presenter.py | python | BackgroundCorrectionsPresenter.handle_mode_combo_box_changed | (self) | Handles when the background corrections mode is changed. | Handles when the background corrections mode is changed. | [
"Handles",
"when",
"the",
"background",
"corrections",
"mode",
"is",
"changed",
"."
] | def handle_mode_combo_box_changed(self) -> None:
"""Handles when the background corrections mode is changed."""
self.model.set_background_correction_mode(self.view.background_correction_mode)
if self.model.is_background_mode_none():
self.view.set_none_background_correction_options_visible()
elif self.model.is_background_mode_auto():
self.view.set_auto_background_correction_options_visible()
elif self.model.is_background_mode_manual():
self.view.set_manual_background_correction_options_visible()
self._run_background_corrections_for_all() | [
"def",
"handle_mode_combo_box_changed",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"model",
".",
"set_background_correction_mode",
"(",
"self",
".",
"view",
".",
"background_correction_mode",
")",
"if",
"self",
".",
"model",
".",
"is_background_mode_none",
"(",
")",
":",
"self",
".",
"view",
".",
"set_none_background_correction_options_visible",
"(",
")",
"elif",
"self",
".",
"model",
".",
"is_background_mode_auto",
"(",
")",
":",
"self",
".",
"view",
".",
"set_auto_background_correction_options_visible",
"(",
")",
"elif",
"self",
".",
"model",
".",
"is_background_mode_manual",
"(",
")",
":",
"self",
".",
"view",
".",
"set_manual_background_correction_options_visible",
"(",
")",
"self",
".",
"_run_background_corrections_for_all",
"(",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/corrections_tab_widget/background_corrections_presenter.py#L61-L71 |
||
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/keras/utils/tf_utils.py | python | type_spec_from_value | (value) | Grab type_spec without converting array-likes to tensors. | Grab type_spec without converting array-likes to tensors. | [
"Grab",
"type_spec",
"without",
"converting",
"array",
"-",
"likes",
"to",
"tensors",
"."
] | def type_spec_from_value(value):
"""Grab type_spec without converting array-likes to tensors."""
if is_extension_type(value):
return value._type_spec # pylint: disable=protected-access
# Get a TensorSpec for array-like data without
# converting the data to a Tensor
if hasattr(value, 'shape') and hasattr(value, 'dtype'):
return tensor_spec.TensorSpec(value.shape, value.dtype)
else:
return type_spec.type_spec_from_value(value) | [
"def",
"type_spec_from_value",
"(",
"value",
")",
":",
"if",
"is_extension_type",
"(",
"value",
")",
":",
"return",
"value",
".",
"_type_spec",
"# pylint: disable=protected-access",
"# Get a TensorSpec for array-like data without",
"# converting the data to a Tensor",
"if",
"hasattr",
"(",
"value",
",",
"'shape'",
")",
"and",
"hasattr",
"(",
"value",
",",
"'dtype'",
")",
":",
"return",
"tensor_spec",
".",
"TensorSpec",
"(",
"value",
".",
"shape",
",",
"value",
".",
"dtype",
")",
"else",
":",
"return",
"type_spec",
".",
"type_spec_from_value",
"(",
"value",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/utils/tf_utils.py#L374-L383 |
||
thalium/icebox | 99d147d5b9269222225443ce171b4fd46d8985d4 | third_party/virtualbox/src/VBox/GuestHost/OpenGL/glapi_parser/apiutil.py | python | ChromiumRelOpCode | (funcName) | return d[funcName].chrelopcode | Return list of Chromium-specific properties of the named GL function. | Return list of Chromium-specific properties of the named GL function. | [
"Return",
"list",
"of",
"Chromium",
"-",
"specific",
"properties",
"of",
"the",
"named",
"GL",
"function",
"."
] | def ChromiumRelOpCode(funcName):
"""Return list of Chromium-specific properties of the named GL function."""
d = GetFunctionDict()
return d[funcName].chrelopcode | [
"def",
"ChromiumRelOpCode",
"(",
"funcName",
")",
":",
"d",
"=",
"GetFunctionDict",
"(",
")",
"return",
"d",
"[",
"funcName",
"]",
".",
"chrelopcode"
] | https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/VBox/GuestHost/OpenGL/glapi_parser/apiutil.py#L321-L324 |
|
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/grouping_tab_widget/grouping_tab_widget_presenter.py | python | GroupingTabPresenter.text_for_description | (self) | return text | Generate the text for the description edit at the top of the widget. | Generate the text for the description edit at the top of the widget. | [
"Generate",
"the",
"text",
"for",
"the",
"description",
"edit",
"at",
"the",
"top",
"of",
"the",
"widget",
"."
] | def text_for_description(self):
"""
Generate the text for the description edit at the top of the widget.
"""
instrument = self._model.instrument
n_detectors = self._model.num_detectors
main_field = self._model.main_field_direction
text = "{}, {} detectors".format(
instrument, n_detectors)
if main_field:
text += ", main field : {} to muon polarization".format(main_field)
return text | [
"def",
"text_for_description",
"(",
"self",
")",
":",
"instrument",
"=",
"self",
".",
"_model",
".",
"instrument",
"n_detectors",
"=",
"self",
".",
"_model",
".",
"num_detectors",
"main_field",
"=",
"self",
".",
"_model",
".",
"main_field_direction",
"text",
"=",
"\"{}, {} detectors\"",
".",
"format",
"(",
"instrument",
",",
"n_detectors",
")",
"if",
"main_field",
":",
"text",
"+=",
"\", main field : {} to muon polarization\"",
".",
"format",
"(",
"main_field",
")",
"return",
"text"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/grouping_tab_widget/grouping_tab_widget_presenter.py#L83-L94 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/ipython/py3/IPython/core/completer.py | python | protect_filename | (s, protectables=PROTECTABLES) | Escape a string to protect certain characters. | Escape a string to protect certain characters. | [
"Escape",
"a",
"string",
"to",
"protect",
"certain",
"characters",
"."
] | def protect_filename(s, protectables=PROTECTABLES):
"""Escape a string to protect certain characters."""
if set(s) & set(protectables):
if sys.platform == "win32":
return '"' + s + '"'
else:
return "".join(("\\" + c if c in protectables else c) for c in s)
else:
return s | [
"def",
"protect_filename",
"(",
"s",
",",
"protectables",
"=",
"PROTECTABLES",
")",
":",
"if",
"set",
"(",
"s",
")",
"&",
"set",
"(",
"protectables",
")",
":",
"if",
"sys",
".",
"platform",
"==",
"\"win32\"",
":",
"return",
"'\"'",
"+",
"s",
"+",
"'\"'",
"else",
":",
"return",
"\"\"",
".",
"join",
"(",
"(",
"\"\\\\\"",
"+",
"c",
"if",
"c",
"in",
"protectables",
"else",
"c",
")",
"for",
"c",
"in",
"s",
")",
"else",
":",
"return",
"s"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py3/IPython/core/completer.py#L241-L249 |
||
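
A small usage sketch for the `protect_filename` row above. The default `PROTECTABLES` set is not shown in this excerpt, so the call passes `protectables` explicitly; the asserted result assumes a non-Windows platform (on `win32` the string is double-quoted instead of escaped).

```python
import sys

escaped = protect_filename("my file.txt", protectables=" ")
if sys.platform != "win32":
    assert escaped == "my\\ file.txt"   # i.e. the shell-safe  my\ file.txt
else:
    assert escaped == '"my file.txt"'
```
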
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/idlelib/search.py | python | SearchDialog.find_again | (self, text) | Repeat the last search.
If no search was previously run, open a new search dialog. In
this case, no search is done.
If a search was previously run, the search dialog won't be
shown and the options from the previous search (including the
search pattern) will be used to find the next occurrence
of the pattern. Next is relative based on direction.
Position the window to display the located occurrence in the
text.
Return True if the search was successful and False otherwise. | Repeat the last search. | [
"Repeat",
"the",
"last",
"search",
"."
] | def find_again(self, text):
"""Repeat the last search.
If no search was previously run, open a new search dialog. In
this case, no search is done.
If a search was previously run, the search dialog won't be
shown and the options from the previous search (including the
search pattern) will be used to find the next occurrence
of the pattern. Next is relative based on direction.
Position the window to display the located occurrence in the
text.
Return True if the search was successful and False otherwise.
"""
if not self.engine.getpat():
self.open(text)
return False
if not self.engine.getprog():
return False
res = self.engine.search_text(text)
if res:
line, m = res
i, j = m.span()
first = "%d.%d" % (line, i)
last = "%d.%d" % (line, j)
try:
selfirst = text.index("sel.first")
sellast = text.index("sel.last")
if selfirst == first and sellast == last:
self.bell()
return False
except TclError:
pass
text.tag_remove("sel", "1.0", "end")
text.tag_add("sel", first, last)
text.mark_set("insert", self.engine.isback() and first or last)
text.see("insert")
return True
else:
self.bell()
return False | [
"def",
"find_again",
"(",
"self",
",",
"text",
")",
":",
"if",
"not",
"self",
".",
"engine",
".",
"getpat",
"(",
")",
":",
"self",
".",
"open",
"(",
"text",
")",
"return",
"False",
"if",
"not",
"self",
".",
"engine",
".",
"getprog",
"(",
")",
":",
"return",
"False",
"res",
"=",
"self",
".",
"engine",
".",
"search_text",
"(",
"text",
")",
"if",
"res",
":",
"line",
",",
"m",
"=",
"res",
"i",
",",
"j",
"=",
"m",
".",
"span",
"(",
")",
"first",
"=",
"\"%d.%d\"",
"%",
"(",
"line",
",",
"i",
")",
"last",
"=",
"\"%d.%d\"",
"%",
"(",
"line",
",",
"j",
")",
"try",
":",
"selfirst",
"=",
"text",
".",
"index",
"(",
"\"sel.first\"",
")",
"sellast",
"=",
"text",
".",
"index",
"(",
"\"sel.last\"",
")",
"if",
"selfirst",
"==",
"first",
"and",
"sellast",
"==",
"last",
":",
"self",
".",
"bell",
"(",
")",
"return",
"False",
"except",
"TclError",
":",
"pass",
"text",
".",
"tag_remove",
"(",
"\"sel\"",
",",
"\"1.0\"",
",",
"\"end\"",
")",
"text",
".",
"tag_add",
"(",
"\"sel\"",
",",
"first",
",",
"last",
")",
"text",
".",
"mark_set",
"(",
"\"insert\"",
",",
"self",
".",
"engine",
".",
"isback",
"(",
")",
"and",
"first",
"or",
"last",
")",
"text",
".",
"see",
"(",
"\"insert\"",
")",
"return",
"True",
"else",
":",
"self",
".",
"bell",
"(",
")",
"return",
"False"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/idlelib/search.py#L77-L119 |
||
pmq20/node-packer | 12c46c6e44fbc14d9ee645ebd17d5296b324f7e0 | lts/tools/gyp/pylib/gyp/MSVSProject.py | python | Writer.AddFileConfig | (self, path, config, attrs=None, tools=None) | Adds a configuration to a file.
Args:
path: Relative path to the file.
config: Name of configuration to add.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
Raises:
ValueError: Relative path does not match any file added via AddFiles(). | Adds a configuration to a file. | [
"Adds",
"a",
"configuration",
"to",
"a",
"file",
"."
] | def AddFileConfig(self, path, config, attrs=None, tools=None):
"""Adds a configuration to a file.
Args:
path: Relative path to the file.
config: Name of configuration to add.
attrs: Dict of configuration attributes; may be None.
tools: List of tools (strings or Tool objects); may be None.
Raises:
ValueError: Relative path does not match any file added via AddFiles().
"""
# Find the file node with the right relative path
parent = self.files_dict.get(path)
if not parent:
raise ValueError('AddFileConfig: file "%s" not in project.' % path)
# Add the config to the file node
spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs,
tools)
parent.append(spec) | [
"def",
"AddFileConfig",
"(",
"self",
",",
"path",
",",
"config",
",",
"attrs",
"=",
"None",
",",
"tools",
"=",
"None",
")",
":",
"# Find the file node with the right relative path",
"parent",
"=",
"self",
".",
"files_dict",
".",
"get",
"(",
"path",
")",
"if",
"not",
"parent",
":",
"raise",
"ValueError",
"(",
"'AddFileConfig: file \"%s\" not in project.'",
"%",
"path",
")",
"# Add the config to the file node",
"spec",
"=",
"self",
".",
"_GetSpecForConfiguration",
"(",
"'FileConfiguration'",
",",
"config",
",",
"attrs",
",",
"tools",
")",
"parent",
".",
"append",
"(",
"spec",
")"
] | https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/tools/gyp/pylib/gyp/MSVSProject.py#L166-L186 |
||
MTG/gaia | 0f7214dbdec6f9b651ca34211824841ffba0bc77 | src/doc/doxy2swig.py | python | Doxy2SWIG.get_specific_nodes | (self, node, names) | return dict(nodes) | Given a node and a sequence of strings in `names`, return a
dictionary containing the names as keys and child
`ELEMENT_NODEs`, that have a `tagName` equal to the name. | Given a node and a sequence of strings in `names`, return a
dictionary containing the names as keys and child
`ELEMENT_NODEs`, that have a `tagName` equal to the name. | [
"Given",
"a",
"node",
"and",
"a",
"sequence",
"of",
"strings",
"in",
"names",
"return",
"a",
"dictionary",
"containing",
"the",
"names",
"as",
"keys",
"and",
"child",
"ELEMENT_NODEs",
"that",
"have",
"a",
"tagName",
"equal",
"to",
"the",
"name",
"."
] | def get_specific_nodes(self, node, names):
"""Given a node and a sequence of strings in `names`, return a
dictionary containing the names as keys and child
`ELEMENT_NODEs`, that have a `tagName` equal to the name.
"""
nodes = [(x.tagName, x) for x in node.childNodes
if x.nodeType == x.ELEMENT_NODE and
x.tagName in names]
return dict(nodes) | [
"def",
"get_specific_nodes",
"(",
"self",
",",
"node",
",",
"names",
")",
":",
"nodes",
"=",
"[",
"(",
"x",
".",
"tagName",
",",
"x",
")",
"for",
"x",
"in",
"node",
".",
"childNodes",
"if",
"x",
".",
"nodeType",
"==",
"x",
".",
"ELEMENT_NODE",
"and",
"x",
".",
"tagName",
"in",
"names",
"]",
"return",
"dict",
"(",
"nodes",
")"
] | https://github.com/MTG/gaia/blob/0f7214dbdec6f9b651ca34211824841ffba0bc77/src/doc/doxy2swig.py#L275-L284 |
|
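
A self-contained illustration of the lookup the `get_specific_nodes` row above performs, using a throwaway XML snippet rather than real Doxygen output; the method body is inlined so the sketch runs without the `Doxy2SWIG` class.

```python
from xml.dom import minidom

doc = minidom.parseString(
    "<memberdef><name>f</name><type>int</type><briefdescription/></memberdef>"
)
node = doc.documentElement
names = ("name", "type")

# Same expression as the method body:
nodes = dict((x.tagName, x) for x in node.childNodes
             if x.nodeType == x.ELEMENT_NODE and x.tagName in names)
print(sorted(nodes))  # ['name', 'type'] -- 'briefdescription' is filtered out
```
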
wlanjie/AndroidFFmpeg | 7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf | tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/decimal.py | python | Decimal.__float__ | (self) | return float(s) | Float representation. | Float representation. | [
"Float",
"representation",
"."
] | def __float__(self):
"""Float representation."""
if self._isnan():
if self.is_snan():
raise ValueError("Cannot convert signaling NaN to float")
s = "-nan" if self._sign else "nan"
else:
s = str(self)
return float(s) | [
"def",
"__float__",
"(",
"self",
")",
":",
"if",
"self",
".",
"_isnan",
"(",
")",
":",
"if",
"self",
".",
"is_snan",
"(",
")",
":",
"raise",
"ValueError",
"(",
"\"Cannot convert signaling NaN to float\"",
")",
"s",
"=",
"\"-nan\"",
"if",
"self",
".",
"_sign",
"else",
"\"nan\"",
"else",
":",
"s",
"=",
"str",
"(",
"self",
")",
"return",
"float",
"(",
"s",
")"
] | https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/decimal.py#L1582-L1590 |
|
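
The three branches of the `Decimal.__float__` row above, exercised through the public `float()` protocol (the stdlib `decimal` module behaves the same way):

```python
from decimal import Decimal
import math

assert float(Decimal("1.5")) == 1.5        # ordinary value: converted via str(self)
assert math.isnan(float(Decimal("-NaN")))  # quiet NaN: the "-nan" string path
try:
    float(Decimal("sNaN"))                 # signaling NaN branch
except ValueError as e:
    print(e)  # Cannot convert signaling NaN to float
```
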
miyosuda/TensorFlowAndroidDemo | 35903e0221aa5f109ea2dbef27f20b52e317f42d | jni-build/jni/include/tensorflow/contrib/distributions/python/ops/categorical.py | python | Categorical.log_prob | (self, k, name="log_prob") | Log-probability of class `k`.
Args:
k: `int32` or `int64` Tensor. Must be broadcastable with a `batch_shape`
`Tensor`.
name: A name for this operation (optional).
Returns:
The log-probabilities of the classes indexed by `k` | Log-probability of class `k`. | [
"Log",
"-",
"probability",
"of",
"class",
"k",
"."
] | def log_prob(self, k, name="log_prob"):
"""Log-probability of class `k`.
Args:
k: `int32` or `int64` Tensor. Must be broadcastable with a `batch_shape`
`Tensor`.
name: A name for this operation (optional).
Returns:
The log-probabilities of the classes indexed by `k`
"""
with ops.name_scope(self.name):
with ops.op_scope([k, self.logits], name):
k = ops.convert_to_tensor(k, name="k")
logits = self.logits * array_ops.ones_like(
array_ops.expand_dims(k, -1),
dtype=self.logits.dtype)
k *= array_ops.ones(
array_ops.slice(
array_ops.shape(logits), [0], [array_ops.rank(logits) - 1]),
dtype=k.dtype)
k.set_shape(tensor_shape.TensorShape(logits.get_shape()[:-1]))
return -nn_ops.sparse_softmax_cross_entropy_with_logits(logits, k) | [
"def",
"log_prob",
"(",
"self",
",",
"k",
",",
"name",
"=",
"\"log_prob\"",
")",
":",
"with",
"ops",
".",
"name_scope",
"(",
"self",
".",
"name",
")",
":",
"with",
"ops",
".",
"op_scope",
"(",
"[",
"k",
",",
"self",
".",
"logits",
"]",
",",
"name",
")",
":",
"k",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"k",
",",
"name",
"=",
"\"k\"",
")",
"logits",
"=",
"self",
".",
"logits",
"*",
"array_ops",
".",
"ones_like",
"(",
"array_ops",
".",
"expand_dims",
"(",
"k",
",",
"-",
"1",
")",
",",
"dtype",
"=",
"self",
".",
"logits",
".",
"dtype",
")",
"k",
"*=",
"array_ops",
".",
"ones",
"(",
"array_ops",
".",
"slice",
"(",
"array_ops",
".",
"shape",
"(",
"logits",
")",
",",
"[",
"0",
"]",
",",
"[",
"array_ops",
".",
"rank",
"(",
"logits",
")",
"-",
"1",
"]",
")",
",",
"dtype",
"=",
"k",
".",
"dtype",
")",
"k",
".",
"set_shape",
"(",
"tensor_shape",
".",
"TensorShape",
"(",
"logits",
".",
"get_shape",
"(",
")",
"[",
":",
"-",
"1",
"]",
")",
")",
"return",
"-",
"nn_ops",
".",
"sparse_softmax_cross_entropy_with_logits",
"(",
"logits",
",",
"k",
")"
] | https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/contrib/distributions/python/ops/categorical.py#L117-L141 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/psutil/_pswindows.py | python | cpu_freq | () | return [_common.scpufreq(float(curr), min_, float(max_))] | Return CPU frequency.
On Windows per-cpu frequency is not supported. | Return CPU frequency.
On Windows per-cpu frequency is not supported. | [
"Return",
"CPU",
"frequency",
".",
"On",
"Windows",
"per",
"-",
"cpu",
"frequency",
"is",
"not",
"supported",
"."
] | def cpu_freq():
"""Return CPU frequency.
On Windows per-cpu frequency is not supported.
"""
curr, max_ = cext.cpu_freq()
min_ = 0.0
return [_common.scpufreq(float(curr), min_, float(max_))] | [
"def",
"cpu_freq",
"(",
")",
":",
"curr",
",",
"max_",
"=",
"cext",
".",
"cpu_freq",
"(",
")",
"min_",
"=",
"0.0",
"return",
"[",
"_common",
".",
"scpufreq",
"(",
"float",
"(",
"curr",
")",
",",
"min_",
",",
"float",
"(",
"max_",
")",
")",
"]"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/psutil/_pswindows.py#L320-L326 |
|
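
What this internal `_pswindows.cpu_freq` feeds into, via psutil's public API; the printed numbers are machine-dependent, and per the excerpt `min` is always reported as 0.0 on Windows.

```python
import psutil

freq = psutil.cpu_freq()   # an scpufreq namedtuple, or None if unsupported
if freq is not None:
    print(freq.current, freq.min, freq.max)
```
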
microsoft/TSS.MSR | 0f2516fca2cd9929c31d5450e39301c9bde43688 | TSS.Py/src/TpmTypes.py | python | TPMS_SCHEME_ECDSA.__init__ | (self, hashAlg = TPM_ALG_ID.NULL) | Most of the ECC signature schemes only require a hash algorithm to
complete the definition and can be typed as TPMS_SCHEME_HASH. Anonymous
algorithms also require a count value so they are typed to be
TPMS_SCHEME_ECDAA.
Attributes:
hashAlg (TPM_ALG_ID): The hash algorithm used to digest the message | Most of the ECC signature schemes only require a hash algorithm to
complete the definition and can be typed as TPMS_SCHEME_HASH. Anonymous
algorithms also require a count value so they are typed to be
TPMS_SCHEME_ECDAA. | [
"Most",
"of",
"the",
"ECC",
"signature",
"schemes",
"only",
"require",
"a",
"hash",
"algorithm",
"to",
"complete",
"the",
"definition",
"and",
"can",
"be",
"typed",
"as",
"TPMS_SCHEME_HASH",
".",
"Anonymous",
"algorithms",
"also",
"require",
"a",
"count",
"value",
"so",
"they",
"are",
"typed",
"to",
"be",
"TPMS_SCHEME_ECDAA",
"."
] | def __init__(self, hashAlg = TPM_ALG_ID.NULL):
""" Most of the ECC signature schemes only require a hash algorithm to
complete the definition and can be typed as TPMS_SCHEME_HASH. Anonymous
algorithms also require a count value so they are typed to be
TPMS_SCHEME_ECDAA.
Attributes:
hashAlg (TPM_ALG_ID): The hash algorithm used to digest the message
"""
super(TPMS_SCHEME_ECDSA, self).__init__(hashAlg) | [
"def",
"__init__",
"(",
"self",
",",
"hashAlg",
"=",
"TPM_ALG_ID",
".",
"NULL",
")",
":",
"super",
"(",
"TPMS_SCHEME_ECDSA",
",",
"self",
")",
".",
"__init__",
"(",
"hashAlg",
")"
] | https://github.com/microsoft/TSS.MSR/blob/0f2516fca2cd9929c31d5450e39301c9bde43688/TSS.Py/src/TpmTypes.py#L17713-L17722 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemFramework/v1/AWS/common-code/ServiceClient_Python/cgf_service_client/__init__.py | python | for_url | (url, **kwargs) | return Path(url, **kwargs) | Create a Path object that can be used to make requests using paths relative to the specified url.
Arguments:
url - The url.
**kwargs - Used to configure the Path object. | Create a Path object that can be used to make requests using paths relative to the specified url. | [
"Create",
"a",
"Path",
"object",
"that",
"can",
"be",
"used",
"to",
"make",
"requests",
"using",
"paths",
"relative",
"to",
"the",
"specified",
"url",
"."
] | def for_url(url, **kwargs):
'''Create a Path object that can be used to make requests using paths relative to the specified url.
Arguments:
url - The url.
**kwargs - Used to configure the Path object.
'''
return Path(url, **kwargs) | [
"def",
"for_url",
"(",
"url",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"Path",
"(",
"url",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/AWS/common-code/ServiceClient_Python/cgf_service_client/__init__.py#L17-L27 |
|
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/_misc.py | python | ConfigBase.DeleteGroup | (*args, **kwargs) | return _misc_.ConfigBase_DeleteGroup(*args, **kwargs) | DeleteGroup(self, String key) -> bool
Delete the group (with all subgroups) | DeleteGroup(self, String key) -> bool | [
"DeleteGroup",
"(",
"self",
"String",
"key",
")",
"-",
">",
"bool"
] | def DeleteGroup(*args, **kwargs):
"""
DeleteGroup(self, String key) -> bool
Delete the group (with all subgroups)
"""
return _misc_.ConfigBase_DeleteGroup(*args, **kwargs) | [
"def",
"DeleteGroup",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"ConfigBase_DeleteGroup",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_misc.py#L3354-L3360 |
|
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/ops/nn_grad.py | python | _Conv2DGrad | (op, grad) | return [
gen_nn_ops.conv2d_backprop_input(
shape_0,
op.inputs[1],
grad,
dilations=dilations,
strides=strides,
padding=padding,
explicit_paddings=explicit_paddings,
use_cudnn_on_gpu=use_cudnn_on_gpu,
data_format=data_format),
gen_nn_ops.conv2d_backprop_filter(
op.inputs[0],
shape_1,
grad,
dilations=dilations,
strides=strides,
padding=padding,
explicit_paddings=explicit_paddings,
use_cudnn_on_gpu=use_cudnn_on_gpu,
data_format=data_format)
] | Gradient function for Conv2D. | Gradient function for Conv2D. | [
"Gradient",
"function",
"for",
"Conv2D",
"."
] | def _Conv2DGrad(op, grad):
"""Gradient function for Conv2D."""
dilations = op.get_attr("dilations")
strides = op.get_attr("strides")
padding = op.get_attr("padding")
explicit_paddings = op.get_attr("explicit_paddings")
use_cudnn_on_gpu = op.get_attr("use_cudnn_on_gpu")
data_format = op.get_attr("data_format")
shape_0, shape_1 = array_ops.shape_n([op.inputs[0], op.inputs[1]])
# We call the gen_nn_ops backprop functions instead of nn_ops backprop
# functions for performance reasons in Eager mode. gen_nn_ops functions take an
# `explicit_paddings` parameter, but nn_ops functions do not. So if we were
# to use the nn_ops functions, we would have to convert `padding` and
# `explicit_paddings` into a single `padding` parameter, increasing overhead
# in Eager mode.
return [
gen_nn_ops.conv2d_backprop_input(
shape_0,
op.inputs[1],
grad,
dilations=dilations,
strides=strides,
padding=padding,
explicit_paddings=explicit_paddings,
use_cudnn_on_gpu=use_cudnn_on_gpu,
data_format=data_format),
gen_nn_ops.conv2d_backprop_filter(
op.inputs[0],
shape_1,
grad,
dilations=dilations,
strides=strides,
padding=padding,
explicit_paddings=explicit_paddings,
use_cudnn_on_gpu=use_cudnn_on_gpu,
data_format=data_format)
] | [
"def",
"_Conv2DGrad",
"(",
"op",
",",
"grad",
")",
":",
"dilations",
"=",
"op",
".",
"get_attr",
"(",
"\"dilations\"",
")",
"strides",
"=",
"op",
".",
"get_attr",
"(",
"\"strides\"",
")",
"padding",
"=",
"op",
".",
"get_attr",
"(",
"\"padding\"",
")",
"explicit_paddings",
"=",
"op",
".",
"get_attr",
"(",
"\"explicit_paddings\"",
")",
"use_cudnn_on_gpu",
"=",
"op",
".",
"get_attr",
"(",
"\"use_cudnn_on_gpu\"",
")",
"data_format",
"=",
"op",
".",
"get_attr",
"(",
"\"data_format\"",
")",
"shape_0",
",",
"shape_1",
"=",
"array_ops",
".",
"shape_n",
"(",
"[",
"op",
".",
"inputs",
"[",
"0",
"]",
",",
"op",
".",
"inputs",
"[",
"1",
"]",
"]",
")",
"# We call the gen_nn_ops backprop functions instead of nn_ops backprop",
"# functions for performance reasons in Eager mode. gen_nn_ops functions take a",
"# `explicit_paddings` parameter, but nn_ops functions do not. So if we were",
"# to use the nn_ops functions, we would have to convert `padding` and",
"# `explicit_paddings` into a single `padding` parameter, increasing overhead",
"# in Eager mode.",
"return",
"[",
"gen_nn_ops",
".",
"conv2d_backprop_input",
"(",
"shape_0",
",",
"op",
".",
"inputs",
"[",
"1",
"]",
",",
"grad",
",",
"dilations",
"=",
"dilations",
",",
"strides",
"=",
"strides",
",",
"padding",
"=",
"padding",
",",
"explicit_paddings",
"=",
"explicit_paddings",
",",
"use_cudnn_on_gpu",
"=",
"use_cudnn_on_gpu",
",",
"data_format",
"=",
"data_format",
")",
",",
"gen_nn_ops",
".",
"conv2d_backprop_filter",
"(",
"op",
".",
"inputs",
"[",
"0",
"]",
",",
"shape_1",
",",
"grad",
",",
"dilations",
"=",
"dilations",
",",
"strides",
"=",
"strides",
",",
"padding",
"=",
"padding",
",",
"explicit_paddings",
"=",
"explicit_paddings",
",",
"use_cudnn_on_gpu",
"=",
"use_cudnn_on_gpu",
",",
"data_format",
"=",
"data_format",
")",
"]"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/nn_grad.py#L560-L597 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py2/scipy/signal/filter_design.py | python | bilinear | (b, a, fs=1.0) | return normalize(bprime, aprime) | Return a digital filter from an analog one using a bilinear transform.
The bilinear transform substitutes ``(z-1) / (z+1)`` for ``s``.
See Also
--------
lp2lp, lp2hp, lp2bp, lp2bs
bilinear_zpk | Return a digital filter from an analog one using a bilinear transform. | [
"Return",
"a",
"digital",
"filter",
"from",
"an",
"analog",
"one",
"using",
"a",
"bilinear",
"transform",
"."
] | def bilinear(b, a, fs=1.0):
"""Return a digital filter from an analog one using a bilinear transform.
The bilinear transform substitutes ``(z-1) / (z+1)`` for ``s``.
See Also
--------
lp2lp, lp2hp, lp2bp, lp2bs
bilinear_zpk
"""
fs = float(fs)
a, b = map(atleast_1d, (a, b))
D = len(a) - 1
N = len(b) - 1
artype = float
M = max([N, D])
Np = M
Dp = M
bprime = numpy.zeros(Np + 1, artype)
aprime = numpy.zeros(Dp + 1, artype)
for j in range(Np + 1):
val = 0.0
for i in range(N + 1):
for k in range(i + 1):
for l in range(M - i + 1):
if k + l == j:
val += (comb(i, k) * comb(M - i, l) * b[N - i] *
pow(2 * fs, i) * (-1) ** k)
bprime[j] = real(val)
for j in range(Dp + 1):
val = 0.0
for i in range(D + 1):
for k in range(i + 1):
for l in range(M - i + 1):
if k + l == j:
val += (comb(i, k) * comb(M - i, l) * a[D - i] *
pow(2 * fs, i) * (-1) ** k)
aprime[j] = real(val)
return normalize(bprime, aprime) | [
"def",
"bilinear",
"(",
"b",
",",
"a",
",",
"fs",
"=",
"1.0",
")",
":",
"fs",
"=",
"float",
"(",
"fs",
")",
"a",
",",
"b",
"=",
"map",
"(",
"atleast_1d",
",",
"(",
"a",
",",
"b",
")",
")",
"D",
"=",
"len",
"(",
"a",
")",
"-",
"1",
"N",
"=",
"len",
"(",
"b",
")",
"-",
"1",
"artype",
"=",
"float",
"M",
"=",
"max",
"(",
"[",
"N",
",",
"D",
"]",
")",
"Np",
"=",
"M",
"Dp",
"=",
"M",
"bprime",
"=",
"numpy",
".",
"zeros",
"(",
"Np",
"+",
"1",
",",
"artype",
")",
"aprime",
"=",
"numpy",
".",
"zeros",
"(",
"Dp",
"+",
"1",
",",
"artype",
")",
"for",
"j",
"in",
"range",
"(",
"Np",
"+",
"1",
")",
":",
"val",
"=",
"0.0",
"for",
"i",
"in",
"range",
"(",
"N",
"+",
"1",
")",
":",
"for",
"k",
"in",
"range",
"(",
"i",
"+",
"1",
")",
":",
"for",
"l",
"in",
"range",
"(",
"M",
"-",
"i",
"+",
"1",
")",
":",
"if",
"k",
"+",
"l",
"==",
"j",
":",
"val",
"+=",
"(",
"comb",
"(",
"i",
",",
"k",
")",
"*",
"comb",
"(",
"M",
"-",
"i",
",",
"l",
")",
"*",
"b",
"[",
"N",
"-",
"i",
"]",
"*",
"pow",
"(",
"2",
"*",
"fs",
",",
"i",
")",
"*",
"(",
"-",
"1",
")",
"**",
"k",
")",
"bprime",
"[",
"j",
"]",
"=",
"real",
"(",
"val",
")",
"for",
"j",
"in",
"range",
"(",
"Dp",
"+",
"1",
")",
":",
"val",
"=",
"0.0",
"for",
"i",
"in",
"range",
"(",
"D",
"+",
"1",
")",
":",
"for",
"k",
"in",
"range",
"(",
"i",
"+",
"1",
")",
":",
"for",
"l",
"in",
"range",
"(",
"M",
"-",
"i",
"+",
"1",
")",
":",
"if",
"k",
"+",
"l",
"==",
"j",
":",
"val",
"+=",
"(",
"comb",
"(",
"i",
",",
"k",
")",
"*",
"comb",
"(",
"M",
"-",
"i",
",",
"l",
")",
"*",
"a",
"[",
"D",
"-",
"i",
"]",
"*",
"pow",
"(",
"2",
"*",
"fs",
",",
"i",
")",
"*",
"(",
"-",
"1",
")",
"**",
"k",
")",
"aprime",
"[",
"j",
"]",
"=",
"real",
"(",
"val",
")",
"return",
"normalize",
"(",
"bprime",
",",
"aprime",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/signal/filter_design.py#L1787-L1827 |
|
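
A usage sketch for the `bilinear` row above: discretizing a first-order analog low-pass H(s) = w0 / (s + w0). The corner frequency and sample rate are arbitrary choices.

```python
import numpy as np
from scipy import signal

w0 = 2 * np.pi * 10.0                        # 10 Hz analog corner frequency
bz, az = signal.bilinear([w0], [1.0, w0], fs=100.0)
print(bz, az)                                # digital (b, a) coefficients
```
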
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_carbon/dataview.py | python | DataViewModelNotifier.BeforeReset | (*args, **kwargs) | return _dataview.DataViewModelNotifier_BeforeReset(*args, **kwargs) | BeforeReset(self) -> bool | BeforeReset(self) -> bool | [
"BeforeReset",
"(",
"self",
")",
"-",
">",
"bool"
] | def BeforeReset(*args, **kwargs):
"""BeforeReset(self) -> bool"""
return _dataview.DataViewModelNotifier_BeforeReset(*args, **kwargs) | [
"def",
"BeforeReset",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_dataview",
".",
"DataViewModelNotifier_BeforeReset",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/dataview.py#L259-L261 |
|
cms-sw/cmssw | fd9de012d503d3405420bcbeec0ec879baa57cf2 | Alignment/OfflineValidation/python/TkAlAllInOneTool/genericValidation.py | python | ValidationWithPlots.plottingscriptname | (cls) | override with a classmethod | override with a classmethod | [
"override",
"with",
"a",
"classmethod"
] | def plottingscriptname(cls):
"""override with a classmethod""" | [
"def",
"plottingscriptname",
"(",
"cls",
")",
":"
] | https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Alignment/OfflineValidation/python/TkAlAllInOneTool/genericValidation.py#L607-L608 |
||
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/protobuf/python/google/protobuf/internal/python_message.py | python | _InternalUnpackAny | (msg) | return message | Unpacks Any message and returns the unpacked message.
This internal method is different from the public Any Unpack method, which takes
the target message as an argument. _InternalUnpackAny does not have the
target message type and needs to find the message type in the descriptor pool.
Args:
msg: An Any message to be unpacked.
Returns:
The unpacked message. | Unpacks Any message and returns the unpacked message. | [
"Unpacks",
"Any",
"message",
"and",
"returns",
"the",
"unpacked",
"message",
"."
] | def _InternalUnpackAny(msg):
"""Unpacks Any message and returns the unpacked message.
This internal method is different from the public Any Unpack method, which takes
the target message as an argument. _InternalUnpackAny does not have the
target message type and needs to find the message type in the descriptor pool.
Args:
msg: An Any message to be unpacked.
Returns:
The unpacked message.
"""
type_url = msg.type_url
db = symbol_database.Default()
if not type_url:
return None
# TODO(haberman): For now we just strip the hostname. Better logic will be
# required.
type_name = type_url.split("/")[-1]
descriptor = db.pool.FindMessageTypeByName(type_name)
if descriptor is None:
return None
message_class = db.GetPrototype(descriptor)
message = message_class()
message.ParseFromString(msg.value)
return message | [
"def",
"_InternalUnpackAny",
"(",
"msg",
")",
":",
"type_url",
"=",
"msg",
".",
"type_url",
"db",
"=",
"symbol_database",
".",
"Default",
"(",
")",
"if",
"not",
"type_url",
":",
"return",
"None",
"# TODO(haberman): For now we just strip the hostname. Better logic will be",
"# required.",
"type_name",
"=",
"type_url",
".",
"split",
"(",
"\"/\"",
")",
"[",
"-",
"1",
"]",
"descriptor",
"=",
"db",
".",
"pool",
".",
"FindMessageTypeByName",
"(",
"type_name",
")",
"if",
"descriptor",
"is",
"None",
":",
"return",
"None",
"message_class",
"=",
"db",
".",
"GetPrototype",
"(",
"descriptor",
")",
"message",
"=",
"message_class",
"(",
")",
"message",
".",
"ParseFromString",
"(",
"msg",
".",
"value",
")",
"return",
"message"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/protobuf/python/google/protobuf/internal/python_message.py#L916-L947 |
|
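
The public counterpart of the `_InternalUnpackAny` row above is `Any.Unpack()`, which takes the target message explicitly instead of resolving it through the descriptor pool:

```python
from google.protobuf import any_pb2, timestamp_pb2

ts = timestamp_pb2.Timestamp()
ts.GetCurrentTime()

packed = any_pb2.Any()
packed.Pack(ts)               # sets type_url and the serialized value

target = timestamp_pb2.Timestamp()
assert packed.Unpack(target)  # True: type_url matches the target type
```
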
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/corrections_tab_widget/background_corrections_presenter.py | python | BackgroundCorrectionsPresenter.handle_background_changed | (self) | Handles when a Background table cell is changed. | Handles when a Background table cell is changed. | [
"Handles",
"when",
"a",
"Background",
"table",
"cell",
"is",
"changed",
"."
] | def handle_background_changed(self) -> None:
"""Handles when a Background table cell is changed."""
runs, groups = self._selected_runs_and_groups()
background = self.view.selected_background()
for run, group in zip(runs, groups):
self._update_background_in_view_and_model(run, group, background)
self._perform_background_corrections_for(runs, groups) | [
"def",
"handle_background_changed",
"(",
"self",
")",
"->",
"None",
":",
"runs",
",",
"groups",
"=",
"self",
".",
"_selected_runs_and_groups",
"(",
")",
"background",
"=",
"self",
".",
"view",
".",
"selected_background",
"(",
")",
"for",
"run",
",",
"group",
"in",
"zip",
"(",
"runs",
",",
"groups",
")",
":",
"self",
".",
"_update_background_in_view_and_model",
"(",
"run",
",",
"group",
",",
"background",
")",
"self",
".",
"_perform_background_corrections_for",
"(",
"runs",
",",
"groups",
")"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/corrections_tab_widget/background_corrections_presenter.py#L105-L112 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/pkg_resources/__init__.py | python | non_empty_lines | (path) | Yield non-empty lines from file at path | Yield non-empty lines from file at path | [
"Yield",
"non",
"-",
"empty",
"lines",
"from",
"file",
"at",
"path"
] | def non_empty_lines(path):
"""
Yield non-empty lines from file at path
"""
with open(path) as f:
for line in f:
line = line.strip()
if line:
yield line | [
"def",
"non_empty_lines",
"(",
"path",
")",
":",
"with",
"open",
"(",
"path",
")",
"as",
"f",
":",
"for",
"line",
"in",
"f",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"line",
":",
"yield",
"line"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/pkg_resources/__init__.py#L2122-L2130 |
||
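
A throwaway-file check for the `non_empty_lines` row above (with that definition pasted in): blank and whitespace-only lines are skipped, and the rest come back stripped.

```python
import tempfile

with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as f:
    f.write("first\n\n   \n  second  \n")
    path = f.name

print(list(non_empty_lines(path)))   # ['first', 'second']
```
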
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/_pydecimal.py | python | Context.divide_int | (self, a, b) | Divides two numbers and returns the integer part of the result.
>>> ExtendedContext.divide_int(Decimal('2'), Decimal('3'))
Decimal('0')
>>> ExtendedContext.divide_int(Decimal('10'), Decimal('3'))
Decimal('3')
>>> ExtendedContext.divide_int(Decimal('1'), Decimal('0.3'))
Decimal('3')
>>> ExtendedContext.divide_int(10, 3)
Decimal('3')
>>> ExtendedContext.divide_int(Decimal(10), 3)
Decimal('3')
>>> ExtendedContext.divide_int(10, Decimal(3))
Decimal('3') | Divides two numbers and returns the integer part of the result. | [
"Divides",
"two",
"numbers",
"and",
"returns",
"the",
"integer",
"part",
"of",
"the",
"result",
"."
] | def divide_int(self, a, b):
"""Divides two numbers and returns the integer part of the result.
>>> ExtendedContext.divide_int(Decimal('2'), Decimal('3'))
Decimal('0')
>>> ExtendedContext.divide_int(Decimal('10'), Decimal('3'))
Decimal('3')
>>> ExtendedContext.divide_int(Decimal('1'), Decimal('0.3'))
Decimal('3')
>>> ExtendedContext.divide_int(10, 3)
Decimal('3')
>>> ExtendedContext.divide_int(Decimal(10), 3)
Decimal('3')
>>> ExtendedContext.divide_int(10, Decimal(3))
Decimal('3')
"""
a = _convert_other(a, raiseit=True)
r = a.__floordiv__(b, context=self)
if r is NotImplemented:
raise TypeError("Unable to convert %s to Decimal" % b)
else:
return r | [
"def",
"divide_int",
"(",
"self",
",",
"a",
",",
"b",
")",
":",
"a",
"=",
"_convert_other",
"(",
"a",
",",
"raiseit",
"=",
"True",
")",
"r",
"=",
"a",
".",
"__floordiv__",
"(",
"b",
",",
"context",
"=",
"self",
")",
"if",
"r",
"is",
"NotImplemented",
":",
"raise",
"TypeError",
"(",
"\"Unable to convert %s to Decimal\"",
"%",
"b",
")",
"else",
":",
"return",
"r"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/_pydecimal.py#L4395-L4416 |
||
microsoft/TSS.MSR | 0f2516fca2cd9929c31d5450e39301c9bde43688 | TSS.Py/src/TpmTypes.py | python | TPM2_PCR_Reset_REQUEST.fromTpm | (buf) | return buf.createObj(TPM2_PCR_Reset_REQUEST) | Returns new TPM2_PCR_Reset_REQUEST object constructed from its
marshaled representation in the given TpmBuffer buffer | Returns new TPM2_PCR_Reset_REQUEST object constructed from its
marshaled representation in the given TpmBuffer buffer | [
"Returns",
"new",
"TPM2_PCR_Reset_REQUEST",
"object",
"constructed",
"from",
"its",
"marshaled",
"representation",
"in",
"the",
"given",
"TpmBuffer",
"buffer"
] | def fromTpm(buf):
""" Returns new TPM2_PCR_Reset_REQUEST object constructed from its
marshaled representation in the given TpmBuffer buffer
"""
return buf.createObj(TPM2_PCR_Reset_REQUEST) | [
"def",
"fromTpm",
"(",
"buf",
")",
":",
"return",
"buf",
".",
"createObj",
"(",
"TPM2_PCR_Reset_REQUEST",
")"
] | https://github.com/microsoft/TSS.MSR/blob/0f2516fca2cd9929c31d5450e39301c9bde43688/TSS.Py/src/TpmTypes.py#L14096-L14100 |
|
miyosuda/TensorFlowAndroidMNIST | 7b5a4603d2780a8a2834575706e9001977524007 | jni-build/jni/include/tensorflow/contrib/slim/python/slim/data/parallel_reader.py | python | get_data_files | (data_sources) | return data_files | Get data_files from data_sources.
Args:
data_sources: a list/tuple of files or the location of the data, i.e.
/cns/../train@128, /cns/.../train* or /tmp/.../train*
Returns:
a list of data_files.
Raises:
ValueError: if no data files are found | Get data_files from data_sources. | [
"Get",
"data_files",
"from",
"data_sources",
"."
] | def get_data_files(data_sources):
"""Get data_files from data_sources.
Args:
data_sources: a list/tuple of files or the location of the data, i.e.
/cns/../train@128, /cns/.../train* or /tmp/.../train*
Returns:
a list of data_files.
Raises:
ValueError: if not data files are not found
"""
if isinstance(data_sources, (list, tuple)):
data_files = []
for source in data_sources:
data_files += get_data_files(source)
else:
if '*' in data_sources or '?' in data_sources or '[' in data_sources:
data_files = gfile.Glob(data_sources)
else:
data_files = [data_sources]
if not data_files:
raise ValueError('No data files found in %s', data_sources)
return data_files | [
"def",
"get_data_files",
"(",
"data_sources",
")",
":",
"if",
"isinstance",
"(",
"data_sources",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"data_files",
"=",
"[",
"]",
"for",
"source",
"in",
"data_sources",
":",
"data_files",
"+=",
"get_data_files",
"(",
"source",
")",
"else",
":",
"if",
"'*'",
"in",
"data_sources",
"or",
"'?'",
"in",
"data_sources",
"or",
"'['",
"in",
"data_sources",
":",
"data_files",
"=",
"gfile",
".",
"Glob",
"(",
"data_sources",
")",
"else",
":",
"data_files",
"=",
"[",
"data_sources",
"]",
"if",
"not",
"data_files",
":",
"raise",
"ValueError",
"(",
"'No data files found in %s'",
",",
"data_sources",
")",
"return",
"data_files"
] | https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/slim/python/slim/data/parallel_reader.py#L254-L279 |
|
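
The expansion rule the `get_data_files` row above implements, restated with the stdlib `glob` module so it runs outside TensorFlow (`gfile.Glob` plays the role of `glob.glob` here; the paths are hypothetical):

```python
import glob

def expand(sources):
    # Recurse into lists/tuples, glob anything with wildcard characters,
    # and pass plain paths through unchanged.
    if isinstance(sources, (list, tuple)):
        return [f for s in sources for f in expand(s)]
    if any(ch in sources for ch in "*?["):
        return glob.glob(sources)
    return [sources]

print(expand(["/tmp/data/train*", "/tmp/data/extra.tfrecord"]))
```
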
D-X-Y/caffe-faster-rcnn | eb50c97ff48f3df115d0e85fe0a32b0c7e2aa4cb | scripts/cpp_lint.py | python | _NestingState.Update | (self, filename, clean_lines, linenum, error) | Update nesting state with current line.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found. | Update nesting state with current line. | [
"Update",
"nesting",
"state",
"with",
"current",
"line",
"."
] | def Update(self, filename, clean_lines, linenum, error):
"""Update nesting state with current line.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
error: The function to call with any errors found.
"""
line = clean_lines.elided[linenum]
# Update pp_stack first
self.UpdatePreprocessor(line)
# Count parentheses. This is to avoid adding struct arguments to
# the nesting stack.
if self.stack:
inner_block = self.stack[-1]
depth_change = line.count('(') - line.count(')')
inner_block.open_parentheses += depth_change
# Also check if we are starting or ending an inline assembly block.
if inner_block.inline_asm in (_NO_ASM, _END_ASM):
if (depth_change != 0 and
inner_block.open_parentheses == 1 and
_MATCH_ASM.match(line)):
# Enter assembly block
inner_block.inline_asm = _INSIDE_ASM
else:
# Not entering assembly block. If previous line was _END_ASM,
# we will now shift to _NO_ASM state.
inner_block.inline_asm = _NO_ASM
elif (inner_block.inline_asm == _INSIDE_ASM and
inner_block.open_parentheses == 0):
# Exit assembly block
inner_block.inline_asm = _END_ASM
# Consume namespace declaration at the beginning of the line. Do
# this in a loop so that we catch same line declarations like this:
# namespace proto2 { namespace bridge { class MessageSet; } }
while True:
# Match start of namespace. The "\b\s*" below catches namespace
# declarations even if it weren't followed by a whitespace, this
# is so that we don't confuse our namespace checker. The
# missing spaces will be flagged by CheckSpacing.
namespace_decl_match = Match(r'^\s*namespace\b\s*([:\w]+)?(.*)$', line)
if not namespace_decl_match:
break
new_namespace = _NamespaceInfo(namespace_decl_match.group(1), linenum)
self.stack.append(new_namespace)
line = namespace_decl_match.group(2)
if line.find('{') != -1:
new_namespace.seen_open_brace = True
line = line[line.find('{') + 1:]
# Look for a class declaration in whatever is left of the line
# after parsing namespaces. The regexp accounts for decorated classes
# such as in:
# class LOCKABLE API Object {
# };
#
# Templates with class arguments may confuse the parser, for example:
# template <class T
# class Comparator = less<T>,
# class Vector = vector<T> >
# class HeapQueue {
#
# Because this parser has no nesting state about templates, by the
# time it saw "class Comparator", it may think that it's a new class.
# Nested templates have a similar problem:
# template <
# typename ExportedType,
# typename TupleType,
# template <typename, typename> class ImplTemplate>
#
# To avoid these cases, we ignore classes that are followed by '=' or '>'
class_decl_match = Match(
r'\s*(template\s*<[\w\s<>,:]*>\s*)?'
r'(class|struct)\s+([A-Z_]+\s+)*(\w+(?:::\w+)*)'
r'(([^=>]|<[^<>]*>|<[^<>]*<[^<>]*>\s*>)*)$', line)
if (class_decl_match and
(not self.stack or self.stack[-1].open_parentheses == 0)):
self.stack.append(_ClassInfo(
class_decl_match.group(4), class_decl_match.group(2),
clean_lines, linenum))
line = class_decl_match.group(5)
# If we have not yet seen the opening brace for the innermost block,
# run checks here.
if not self.SeenOpenBrace():
self.stack[-1].CheckBegin(filename, clean_lines, linenum, error)
# Update access control if we are inside a class/struct
if self.stack and isinstance(self.stack[-1], _ClassInfo):
classinfo = self.stack[-1]
access_match = Match(
r'^(.*)\b(public|private|protected|signals)(\s+(?:slots\s*)?)?'
r':(?:[^:]|$)',
line)
if access_match:
classinfo.access = access_match.group(2)
# Check that access keywords are indented +1 space. Skip this
# check if the keywords are not preceded by whitespaces.
indent = access_match.group(1)
if (len(indent) != classinfo.class_indent + 1 and
Match(r'^\s*$', indent)):
if classinfo.is_struct:
parent = 'struct ' + classinfo.name
else:
parent = 'class ' + classinfo.name
slots = ''
if access_match.group(3):
slots = access_match.group(3)
error(filename, linenum, 'whitespace/indent', 3,
'%s%s: should be indented +1 space inside %s' % (
access_match.group(2), slots, parent))
# Consume braces or semicolons from what's left of the line
while True:
# Match first brace, semicolon, or closed parenthesis.
matched = Match(r'^[^{;)}]*([{;)}])(.*)$', line)
if not matched:
break
token = matched.group(1)
if token == '{':
# If namespace or class hasn't seen an opening brace yet, mark
# namespace/class head as complete. Push a new block onto the
# stack otherwise.
if not self.SeenOpenBrace():
self.stack[-1].seen_open_brace = True
else:
self.stack.append(_BlockInfo(True))
if _MATCH_ASM.match(line):
self.stack[-1].inline_asm = _BLOCK_ASM
elif token == ';' or token == ')':
# If we haven't seen an opening brace yet, but we already saw
# a semicolon, this is probably a forward declaration. Pop
# the stack for these.
#
# Similarly, if we haven't seen an opening brace yet, but we
# already saw a closing parenthesis, then these are probably
# function arguments with extra "class" or "struct" keywords.
# Also pop the stack for these.
if not self.SeenOpenBrace():
self.stack.pop()
else: # token == '}'
# Perform end of block checks and pop the stack.
if self.stack:
self.stack[-1].CheckEnd(filename, clean_lines, linenum, error)
self.stack.pop()
line = matched.group(2) | [
"def",
"Update",
"(",
"self",
",",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
":",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"# Update pp_stack first",
"self",
".",
"UpdatePreprocessor",
"(",
"line",
")",
"# Count parentheses. This is to avoid adding struct arguments to",
"# the nesting stack.",
"if",
"self",
".",
"stack",
":",
"inner_block",
"=",
"self",
".",
"stack",
"[",
"-",
"1",
"]",
"depth_change",
"=",
"line",
".",
"count",
"(",
"'('",
")",
"-",
"line",
".",
"count",
"(",
"')'",
")",
"inner_block",
".",
"open_parentheses",
"+=",
"depth_change",
"# Also check if we are starting or ending an inline assembly block.",
"if",
"inner_block",
".",
"inline_asm",
"in",
"(",
"_NO_ASM",
",",
"_END_ASM",
")",
":",
"if",
"(",
"depth_change",
"!=",
"0",
"and",
"inner_block",
".",
"open_parentheses",
"==",
"1",
"and",
"_MATCH_ASM",
".",
"match",
"(",
"line",
")",
")",
":",
"# Enter assembly block",
"inner_block",
".",
"inline_asm",
"=",
"_INSIDE_ASM",
"else",
":",
"# Not entering assembly block. If previous line was _END_ASM,",
"# we will now shift to _NO_ASM state.",
"inner_block",
".",
"inline_asm",
"=",
"_NO_ASM",
"elif",
"(",
"inner_block",
".",
"inline_asm",
"==",
"_INSIDE_ASM",
"and",
"inner_block",
".",
"open_parentheses",
"==",
"0",
")",
":",
"# Exit assembly block",
"inner_block",
".",
"inline_asm",
"=",
"_END_ASM",
"# Consume namespace declaration at the beginning of the line. Do",
"# this in a loop so that we catch same line declarations like this:",
"# namespace proto2 { namespace bridge { class MessageSet; } }",
"while",
"True",
":",
"# Match start of namespace. The \"\\b\\s*\" below catches namespace",
"# declarations even if it weren't followed by a whitespace, this",
"# is so that we don't confuse our namespace checker. The",
"# missing spaces will be flagged by CheckSpacing.",
"namespace_decl_match",
"=",
"Match",
"(",
"r'^\\s*namespace\\b\\s*([:\\w]+)?(.*)$'",
",",
"line",
")",
"if",
"not",
"namespace_decl_match",
":",
"break",
"new_namespace",
"=",
"_NamespaceInfo",
"(",
"namespace_decl_match",
".",
"group",
"(",
"1",
")",
",",
"linenum",
")",
"self",
".",
"stack",
".",
"append",
"(",
"new_namespace",
")",
"line",
"=",
"namespace_decl_match",
".",
"group",
"(",
"2",
")",
"if",
"line",
".",
"find",
"(",
"'{'",
")",
"!=",
"-",
"1",
":",
"new_namespace",
".",
"seen_open_brace",
"=",
"True",
"line",
"=",
"line",
"[",
"line",
".",
"find",
"(",
"'{'",
")",
"+",
"1",
":",
"]",
"# Look for a class declaration in whatever is left of the line",
"# after parsing namespaces. The regexp accounts for decorated classes",
"# such as in:",
"# class LOCKABLE API Object {",
"# };",
"#",
"# Templates with class arguments may confuse the parser, for example:",
"# template <class T",
"# class Comparator = less<T>,",
"# class Vector = vector<T> >",
"# class HeapQueue {",
"#",
"# Because this parser has no nesting state about templates, by the",
"# time it saw \"class Comparator\", it may think that it's a new class.",
"# Nested templates have a similar problem:",
"# template <",
"# typename ExportedType,",
"# typename TupleType,",
"# template <typename, typename> class ImplTemplate>",
"#",
"# To avoid these cases, we ignore classes that are followed by '=' or '>'",
"class_decl_match",
"=",
"Match",
"(",
"r'\\s*(template\\s*<[\\w\\s<>,:]*>\\s*)?'",
"r'(class|struct)\\s+([A-Z_]+\\s+)*(\\w+(?:::\\w+)*)'",
"r'(([^=>]|<[^<>]*>|<[^<>]*<[^<>]*>\\s*>)*)$'",
",",
"line",
")",
"if",
"(",
"class_decl_match",
"and",
"(",
"not",
"self",
".",
"stack",
"or",
"self",
".",
"stack",
"[",
"-",
"1",
"]",
".",
"open_parentheses",
"==",
"0",
")",
")",
":",
"self",
".",
"stack",
".",
"append",
"(",
"_ClassInfo",
"(",
"class_decl_match",
".",
"group",
"(",
"4",
")",
",",
"class_decl_match",
".",
"group",
"(",
"2",
")",
",",
"clean_lines",
",",
"linenum",
")",
")",
"line",
"=",
"class_decl_match",
".",
"group",
"(",
"5",
")",
"# If we have not yet seen the opening brace for the innermost block,",
"# run checks here.",
"if",
"not",
"self",
".",
"SeenOpenBrace",
"(",
")",
":",
"self",
".",
"stack",
"[",
"-",
"1",
"]",
".",
"CheckBegin",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
"# Update access control if we are inside a class/struct",
"if",
"self",
".",
"stack",
"and",
"isinstance",
"(",
"self",
".",
"stack",
"[",
"-",
"1",
"]",
",",
"_ClassInfo",
")",
":",
"classinfo",
"=",
"self",
".",
"stack",
"[",
"-",
"1",
"]",
"access_match",
"=",
"Match",
"(",
"r'^(.*)\\b(public|private|protected|signals)(\\s+(?:slots\\s*)?)?'",
"r':(?:[^:]|$)'",
",",
"line",
")",
"if",
"access_match",
":",
"classinfo",
".",
"access",
"=",
"access_match",
".",
"group",
"(",
"2",
")",
"# Check that access keywords are indented +1 space. Skip this",
"# check if the keywords are not preceded by whitespaces.",
"indent",
"=",
"access_match",
".",
"group",
"(",
"1",
")",
"if",
"(",
"len",
"(",
"indent",
")",
"!=",
"classinfo",
".",
"class_indent",
"+",
"1",
"and",
"Match",
"(",
"r'^\\s*$'",
",",
"indent",
")",
")",
":",
"if",
"classinfo",
".",
"is_struct",
":",
"parent",
"=",
"'struct '",
"+",
"classinfo",
".",
"name",
"else",
":",
"parent",
"=",
"'class '",
"+",
"classinfo",
".",
"name",
"slots",
"=",
"''",
"if",
"access_match",
".",
"group",
"(",
"3",
")",
":",
"slots",
"=",
"access_match",
".",
"group",
"(",
"3",
")",
"error",
"(",
"filename",
",",
"linenum",
",",
"'whitespace/indent'",
",",
"3",
",",
"'%s%s: should be indented +1 space inside %s'",
"%",
"(",
"access_match",
".",
"group",
"(",
"2",
")",
",",
"slots",
",",
"parent",
")",
")",
"# Consume braces or semicolons from what's left of the line",
"while",
"True",
":",
"# Match first brace, semicolon, or closed parenthesis.",
"matched",
"=",
"Match",
"(",
"r'^[^{;)}]*([{;)}])(.*)$'",
",",
"line",
")",
"if",
"not",
"matched",
":",
"break",
"token",
"=",
"matched",
".",
"group",
"(",
"1",
")",
"if",
"token",
"==",
"'{'",
":",
"# If namespace or class hasn't seen a opening brace yet, mark",
"# namespace/class head as complete. Push a new block onto the",
"# stack otherwise.",
"if",
"not",
"self",
".",
"SeenOpenBrace",
"(",
")",
":",
"self",
".",
"stack",
"[",
"-",
"1",
"]",
".",
"seen_open_brace",
"=",
"True",
"else",
":",
"self",
".",
"stack",
".",
"append",
"(",
"_BlockInfo",
"(",
"True",
")",
")",
"if",
"_MATCH_ASM",
".",
"match",
"(",
"line",
")",
":",
"self",
".",
"stack",
"[",
"-",
"1",
"]",
".",
"inline_asm",
"=",
"_BLOCK_ASM",
"elif",
"token",
"==",
"';'",
"or",
"token",
"==",
"')'",
":",
"# If we haven't seen an opening brace yet, but we already saw",
"# a semicolon, this is probably a forward declaration. Pop",
"# the stack for these.",
"#",
"# Similarly, if we haven't seen an opening brace yet, but we",
"# already saw a closing parenthesis, then these are probably",
"# function arguments with extra \"class\" or \"struct\" keywords.",
"# Also pop these stack for these.",
"if",
"not",
"self",
".",
"SeenOpenBrace",
"(",
")",
":",
"self",
".",
"stack",
".",
"pop",
"(",
")",
"else",
":",
"# token == '}'",
"# Perform end of block checks and pop the stack.",
"if",
"self",
".",
"stack",
":",
"self",
".",
"stack",
"[",
"-",
"1",
"]",
".",
"CheckEnd",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"error",
")",
"self",
".",
"stack",
".",
"pop",
"(",
")",
"line",
"=",
"matched",
".",
"group",
"(",
"2",
")"
] | https://github.com/D-X-Y/caffe-faster-rcnn/blob/eb50c97ff48f3df115d0e85fe0a32b0c7e2aa4cb/scripts/cpp_lint.py#L2008-L2162 |
||
apple/swift-lldb | d74be846ef3e62de946df343e8c234bde93a8912 | examples/python/dict_utils.py | python | LookupDictionary.get_keys_for_value | (self, value, fail_value=None) | return fail_value | find the key(s) as a list given a value | find the key(s) as a list given a value | [
"find",
"the",
"key",
"(",
"s",
")",
"as",
"a",
"list",
"given",
"a",
"value"
] | def get_keys_for_value(self, value, fail_value=None):
"""find the key(s) as a list given a value"""
list_result = [item[0] for item in self.items() if item[1] == value]
if len(list_result) > 0:
return list_result
return fail_value | [
"def",
"get_keys_for_value",
"(",
"self",
",",
"value",
",",
"fail_value",
"=",
"None",
")",
":",
"list_result",
"=",
"[",
"item",
"[",
"0",
"]",
"for",
"item",
"in",
"self",
".",
"items",
"(",
")",
"if",
"item",
"[",
"1",
"]",
"==",
"value",
"]",
"if",
"len",
"(",
"list_result",
")",
">",
"0",
":",
"return",
"list_result",
"return",
"fail_value"
] | https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/examples/python/dict_utils.py#L11-L16 |
|
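A minimal usage sketch for the `LookupDictionary.get_keys_for_value` record above. The `dict` base class is an assumption, since the record shows only this one method and not the class definition.

```python
# Reverse lookup on a dict subclass, mirroring the record above.
# Assumption: LookupDictionary derives from dict (the base is not shown).
class LookupDictionary(dict):
    def get_keys_for_value(self, value, fail_value=None):
        """find the key(s) as a list given a value"""
        list_result = [item[0] for item in self.items() if item[1] == value]
        if len(list_result) > 0:
            return list_result
        return fail_value

d = LookupDictionary(a=1, b=2, c=1)
print(d.get_keys_for_value(1))      # ['a', 'c'] (insertion order)
print(d.get_keys_for_value(9, -1))  # -1, the fail_value fallback
```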
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/urllib3/poolmanager.py | python | PoolManager.connection_from_context | (self, request_context) | return self.connection_from_pool_key(pool_key, request_context=request_context) | Get a :class:`ConnectionPool` based on the request context.
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable. | Get a :class:`ConnectionPool` based on the request context. | [
"Get",
"a",
":",
"class",
":",
"ConnectionPool",
"based",
"on",
"the",
"request",
"context",
"."
] | def connection_from_context(self, request_context):
"""
Get a :class:`ConnectionPool` based on the request context.
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable.
"""
scheme = request_context['scheme'].lower()
pool_key_constructor = self.key_fn_by_scheme[scheme]
pool_key = pool_key_constructor(request_context)
return self.connection_from_pool_key(pool_key, request_context=request_context) | [
"def",
"connection_from_context",
"(",
"self",
",",
"request_context",
")",
":",
"scheme",
"=",
"request_context",
"[",
"'scheme'",
"]",
".",
"lower",
"(",
")",
"pool_key_constructor",
"=",
"self",
".",
"key_fn_by_scheme",
"[",
"scheme",
"]",
"pool_key",
"=",
"pool_key_constructor",
"(",
"request_context",
")",
"return",
"self",
".",
"connection_from_pool_key",
"(",
"pool_key",
",",
"request_context",
"=",
"request_context",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/urllib3/poolmanager.py#L229-L240 |
|
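A short sketch of exercising `connection_from_context` on a stock `PoolManager`. Beyond the required `scheme` key, the `host` and `port` entries follow urllib3's usual pool-key fields and should be treated as an assumption here.

```python
# Asking a PoolManager for a connection pool from a request context.
import urllib3

pm = urllib3.PoolManager()
ctx = {"scheme": "https", "host": "example.com", "port": 443}  # extra keys assumed
pool = pm.connection_from_context(ctx)
print(type(pool).__name__)  # HTTPSConnectionPool
```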
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/_abcoll.py | python | MutableMapping.update | (*args, **kwds) | D.update([E, ]**F) -> None. Update D from mapping/iterable E and F.
If E present and has a .keys() method, does: for k in E: D[k] = E[k]
If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v
In either case, this is followed by: for k, v in F.items(): D[k] = v | D.update([E, ]**F) -> None. Update D from mapping/iterable E and F.
If E present and has a .keys() method, does: for k in E: D[k] = E[k]
If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v
In either case, this is followed by: for k, v in F.items(): D[k] = v | [
"D",
".",
"update",
"(",
"[",
"E",
"]",
"**",
"F",
")",
"-",
">",
"None",
".",
"Update",
"D",
"from",
"mapping",
"/",
"iterable",
"E",
"and",
"F",
".",
"If",
"E",
"present",
"and",
"has",
"a",
".",
"keys",
"()",
"method",
"does",
":",
"for",
"k",
"in",
"E",
":",
"D",
"[",
"k",
"]",
"=",
"E",
"[",
"k",
"]",
"If",
"E",
"present",
"and",
"lacks",
".",
"keys",
"()",
"method",
"does",
":",
"for",
"(",
"k",
"v",
")",
"in",
"E",
":",
"D",
"[",
"k",
"]",
"=",
"v",
"In",
"either",
"case",
"this",
"is",
"followed",
"by",
":",
"for",
"k",
"v",
"in",
"F",
".",
"items",
"()",
":",
"D",
"[",
"k",
"]",
"=",
"v"
] | def update(*args, **kwds):
''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F.
If E present and has a .keys() method, does: for k in E: D[k] = E[k]
If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v
In either case, this is followed by: for k, v in F.items(): D[k] = v
'''
if len(args) > 2:
raise TypeError("update() takes at most 2 positional "
"arguments ({} given)".format(len(args)))
elif not args:
raise TypeError("update() takes at least 1 argument (0 given)")
self = args[0]
other = args[1] if len(args) >= 2 else ()
if isinstance(other, Mapping):
for key in other:
self[key] = other[key]
elif hasattr(other, "keys"):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value | [
"def",
"update",
"(",
"*",
"args",
",",
"*",
"*",
"kwds",
")",
":",
"if",
"len",
"(",
"args",
")",
">",
"2",
":",
"raise",
"TypeError",
"(",
"\"update() takes at most 2 positional \"",
"\"arguments ({} given)\"",
".",
"format",
"(",
"len",
"(",
"args",
")",
")",
")",
"elif",
"not",
"args",
":",
"raise",
"TypeError",
"(",
"\"update() takes at least 1 argument (0 given)\"",
")",
"self",
"=",
"args",
"[",
"0",
"]",
"other",
"=",
"args",
"[",
"1",
"]",
"if",
"len",
"(",
"args",
")",
">=",
"2",
"else",
"(",
")",
"if",
"isinstance",
"(",
"other",
",",
"Mapping",
")",
":",
"for",
"key",
"in",
"other",
":",
"self",
"[",
"key",
"]",
"=",
"other",
"[",
"key",
"]",
"elif",
"hasattr",
"(",
"other",
",",
"\"keys\"",
")",
":",
"for",
"key",
"in",
"other",
".",
"keys",
"(",
")",
":",
"self",
"[",
"key",
"]",
"=",
"other",
"[",
"key",
"]",
"else",
":",
"for",
"key",
",",
"value",
"in",
"other",
":",
"self",
"[",
"key",
"]",
"=",
"value",
"for",
"key",
",",
"value",
"in",
"kwds",
".",
"items",
"(",
")",
":",
"self",
"[",
"key",
"]",
"=",
"value"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/_abcoll.py#L526-L550 |
||
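The three branches of `update` above (a `Mapping`, a `.keys()` duck type, an iterable of pairs, each followed by keyword items) can be exercised through any concrete `MutableMapping`. A sketch using Python 3's `collections.abc`, whereas the record itself is the Python 2 `_abcoll` module:

```python
# A minimal MutableMapping; update() is inherited from the ABC mixin.
from collections.abc import MutableMapping

class LowerDict(MutableMapping):
    """Case-insensitive mapping built on a plain dict."""
    def __init__(self):
        self._data = {}
    def __getitem__(self, key):
        return self._data[key.lower()]
    def __setitem__(self, key, value):
        self._data[key.lower()] = value
    def __delitem__(self, key):
        del self._data[key.lower()]
    def __iter__(self):
        return iter(self._data)
    def __len__(self):
        return len(self._data)

d = LowerDict()
d.update({"A": 1}, B=2)  # Mapping branch, then keyword branch
d.update([("C", 3)])     # iterable-of-pairs branch
print(dict(d))           # {'a': 1, 'b': 2, 'c': 3}
```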
gnuradio/gnuradio | 09c3c4fa4bfb1a02caac74cb5334dfe065391e3b | grc/gui/StateCache.py | python | StateCache.get_next_state | (self) | return None | Get the next state and increment the current index.
Returns:
the next state or None | Get the next state and increment the current index. | [
"Get",
"the",
"nest",
"state",
"and",
"increment",
"the",
"current",
"index",
"."
] | def get_next_state(self):
"""
Get the next state and increment the current index.
Returns:
the next state or None
"""
if self.num_next_states > 0:
self.current_state_index = (
self.current_state_index + 1) % STATE_CACHE_SIZE
self.num_next_states = self.num_next_states - 1
self.num_prev_states = self.num_prev_states + 1
return self.get_current_state()
return None | [
"def",
"get_next_state",
"(",
"self",
")",
":",
"if",
"self",
".",
"num_next_states",
">",
"0",
":",
"self",
".",
"current_state_index",
"=",
"(",
"self",
".",
"current_state_index",
"+",
"1",
")",
"%",
"STATE_CACHE_SIZE",
"self",
".",
"num_next_states",
"=",
"self",
".",
"num_next_states",
"-",
"1",
"self",
".",
"num_prev_states",
"=",
"self",
".",
"num_prev_states",
"+",
"1",
"return",
"self",
".",
"get_current_state",
"(",
")",
"return",
"None"
] | https://github.com/gnuradio/gnuradio/blob/09c3c4fa4bfb1a02caac74cb5334dfe065391e3b/grc/gui/StateCache.py#L76-L89 |
|
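A sketch of the ring-buffer bookkeeping `get_next_state` relies on. Only this method (and the `STATE_CACHE_SIZE` name it references) appears in the record, so the constructor and counters below are assumptions made to give a runnable picture:

```python
# Fixed-size undo/redo ring; indices wrap modulo STATE_CACHE_SIZE.
STATE_CACHE_SIZE = 5  # assumed value; the record only references the name

class StateCache(object):
    def __init__(self, initial_state):
        self.cache = [None] * STATE_CACHE_SIZE
        self.current_state_index = 0
        self.cache[0] = initial_state
        self.num_prev_states = 0   # how many undo steps are available
        self.num_next_states = 0   # how many redo steps are available

    def get_current_state(self):
        return self.cache[self.current_state_index]

    def get_next_state(self):
        if self.num_next_states > 0:
            self.current_state_index = (
                self.current_state_index + 1) % STATE_CACHE_SIZE
            self.num_next_states = self.num_next_states - 1
            self.num_prev_states = self.num_prev_states + 1
            return self.get_current_state()
        return None
```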
SoarGroup/Soar | a1c5e249499137a27da60533c72969eef3b8ab6b | scons/scons-local-4.1.0/SCons/Environment.py | python | Base.get_factory | (self, factory, default='File') | return factory | Return a factory function for creating Nodes for this
construction environment. | Return a factory function for creating Nodes for this
construction environment. | [
"Return",
"a",
"factory",
"function",
"for",
"creating",
"Nodes",
"for",
"this",
"construction",
"environment",
"."
] | def get_factory(self, factory, default='File'):
"""Return a factory function for creating Nodes for this
construction environment.
"""
name = default
try:
is_node = issubclass(factory, SCons.Node.FS.Base)
except TypeError:
# The specified factory isn't a Node itself--it's
# most likely None, or possibly a callable.
pass
else:
if is_node:
# The specified factory is a Node (sub)class. Try to
# return the FS method that corresponds to the Node's
# name--that is, we return self.fs.Dir if they want a Dir,
# self.fs.File for a File, etc.
try: name = factory.__name__
except AttributeError: pass
else: factory = None
if not factory:
# They passed us None, or we picked up a name from a specified
# class, so return the FS method. (Note that we *don't*
# use our own self.{Dir,File} methods because that would
# cause env.subst() to be called twice on the file name,
# interfering with files that have $$ in them.)
factory = getattr(self.fs, name)
return factory | [
"def",
"get_factory",
"(",
"self",
",",
"factory",
",",
"default",
"=",
"'File'",
")",
":",
"name",
"=",
"default",
"try",
":",
"is_node",
"=",
"issubclass",
"(",
"factory",
",",
"SCons",
".",
"Node",
".",
"FS",
".",
"Base",
")",
"except",
"TypeError",
":",
"# The specified factory isn't a Node itself--it's",
"# most likely None, or possibly a callable.",
"pass",
"else",
":",
"if",
"is_node",
":",
"# The specified factory is a Node (sub)class. Try to",
"# return the FS method that corresponds to the Node's",
"# name--that is, we return self.fs.Dir if they want a Dir,",
"# self.fs.File for a File, etc.",
"try",
":",
"name",
"=",
"factory",
".",
"__name__",
"except",
"AttributeError",
":",
"pass",
"else",
":",
"factory",
"=",
"None",
"if",
"not",
"factory",
":",
"# They passed us None, or we picked up a name from a specified",
"# class, so return the FS method. (Note that we *don't*",
"# use our own self.{Dir,File} methods because that would",
"# cause env.subst() to be called twice on the file name,",
"# interfering with files that have $$ in them.)",
"factory",
"=",
"getattr",
"(",
"self",
".",
"fs",
",",
"name",
")",
"return",
"factory"
] | https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/Environment.py#L1053-L1080 |
|
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/ops/distributions/util.py | python | _is_known_unsigned_by_dtype | (dt) | return {
dtypes.bool: True,
dtypes.uint8: True,
dtypes.uint16: True,
}.get(dt.base_dtype, False) | Helper returning True if dtype is known to be unsigned. | Helper returning True if dtype is known to be unsigned. | [
"Helper",
"returning",
"True",
"if",
"dtype",
"is",
"known",
"to",
"be",
"unsigned",
"."
] | def _is_known_unsigned_by_dtype(dt):
"""Helper returning True if dtype is known to be unsigned."""
return {
dtypes.bool: True,
dtypes.uint8: True,
dtypes.uint16: True,
}.get(dt.base_dtype, False) | [
"def",
"_is_known_unsigned_by_dtype",
"(",
"dt",
")",
":",
"return",
"{",
"dtypes",
".",
"bool",
":",
"True",
",",
"dtypes",
".",
"uint8",
":",
"True",
",",
"dtypes",
".",
"uint16",
":",
"True",
",",
"}",
".",
"get",
"(",
"dt",
".",
"base_dtype",
",",
"False",
")"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/distributions/util.py#L239-L245 |
|
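The table-plus-`.get(default)` dispatch above is a common alternative to an `if/elif` chain over dtypes: unlisted types fall through to `False` instead of raising `KeyError`. A generic sketch of the same pattern using NumPy dtypes (not TensorFlow's):

```python
# Same lookup-with-default pattern, with NumPy dtypes for illustration.
import numpy as np

_KNOWN_UNSIGNED = {
    np.dtype(np.bool_): True,
    np.dtype(np.uint8): True,
    np.dtype(np.uint16): True,
}

def is_known_unsigned(dt):
    # Unlisted dtypes fall through to False rather than raising.
    return _KNOWN_UNSIGNED.get(np.dtype(dt), False)

print(is_known_unsigned(np.uint8))  # True
print(is_known_unsigned(np.int32))  # False, not listed, default applies
```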
panda3d/panda3d | 833ad89ebad58395d0af0b7ec08538e5e4308265 | direct/src/distributed/ServerRepository.py | python | ServerRepository.handleClientObjectUpdateField | (self, datagram, dgi, targeted = False) | Received an update request from a client. | Received an update request from a client. | [
"Received",
"an",
"update",
"request",
"from",
"a",
"client",
"."
] | def handleClientObjectUpdateField(self, datagram, dgi, targeted = False):
""" Received an update request from a client. """
connection = datagram.getConnection()
client = self.clientsByConnection[connection]
if targeted:
targetId = dgi.getUint32()
doId = dgi.getUint32()
fieldId = dgi.getUint16()
doIdBase = self.getDoIdBase(doId)
owner = self.clientsByDoIdBase.get(doIdBase)
object = owner and owner.objectsByDoId.get(doId)
if not object:
self.notify.warning(
"Ignoring update for unknown object %s from client %s" % (
doId, client.doIdBase))
return
dcfield = object.dclass.getFieldByIndex(fieldId)
if dcfield is None:
self.notify.warning(
"Ignoring update for field %s on object %s from client %s; no such field for class %s." % (
fieldId, doId, client.doIdBase, object.dclass.getName()))
if client != owner:
# This message was not sent by the object's owner.
if not dcfield.hasKeyword('clsend') and not dcfield.hasKeyword('p2p'):
self.notify.warning(
"Ignoring update for %s.%s on object %s from client %s: not owner" % (
object.dclass.getName(), dcfield.getName(), doId, client.doIdBase))
return
# We reformat the message slightly to insert the sender's
# doIdBase.
dg = PyDatagram()
dg.addUint16(OBJECT_UPDATE_FIELD_CMU)
dg.addUint32(client.doIdBase)
dg.addUint32(doId)
dg.addUint16(fieldId)
dg.appendData(dgi.getRemainingBytes())
if targeted:
# A targeted update: only to the indicated client.
target = self.clientsByDoIdBase.get(targetId)
if not target:
self.notify.warning(
"Ignoring targeted update to %s for %s.%s on object %s from client %s: target not known" % (
targetId,
object.dclass.getName(), dcfield.getName(), doId, client.doIdBase))
return
self.cw.send(dg, target.connection)
self.needsFlush.add(target)
elif dcfield.hasKeyword('p2p'):
# p2p: to object owner only
self.cw.send(dg, owner.connection)
self.needsFlush.add(owner)
elif dcfield.hasKeyword('broadcast'):
# Broadcast: to everyone except orig sender
self.sendToZoneExcept(object.zoneId, dg, [client])
elif dcfield.hasKeyword('reflect'):
# Reflect: broadcast to everyone including orig sender
self.sendToZoneExcept(object.zoneId, dg, [])
else:
self.notify.warning(
"Message is not broadcast or p2p") | [
"def",
"handleClientObjectUpdateField",
"(",
"self",
",",
"datagram",
",",
"dgi",
",",
"targeted",
"=",
"False",
")",
":",
"connection",
"=",
"datagram",
".",
"getConnection",
"(",
")",
"client",
"=",
"self",
".",
"clientsByConnection",
"[",
"connection",
"]",
"if",
"targeted",
":",
"targetId",
"=",
"dgi",
".",
"getUint32",
"(",
")",
"doId",
"=",
"dgi",
".",
"getUint32",
"(",
")",
"fieldId",
"=",
"dgi",
".",
"getUint16",
"(",
")",
"doIdBase",
"=",
"self",
".",
"getDoIdBase",
"(",
"doId",
")",
"owner",
"=",
"self",
".",
"clientsByDoIdBase",
".",
"get",
"(",
"doIdBase",
")",
"object",
"=",
"owner",
"and",
"owner",
".",
"objectsByDoId",
".",
"get",
"(",
"doId",
")",
"if",
"not",
"object",
":",
"self",
".",
"notify",
".",
"warning",
"(",
"\"Ignoring update for unknown object %s from client %s\"",
"%",
"(",
"doId",
",",
"client",
".",
"doIdBase",
")",
")",
"return",
"dcfield",
"=",
"object",
".",
"dclass",
".",
"getFieldByIndex",
"(",
"fieldId",
")",
"if",
"dcfield",
"is",
"None",
":",
"self",
".",
"notify",
".",
"warning",
"(",
"\"Ignoring update for field %s on object %s from client %s; no such field for class %s.\"",
"%",
"(",
"fieldId",
",",
"doId",
",",
"client",
".",
"doIdBase",
",",
"object",
".",
"dclass",
".",
"getName",
"(",
")",
")",
")",
"if",
"client",
"!=",
"owner",
":",
"# This message was not sent by the object's owner.",
"if",
"not",
"dcfield",
".",
"hasKeyword",
"(",
"'clsend'",
")",
"and",
"not",
"dcfield",
".",
"hasKeyword",
"(",
"'p2p'",
")",
":",
"self",
".",
"notify",
".",
"warning",
"(",
"\"Ignoring update for %s.%s on object %s from client %s: not owner\"",
"%",
"(",
"object",
".",
"dclass",
".",
"getName",
"(",
")",
",",
"dcfield",
".",
"getName",
"(",
")",
",",
"doId",
",",
"client",
".",
"doIdBase",
")",
")",
"return",
"# We reformat the message slightly to insert the sender's",
"# doIdBase.",
"dg",
"=",
"PyDatagram",
"(",
")",
"dg",
".",
"addUint16",
"(",
"OBJECT_UPDATE_FIELD_CMU",
")",
"dg",
".",
"addUint32",
"(",
"client",
".",
"doIdBase",
")",
"dg",
".",
"addUint32",
"(",
"doId",
")",
"dg",
".",
"addUint16",
"(",
"fieldId",
")",
"dg",
".",
"appendData",
"(",
"dgi",
".",
"getRemainingBytes",
"(",
")",
")",
"if",
"targeted",
":",
"# A targeted update: only to the indicated client.",
"target",
"=",
"self",
".",
"clientsByDoIdBase",
".",
"get",
"(",
"targetId",
")",
"if",
"not",
"target",
":",
"self",
".",
"notify",
".",
"warning",
"(",
"\"Ignoring targeted update to %s for %s.%s on object %s from client %s: target not known\"",
"%",
"(",
"targetId",
",",
"dclass",
".",
"getName",
"(",
")",
",",
"dcfield",
".",
"getName",
"(",
")",
",",
"doId",
",",
"client",
".",
"doIdBase",
")",
")",
"return",
"self",
".",
"cw",
".",
"send",
"(",
"dg",
",",
"target",
".",
"connection",
")",
"self",
".",
"needsFlush",
".",
"add",
"(",
"target",
")",
"elif",
"dcfield",
".",
"hasKeyword",
"(",
"'p2p'",
")",
":",
"# p2p: to object owner only",
"self",
".",
"cw",
".",
"send",
"(",
"dg",
",",
"owner",
".",
"connection",
")",
"self",
".",
"needsFlush",
".",
"add",
"(",
"owner",
")",
"elif",
"dcfield",
".",
"hasKeyword",
"(",
"'broadcast'",
")",
":",
"# Broadcast: to everyone except orig sender",
"self",
".",
"sendToZoneExcept",
"(",
"object",
".",
"zoneId",
",",
"dg",
",",
"[",
"client",
"]",
")",
"elif",
"dcfield",
".",
"hasKeyword",
"(",
"'reflect'",
")",
":",
"# Reflect: broadcast to everyone including orig sender",
"self",
".",
"sendToZoneExcept",
"(",
"object",
".",
"zoneId",
",",
"dg",
",",
"[",
"]",
")",
"else",
":",
"self",
".",
"notify",
".",
"warning",
"(",
"\"Message is not broadcast or p2p\"",
")"
] | https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/direct/src/distributed/ServerRepository.py#L445-L514 |
||
hanpfei/chromium-net | 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f | third_party/catapult/third_party/gsutil/third_party/httplib2/upload-diffs.py | python | VersionControlSystem.CheckForUnknownFiles | (self) | Show an "are you sure?" prompt if there are unknown files. | Show an "are you sure?" prompt if there are unknown files. | [
"Show",
"an",
"are",
"you",
"sure?",
"prompt",
"if",
"there",
"are",
"unknown",
"files",
"."
] | def CheckForUnknownFiles(self):
"""Show an "are you sure?" prompt if there are unknown files."""
unknown_files = self.GetUnknownFiles()
if unknown_files:
print "The following files are not added to version control:"
for line in unknown_files:
print line
prompt = "Are you sure to continue?(y/N) "
answer = raw_input(prompt).strip()
if answer != "y":
ErrorExit("User aborted") | [
"def",
"CheckForUnknownFiles",
"(",
"self",
")",
":",
"unknown_files",
"=",
"self",
".",
"GetUnknownFiles",
"(",
")",
"if",
"unknown_files",
":",
"print",
"\"The following files are not added to version control:\"",
"for",
"line",
"in",
"unknown_files",
":",
"print",
"line",
"prompt",
"=",
"\"Are you sure to continue?(y/N) \"",
"answer",
"=",
"raw_input",
"(",
"prompt",
")",
".",
"strip",
"(",
")",
"if",
"answer",
"!=",
"\"y\"",
":",
"ErrorExit",
"(",
"\"User aborted\"",
")"
] | https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/httplib2/upload-diffs.py#L839-L849 |
||
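The record above is Python 2 (`print` statement, `raw_input`). A Python 3 rendering of the same confirm-before-continuing pattern; `get_unknown_files` is a hypothetical stand-in for the VCS query:

```python
# Confirmation-prompt pattern from CheckForUnknownFiles, in Python 3.
import sys

def check_for_unknown_files(get_unknown_files):
    unknown_files = get_unknown_files()  # hypothetical VCS query callable
    if unknown_files:
        print("The following files are not added to version control:")
        for line in unknown_files:
            print(line)
        answer = input("Are you sure you want to continue? (y/N) ").strip()
        if answer != "y":
            sys.exit("User aborted")

check_for_unknown_files(lambda: ["notes.txt"])
```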
eventql/eventql | 7ca0dbb2e683b525620ea30dc40540a22d5eb227 | deps/3rdparty/spidermonkey/mozjs/python/mach/mach/mixin/logging.py | python | LoggingMixin.populate_logger | (self, name=None) | Ensure this class instance has a logger associated with it.
Users of this mixin that call log() will need to ensure self._logger is
a logging.Logger instance before they call log(). This function ensures
self._logger is defined by populating it if it isn't. | Ensure this class instance has a logger associated with it. | [
"Ensure",
"this",
"class",
"instance",
"has",
"a",
"logger",
"associated",
"with",
"it",
"."
] | def populate_logger(self, name=None):
"""Ensure this class instance has a logger associated with it.
Users of this mixin that call log() will need to ensure self._logger is
a logging.Logger instance before they call log(). This function ensures
self._logger is defined by populating it if it isn't.
"""
if hasattr(self, '_logger'):
return
if name is None:
name = '.'.join([self.__module__, self.__class__.__name__])
self._logger = logging.getLogger(name) | [
"def",
"populate_logger",
"(",
"self",
",",
"name",
"=",
"None",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'_logger'",
")",
":",
"return",
"if",
"name",
"is",
"None",
":",
"name",
"=",
"'.'",
".",
"join",
"(",
"[",
"self",
".",
"__module__",
",",
"self",
".",
"__class__",
".",
"__name__",
"]",
")",
"self",
".",
"_logger",
"=",
"logging",
".",
"getLogger",
"(",
"name",
")"
] | https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/mach/mach/mixin/logging.py#L13-L26 |
||
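A usage sketch for the mixin above; the method body is copied from the record so the snippet runs standalone rather than importing from mach:

```python
import logging

class LoggingMixin(object):
    # Condensed copy of the record's populate_logger for a runnable demo.
    def populate_logger(self, name=None):
        if hasattr(self, '_logger'):
            return
        if name is None:
            name = '.'.join([self.__module__, self.__class__.__name__])
        self._logger = logging.getLogger(name)

class Build(LoggingMixin):
    def __init__(self):
        self.populate_logger()  # idempotent: a second call is a no-op

logging.basicConfig(level=logging.INFO)
Build()._logger.info("logger named after module.class by default")
```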
windystrife/UnrealEngine_NVIDIAGameWorks | b50e6338a7c5b26374d66306ebc7807541ff815e | Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/fileinput.py | python | isfirstline | () | return _state.isfirstline() | Returns true if the line just read is the first line of its file,
otherwise returns false. | Returns true if the line just read is the first line of its file,
otherwise returns false. | [
"Returns",
"true",
"the",
"line",
"just",
"read",
"is",
"the",
"first",
"line",
"of",
"its",
"file",
"otherwise",
"returns",
"false",
"."
] | def isfirstline():
"""
Returns true if the line just read is the first line of its file,
otherwise returns false.
"""
if not _state:
raise RuntimeError, "no active input()"
return _state.isfirstline() | [
"def",
"isfirstline",
"(",
")",
":",
"if",
"not",
"_state",
":",
"raise",
"RuntimeError",
",",
"\"no active input()\"",
"return",
"_state",
".",
"isfirstline",
"(",
")"
] | https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/fileinput.py#L166-L173 |
|
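A sketch of the module-level `isfirstline()` in use, shown with Python 3's `fileinput` (the record above is the Python 2 implementation); the file names are placeholders:

```python
# Print a banner at the first line of each input file.
import fileinput

for line in fileinput.input(files=("a.txt", "b.txt")):  # placeholder paths
    if fileinput.isfirstline():
        print("--- %s ---" % fileinput.filename())
    print(line, end="")
```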
LiquidPlayer/LiquidCore | 9405979363f2353ac9a71ad8ab59685dd7f919c9 | deps/boost_1_66_0/tools/build/src/build/virtual_target.py | python | VirtualTarget.name | (self) | return self.name_ | Name of this target. | Name of this target. | [
"Name",
"of",
"this",
"target",
"."
] | def name (self):
""" Name of this target.
"""
return self.name_ | [
"def",
"name",
"(",
"self",
")",
":",
"return",
"self",
".",
"name_"
] | https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/boost_1_66_0/tools/build/src/build/virtual_target.py#L289-L292 |
|
openvinotoolkit/openvino | dedcbeafa8b84cccdc55ca64b8da516682b381c7 | docs/scripts/create_mapping.py | python | create_mapping | (xml_input: Path, output_dir: Path, strip_path: Path) | Create a mapping between doxygen label and file path for edit on github button. | Create a mapping between doxygen label and file path for edit on github button. | [
"Create",
"a",
"mapping",
"between",
"doxygen",
"label",
"and",
"file",
"path",
"for",
"edit",
"on",
"github",
"button",
"."
] | def create_mapping(xml_input: Path, output_dir: Path, strip_path: Path):
"""
Create a mapping between doxygen label and file path for edit on github button.
"""
xml_input = xml_input.resolve()
output_dir = output_dir.resolve()
strip_path = strip_path.resolve()
mapping = {
'get_started': 'openvino/docs/get_started.md',
'documentation': 'openvino/docs/documentation.md',
'index': 'openvino/docs/index.rst',
'model_zoo': 'openvino/docs/model_zoo.md',
'resources': 'openvino/docs/resources.md',
'tutorials': 'openvino/docs/tutorials.md',
'tuning_utilities': 'openvino/docs/tuning_utilities.md'
}
output_dir.mkdir(parents=True, exist_ok=True)
xml_files = xml_input.glob('*.xml')
for xml_file in xml_files:
try:
root = etree.parse(xml_file.as_posix()).getroot()
compounds = root.xpath('//compounddef')
for compound in compounds:
kind = compound.attrib['kind']
if kind in ['file', 'dir']:
continue
name_tag = compound.find('compoundname')
name = name_tag.text
name = name.replace('::', '_1_1')
if kind == 'page':
exclude = True
for rep in REPOSITORIES:
if name.startswith(rep):
exclude = False
if exclude:
continue
else:
name = kind + name
location_tag = compound.find('location')
file = Path(location_tag.attrib['file'])
if not file.suffix:
continue
try:
file = file.relative_to(strip_path)
except ValueError:
logging.warning('{}: {} is not relative to {}.'.format(xml_file, file, strip_path))
mapping[name] = file.as_posix()
except AttributeError:
logging.warning('{}: Cannot find the origin file.'.format(xml_file))
except etree.XMLSyntaxError as e:
logging.warning('{}: {}.'.format(xml_file, e))
with open(output_dir.joinpath('mapping.json'), 'w') as f:
json.dump(mapping, f) | [
"def",
"create_mapping",
"(",
"xml_input",
":",
"Path",
",",
"output_dir",
":",
"Path",
",",
"strip_path",
":",
"Path",
")",
":",
"xml_input",
"=",
"xml_input",
".",
"resolve",
"(",
")",
"output_dir",
"=",
"output_dir",
".",
"resolve",
"(",
")",
"strip_path",
"=",
"strip_path",
".",
"resolve",
"(",
")",
"mapping",
"=",
"{",
"'get_started'",
":",
"'openvino/docs/get_started.md'",
",",
"'documentation'",
":",
"'openvino/docs/documentation.md'",
",",
"'index'",
":",
"'openvino/docs/index.rst'",
",",
"'model_zoo'",
":",
"'openvino/docs/model_zoo.md'",
",",
"'resources'",
":",
"'openvino/docs/resources.md'",
",",
"'tutorials'",
":",
"'openvino/docs/tutorials.md'",
",",
"'tuning_utilities'",
":",
"'openvino/docs/tuning_utilities.md'",
"}",
"output_dir",
".",
"mkdir",
"(",
"parents",
"=",
"True",
",",
"exist_ok",
"=",
"True",
")",
"xml_files",
"=",
"xml_input",
".",
"glob",
"(",
"'*.xml'",
")",
"for",
"xml_file",
"in",
"xml_files",
":",
"try",
":",
"root",
"=",
"etree",
".",
"parse",
"(",
"xml_file",
".",
"as_posix",
"(",
")",
")",
".",
"getroot",
"(",
")",
"compounds",
"=",
"root",
".",
"xpath",
"(",
"'//compounddef'",
")",
"for",
"compound",
"in",
"compounds",
":",
"kind",
"=",
"compound",
".",
"attrib",
"[",
"'kind'",
"]",
"if",
"kind",
"in",
"[",
"'file'",
",",
"'dir'",
"]",
":",
"continue",
"name_tag",
"=",
"compound",
".",
"find",
"(",
"'compoundname'",
")",
"name",
"=",
"name_tag",
".",
"text",
"name",
"=",
"name",
".",
"replace",
"(",
"'::'",
",",
"'_1_1'",
")",
"if",
"kind",
"==",
"'page'",
":",
"exclude",
"=",
"True",
"for",
"rep",
"in",
"REPOSITORIES",
":",
"if",
"name",
".",
"startswith",
"(",
"rep",
")",
":",
"exclude",
"=",
"False",
"if",
"exclude",
":",
"continue",
"else",
":",
"name",
"=",
"kind",
"+",
"name",
"location_tag",
"=",
"compound",
".",
"find",
"(",
"'location'",
")",
"file",
"=",
"Path",
"(",
"location_tag",
".",
"attrib",
"[",
"'file'",
"]",
")",
"if",
"not",
"file",
".",
"suffix",
":",
"continue",
"try",
":",
"file",
"=",
"file",
".",
"relative_to",
"(",
"strip_path",
")",
"except",
"ValueError",
":",
"logging",
".",
"warning",
"(",
"'{}: {} is not relative to {}.'",
".",
"format",
"(",
"xml_file",
",",
"file",
",",
"strip_path",
")",
")",
"mapping",
"[",
"name",
"]",
"=",
"file",
".",
"as_posix",
"(",
")",
"except",
"AttributeError",
":",
"logging",
".",
"warning",
"(",
"'{}: Cannot find the origin file.'",
".",
"format",
"(",
"xml_file",
")",
")",
"except",
"etree",
".",
"XMLSyntaxError",
"as",
"e",
":",
"logging",
".",
"warning",
"(",
"'{}: {}.'",
".",
"format",
"(",
"xml_file",
",",
"e",
")",
")",
"with",
"open",
"(",
"output_dir",
".",
"joinpath",
"(",
"'mapping.json'",
")",
",",
"'w'",
")",
"as",
"f",
":",
"json",
".",
"dump",
"(",
"mapping",
",",
"f",
")"
] | https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/docs/scripts/create_mapping.py#L15-L68 |
||
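A sketch of invoking `create_mapping` with the `Path` arguments it expects, assuming the function from the record is in scope; the directory names are illustrative, not taken from the OpenVINO build:

```python
from pathlib import Path

create_mapping(xml_input=Path("build/docs/xml"),       # doxygen XML output
               output_dir=Path("build/docs/mapping"),  # where mapping.json lands
               strip_path=Path("."))
# build/docs/mapping/mapping.json then maps doxygen labels to source paths.
```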
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_internal/operations/install/wheel.py | python | _normalized_outrows | (outrows) | return sorted(
(ensure_str(record_path, encoding='utf-8'), hash_, str(size))
for record_path, hash_, size in outrows
) | Normalize the given rows of a RECORD file.
Items in each row are converted into str. Rows are then sorted to make
the value more predictable for tests.
Each row is a 3-tuple (path, hash, size) and corresponds to a record of
a RECORD file (see PEP 376 and PEP 427 for details). For the rows
passed to this function, the size can be an integer as an int or string,
or the empty string. | Normalize the given rows of a RECORD file. | [
"Normalize",
"the",
"given",
"rows",
"of",
"a",
"RECORD",
"file",
"."
] | def _normalized_outrows(outrows):
# type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]]
"""Normalize the given rows of a RECORD file.
Items in each row are converted into str. Rows are then sorted to make
the value more predictable for tests.
Each row is a 3-tuple (path, hash, size) and corresponds to a record of
a RECORD file (see PEP 376 and PEP 427 for details). For the rows
passed to this function, the size can be an integer as an int or string,
or the empty string.
"""
# Normally, there should only be one row per path, in which case the
# second and third elements don't come into play when sorting.
# However, in cases in the wild where a path might happen to occur twice,
# we don't want the sort operation to trigger an error (but still want
# determinism). Since the third element can be an int or string, we
# coerce each element to a string to avoid a TypeError in this case.
# For additional background, see--
# https://github.com/pypa/pip/issues/5868
return sorted(
(ensure_str(record_path, encoding='utf-8'), hash_, str(size))
for record_path, hash_, size in outrows
) | [
"def",
"_normalized_outrows",
"(",
"outrows",
")",
":",
"# type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]]",
"# Normally, there should only be one row per path, in which case the",
"# second and third elements don't come into play when sorting.",
"# However, in cases in the wild where a path might happen to occur twice,",
"# we don't want the sort operation to trigger an error (but still want",
"# determinism). Since the third element can be an int or string, we",
"# coerce each element to a string to avoid a TypeError in this case.",
"# For additional background, see--",
"# https://github.com/pypa/pip/issues/5868",
"return",
"sorted",
"(",
"(",
"ensure_str",
"(",
"record_path",
",",
"encoding",
"=",
"'utf-8'",
")",
",",
"hash_",
",",
"str",
"(",
"size",
")",
")",
"for",
"record_path",
",",
"hash_",
",",
"size",
"in",
"outrows",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_internal/operations/install/wheel.py#L226-L249 |
|
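A sketch of why every element is coerced to `str` first: under Python 3, mixed `int`/`str` sizes would otherwise raise `TypeError` inside `sorted()` when a path occurs twice. Assumes `_normalized_outrows` and its vendored `ensure_str` helper are in scope as in the record's module:

```python
# Duplicate path with mixed size types; still sorts deterministically.
outrows = [
    (b"pkg/mod.py", "sha256=abc", 10),   # bytes path, int size
    ("pkg/mod.py", "sha256=def", "20"),  # str path, str size
]
print(_normalized_outrows(outrows))
# [('pkg/mod.py', 'sha256=abc', '10'), ('pkg/mod.py', 'sha256=def', '20')]
```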
pmq20/node-packer | 12c46c6e44fbc14d9ee645ebd17d5296b324f7e0 | lts/tools/gyp/pylib/gyp/win_tool.py | python | WinTool._UseSeparateMspdbsrv | (self, env, args) | | Allows using a unique instance of mspdbsrv.exe per linker instead of a
shared one. | Allows using a unique instance of mspdbsrv.exe per linker instead of a
shared one. | [
"Allows",
"to",
"use",
"a",
"unique",
"instance",
"of",
"mspdbsrv",
".",
"exe",
"per",
"linker",
"instead",
"of",
"a",
"shared",
"one",
"."
] | def _UseSeparateMspdbsrv(self, env, args):
"""Allows to use a unique instance of mspdbsrv.exe per linker instead of a
shared one."""
if len(args) < 1:
raise Exception("Not enough arguments")
if args[0] != 'link.exe':
return
# Use the output filename passed to the linker to generate an endpoint name
# for mspdbsrv.exe.
endpoint_name = None
for arg in args:
m = _LINK_EXE_OUT_ARG.match(arg)
if m:
endpoint_name = re.sub(r'\W+', '',
'%s_%d' % (m.group('out'), os.getpid()))
break
if endpoint_name is None:
return
# Adds the appropriate environment variable. This will be read by link.exe
# to know which instance of mspdbsrv.exe it should connect to (if it's
# not set then the default endpoint is used).
env['_MSPDBSRV_ENDPOINT_'] = endpoint_name | [
"def",
"_UseSeparateMspdbsrv",
"(",
"self",
",",
"env",
",",
"args",
")",
":",
"if",
"len",
"(",
"args",
")",
"<",
"1",
":",
"raise",
"Exception",
"(",
"\"Not enough arguments\"",
")",
"if",
"args",
"[",
"0",
"]",
"!=",
"'link.exe'",
":",
"return",
"# Use the output filename passed to the linker to generate an endpoint name",
"# for mspdbsrv.exe.",
"endpoint_name",
"=",
"None",
"for",
"arg",
"in",
"args",
":",
"m",
"=",
"_LINK_EXE_OUT_ARG",
".",
"match",
"(",
"arg",
")",
"if",
"m",
":",
"endpoint_name",
"=",
"re",
".",
"sub",
"(",
"r'\\W+'",
",",
"''",
",",
"'%s_%d'",
"%",
"(",
"m",
".",
"group",
"(",
"'out'",
")",
",",
"os",
".",
"getpid",
"(",
")",
")",
")",
"break",
"if",
"endpoint_name",
"is",
"None",
":",
"return",
"# Adds the appropriate environment variable. This will be read by link.exe",
"# to know which instance of mspdbsrv.exe it should connect to (if it's",
"# not set then the default endpoint is used).",
"env",
"[",
"'_MSPDBSRV_ENDPOINT_'",
"]",
"=",
"endpoint_name"
] | https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/tools/gyp/pylib/gyp/win_tool.py#L40-L65 |
||
forkineye/ESPixelStick | 22926f1c0d1131f1369fc7cad405689a095ae3cb | dist/bin/esptool/esptool.py | python | timeout_per_mb | (seconds_per_mb, size_bytes) | return result | Scales timeouts which are size-specific | Scales timeouts which are size-specific | [
"Scales",
"timeouts",
"which",
"are",
"size",
"-",
"specific"
] | def timeout_per_mb(seconds_per_mb, size_bytes):
""" Scales timeouts which are size-specific """
result = seconds_per_mb * (size_bytes / 1e6)
if result < DEFAULT_TIMEOUT:
return DEFAULT_TIMEOUT
return result | [
"def",
"timeout_per_mb",
"(",
"seconds_per_mb",
",",
"size_bytes",
")",
":",
"result",
"=",
"seconds_per_mb",
"*",
"(",
"size_bytes",
"/",
"1e6",
")",
"if",
"result",
"<",
"DEFAULT_TIMEOUT",
":",
"return",
"DEFAULT_TIMEOUT",
"return",
"result"
] | https://github.com/forkineye/ESPixelStick/blob/22926f1c0d1131f1369fc7cad405689a095ae3cb/dist/bin/esptool/esptool.py#L79-L84 |
|
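A quick numeric check of the scaling and its floor; the `DEFAULT_TIMEOUT` value is an assumption here, since the record only references the constant by name:

```python
DEFAULT_TIMEOUT = 3  # seconds; value assumed for the demo

def timeout_per_mb(seconds_per_mb, size_bytes):
    """ Scales timeouts which are size-specific """
    result = seconds_per_mb * (size_bytes / 1e6)
    if result < DEFAULT_TIMEOUT:
        return DEFAULT_TIMEOUT
    return result

print(timeout_per_mb(8, 4 * 1024 * 1024))  # ~33.6 s for a 4 MiB image
print(timeout_per_mb(8, 1024))             # clamped to the 3 s floor
```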
Yaafe/Yaafe | f5ed847bdbf540b47e8fe1980dddfb5509ae7f9d | src_python/yaafelib/engine.py | python | Engine.getInputs | (self) | return res | Get input metadata. Result format is the same as for
:py:meth:`getOutputs` method, but the general case is
that there is only one input named 'audio' and the sole
relevant metadata are:
:sampleRate: expected audio sampleRate
:parameters: attached parameters
Other fields should be set to 1. | Get input metadata. Result format is the same as for
:py:meth:`getOutputs` method, but the general case is
that there is only one input named 'audio' and the sole
relevant metadata are: | [
"Get",
"input",
"metadata",
".",
"Result",
"format",
"is",
"the",
"same",
"as",
"for",
":",
"py",
":",
"meth",
":",
"getOutputs",
"method",
"but",
"the",
"general",
"case",
"is",
"that",
"there",
"is",
"only",
"one",
"input",
"named",
"audio",
"and",
"the",
"sole",
"relevant",
"metadata",
"are",
":"
] | def getInputs(self):
"""
Get input metadata. Result format is the same as for
:py:meth:`getOutputs` method, but the general case is
that there is only one input named 'audio' and the sole
relevant metadata are:
:sampleRate: expected audio sampleRate
:parameters: attached parameters
Other fields should be set to 1.
"""
res = {}
iList = yc.engine_getInputList(self.ptr)
for inputname in iterPtrList(iList):
ptr = yc.engine_getInputInfos(self.ptr, to_char(inputname))
infos = {}
if ptr:
infos['sampleRate'] = ptr.contents.sampleRate
infos['sampleStep'] = ptr.contents.sampleStep
infos['frameLength'] = ptr.contents.frameLength
infos['size'] = ptr.contents.size
infos['parameters'] = dict(
(to_str(k), to_str(v))
for k, v in iterPtrDict(ptr.contents.parameters))
yc.engine_freeIOInfos(ptr)
res[to_str(inputname)] = infos
yc.engine_freeIOList(iList)
return res | [
"def",
"getInputs",
"(",
"self",
")",
":",
"res",
"=",
"{",
"}",
"iList",
"=",
"yc",
".",
"engine_getInputList",
"(",
"self",
".",
"ptr",
")",
"for",
"inputname",
"in",
"iterPtrList",
"(",
"iList",
")",
":",
"ptr",
"=",
"yc",
".",
"engine_getInputInfos",
"(",
"self",
".",
"ptr",
",",
"to_char",
"(",
"inputname",
")",
")",
"infos",
"=",
"{",
"}",
"if",
"ptr",
":",
"infos",
"[",
"'sampleRate'",
"]",
"=",
"ptr",
".",
"contents",
".",
"sampleRate",
"infos",
"[",
"'sampleStep'",
"]",
"=",
"ptr",
".",
"contents",
".",
"sampleStep",
"infos",
"[",
"'frameLength'",
"]",
"=",
"ptr",
".",
"contents",
".",
"frameLength",
"infos",
"[",
"'size'",
"]",
"=",
"ptr",
".",
"contents",
".",
"size",
"infos",
"[",
"'parameters'",
"]",
"=",
"dict",
"(",
"(",
"to_str",
"(",
"k",
")",
",",
"to_str",
"(",
"v",
")",
")",
"for",
"k",
",",
"v",
"in",
"iterPtrDict",
"(",
"ptr",
".",
"contents",
".",
"parameters",
")",
")",
"yc",
".",
"engine_freeIOInfos",
"(",
"ptr",
")",
"res",
"[",
"to_str",
"(",
"inputname",
")",
"]",
"=",
"infos",
"yc",
".",
"engine_freeIOList",
"(",
"iList",
")",
"return",
"res"
] | https://github.com/Yaafe/Yaafe/blob/f5ed847bdbf540b47e8fe1980dddfb5509ae7f9d/src_python/yaafelib/engine.py#L156-L184 |
|
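A hedged call-site sketch, assuming a working yaafe install; the `FeaturePlan`/`getDataFlow`/`Engine.load` sequence follows yaafe's documented Python bindings but is not shown in the record itself:

```python
from yaafelib import Engine, FeaturePlan

fp = FeaturePlan(sample_rate=16000)
fp.addFeature('mfcc: MFCC blockSize=512 stepSize=256')
engine = Engine()
engine.load(fp.getDataFlow())
print(engine.getInputs())
# Typically one 'audio' input whose 'sampleRate' matches the plan; the
# remaining size/step/frame fields are 1, as the docstring above notes.
```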
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/scipy/py2/scipy/linalg/decomp_schur.py | python | rsf2csf | (T, Z, check_finite=True) | return T, Z | Convert real Schur form to complex Schur form.
Convert a quasi-diagonal real-valued Schur form to the upper triangular
complex-valued Schur form.
Parameters
----------
T : (M, M) array_like
Real Schur form of the original array
Z : (M, M) array_like
Schur transformation matrix
check_finite : bool, optional
Whether to check that the input arrays contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
T : (M, M) ndarray
Complex Schur form of the original array
Z : (M, M) ndarray
Schur transformation matrix corresponding to the complex form
See Also
--------
schur : Schur decomposition of an array
Examples
--------
>>> from scipy.linalg import schur, rsf2csf
>>> A = np.array([[0, 2, 2], [0, 1, 2], [1, 0, 1]])
>>> T, Z = schur(A)
>>> T
array([[ 2.65896708, 1.42440458, -1.92933439],
[ 0. , -0.32948354, -0.49063704],
[ 0. , 1.31178921, -0.32948354]])
>>> Z
array([[0.72711591, -0.60156188, 0.33079564],
[0.52839428, 0.79801892, 0.28976765],
[0.43829436, 0.03590414, -0.89811411]])
>>> T2 , Z2 = rsf2csf(T, Z)
>>> T2
array([[2.65896708+0.j, -1.64592781+0.743164187j, -1.21516887+1.00660462j],
[0.+0.j , -0.32948354+8.02254558e-01j, -0.82115218-2.77555756e-17j],
[0.+0.j , 0.+0.j, -0.32948354-0.802254558j]])
>>> Z2
array([[0.72711591+0.j, 0.28220393-0.31385693j, 0.51319638-0.17258824j],
[0.52839428+0.j, 0.24720268+0.41635578j, -0.68079517-0.15118243j],
[0.43829436+0.j, -0.76618703+0.01873251j, -0.03063006+0.46857912j]]) | Convert real Schur form to complex Schur form. | [
"Convert",
"real",
"Schur",
"form",
"to",
"complex",
"Schur",
"form",
"."
] | def rsf2csf(T, Z, check_finite=True):
"""
Convert real Schur form to complex Schur form.
Convert a quasi-diagonal real-valued Schur form to the upper triangular
complex-valued Schur form.
Parameters
----------
T : (M, M) array_like
Real Schur form of the original array
Z : (M, M) array_like
Schur transformation matrix
check_finite : bool, optional
Whether to check that the input arrays contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
T : (M, M) ndarray
Complex Schur form of the original array
Z : (M, M) ndarray
Schur transformation matrix corresponding to the complex form
See Also
--------
schur : Schur decomposition of an array
Examples
--------
>>> from scipy.linalg import schur, rsf2csf
>>> A = np.array([[0, 2, 2], [0, 1, 2], [1, 0, 1]])
>>> T, Z = schur(A)
>>> T
array([[ 2.65896708, 1.42440458, -1.92933439],
[ 0. , -0.32948354, -0.49063704],
[ 0. , 1.31178921, -0.32948354]])
>>> Z
array([[0.72711591, -0.60156188, 0.33079564],
[0.52839428, 0.79801892, 0.28976765],
[0.43829436, 0.03590414, -0.89811411]])
>>> T2 , Z2 = rsf2csf(T, Z)
>>> T2
array([[2.65896708+0.j, -1.64592781+0.743164187j, -1.21516887+1.00660462j],
[0.+0.j , -0.32948354+8.02254558e-01j, -0.82115218-2.77555756e-17j],
[0.+0.j , 0.+0.j, -0.32948354-0.802254558j]])
>>> Z2
array([[0.72711591+0.j, 0.28220393-0.31385693j, 0.51319638-0.17258824j],
[0.52839428+0.j, 0.24720268+0.41635578j, -0.68079517-0.15118243j],
[0.43829436+0.j, -0.76618703+0.01873251j, -0.03063006+0.46857912j]])
"""
if check_finite:
Z, T = map(asarray_chkfinite, (Z, T))
else:
Z, T = map(asarray, (Z, T))
for ind, X in enumerate([Z, T]):
if X.ndim != 2 or X.shape[0] != X.shape[1]:
raise ValueError("Input '{}' must be square.".format('ZT'[ind]))
if T.shape[0] != Z.shape[0]:
raise ValueError("Input array shapes must match: Z: {} vs. T: {}"
"".format(Z.shape, T.shape))
N = T.shape[0]
t = _commonType(Z, T, array([3.0], 'F'))
Z, T = _castCopy(t, Z, T)
for m in range(N-1, 0, -1):
if abs(T[m, m-1]) > eps*(abs(T[m-1, m-1]) + abs(T[m, m])):
mu = eigvals(T[m-1:m+1, m-1:m+1]) - T[m, m]
r = norm([mu[0], T[m, m-1]])
c = mu[0] / r
s = T[m, m-1] / r
G = array([[c.conj(), s], [-s, c]], dtype=t)
T[m-1:m+1, m-1:] = G.dot(T[m-1:m+1, m-1:])
T[:m+1, m-1:m+1] = T[:m+1, m-1:m+1].dot(G.conj().T)
Z[:, m-1:m+1] = Z[:, m-1:m+1].dot(G.conj().T)
T[m, m-1] = 0.0
return T, Z | [
"def",
"rsf2csf",
"(",
"T",
",",
"Z",
",",
"check_finite",
"=",
"True",
")",
":",
"if",
"check_finite",
":",
"Z",
",",
"T",
"=",
"map",
"(",
"asarray_chkfinite",
",",
"(",
"Z",
",",
"T",
")",
")",
"else",
":",
"Z",
",",
"T",
"=",
"map",
"(",
"asarray",
",",
"(",
"Z",
",",
"T",
")",
")",
"for",
"ind",
",",
"X",
"in",
"enumerate",
"(",
"[",
"Z",
",",
"T",
"]",
")",
":",
"if",
"X",
".",
"ndim",
"!=",
"2",
"or",
"X",
".",
"shape",
"[",
"0",
"]",
"!=",
"X",
".",
"shape",
"[",
"1",
"]",
":",
"raise",
"ValueError",
"(",
"\"Input '{}' must be square.\"",
".",
"format",
"(",
"'ZT'",
"[",
"ind",
"]",
")",
")",
"if",
"T",
".",
"shape",
"[",
"0",
"]",
"!=",
"Z",
".",
"shape",
"[",
"0",
"]",
":",
"raise",
"ValueError",
"(",
"\"Input array shapes must match: Z: {} vs. T: {}\"",
"\"\"",
".",
"format",
"(",
"Z",
".",
"shape",
",",
"T",
".",
"shape",
")",
")",
"N",
"=",
"T",
".",
"shape",
"[",
"0",
"]",
"t",
"=",
"_commonType",
"(",
"Z",
",",
"T",
",",
"array",
"(",
"[",
"3.0",
"]",
",",
"'F'",
")",
")",
"Z",
",",
"T",
"=",
"_castCopy",
"(",
"t",
",",
"Z",
",",
"T",
")",
"for",
"m",
"in",
"range",
"(",
"N",
"-",
"1",
",",
"0",
",",
"-",
"1",
")",
":",
"if",
"abs",
"(",
"T",
"[",
"m",
",",
"m",
"-",
"1",
"]",
")",
">",
"eps",
"*",
"(",
"abs",
"(",
"T",
"[",
"m",
"-",
"1",
",",
"m",
"-",
"1",
"]",
")",
"+",
"abs",
"(",
"T",
"[",
"m",
",",
"m",
"]",
")",
")",
":",
"mu",
"=",
"eigvals",
"(",
"T",
"[",
"m",
"-",
"1",
":",
"m",
"+",
"1",
",",
"m",
"-",
"1",
":",
"m",
"+",
"1",
"]",
")",
"-",
"T",
"[",
"m",
",",
"m",
"]",
"r",
"=",
"norm",
"(",
"[",
"mu",
"[",
"0",
"]",
",",
"T",
"[",
"m",
",",
"m",
"-",
"1",
"]",
"]",
")",
"c",
"=",
"mu",
"[",
"0",
"]",
"/",
"r",
"s",
"=",
"T",
"[",
"m",
",",
"m",
"-",
"1",
"]",
"/",
"r",
"G",
"=",
"array",
"(",
"[",
"[",
"c",
".",
"conj",
"(",
")",
",",
"s",
"]",
",",
"[",
"-",
"s",
",",
"c",
"]",
"]",
",",
"dtype",
"=",
"t",
")",
"T",
"[",
"m",
"-",
"1",
":",
"m",
"+",
"1",
",",
"m",
"-",
"1",
":",
"]",
"=",
"G",
".",
"dot",
"(",
"T",
"[",
"m",
"-",
"1",
":",
"m",
"+",
"1",
",",
"m",
"-",
"1",
":",
"]",
")",
"T",
"[",
":",
"m",
"+",
"1",
",",
"m",
"-",
"1",
":",
"m",
"+",
"1",
"]",
"=",
"T",
"[",
":",
"m",
"+",
"1",
",",
"m",
"-",
"1",
":",
"m",
"+",
"1",
"]",
".",
"dot",
"(",
"G",
".",
"conj",
"(",
")",
".",
"T",
")",
"Z",
"[",
":",
",",
"m",
"-",
"1",
":",
"m",
"+",
"1",
"]",
"=",
"Z",
"[",
":",
",",
"m",
"-",
"1",
":",
"m",
"+",
"1",
"]",
".",
"dot",
"(",
"G",
".",
"conj",
"(",
")",
".",
"T",
")",
"T",
"[",
"m",
",",
"m",
"-",
"1",
"]",
"=",
"0.0",
"return",
"T",
",",
"Z"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/linalg/decomp_schur.py#L213-L295 |
|
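A sketch verifying the two properties the docstring promises: `T2` is strictly upper triangular, and the complex factors still reconstruct `A`:

```python
import numpy as np
from scipy.linalg import schur, rsf2csf

A = np.array([[0, 2, 2], [0, 1, 2], [1, 0, 1]], dtype=float)
T, Z = schur(A)          # real (quasi-triangular) Schur form
T2, Z2 = rsf2csf(T, Z)   # complex (upper triangular) form
print(np.allclose(np.tril(T2, -1), 0))        # True: 2x2 blocks eliminated
print(np.allclose(Z2 @ T2 @ Z2.conj().T, A))  # True: A is recovered
```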
BSVino/DoubleAction | c550b168a3e919926c198c30240f506538b92e75 | mp/src/thirdparty/protobuf-2.3.0/python/google/protobuf/descriptor.py | python | EnumDescriptor.CopyToProto | (self, proto) | Copies this to a descriptor_pb2.EnumDescriptorProto.
Args:
proto: An empty descriptor_pb2.EnumDescriptorProto. | Copies this to a descriptor_pb2.EnumDescriptorProto. | [
"Copies",
"this",
"to",
"a",
"descriptor_pb2",
".",
"EnumDescriptorProto",
"."
] | def CopyToProto(self, proto):
"""Copies this to a descriptor_pb2.EnumDescriptorProto.
Args:
proto: An empty descriptor_pb2.EnumDescriptorProto.
"""
# This function is overridden to give a better doc comment.
super(EnumDescriptor, self).CopyToProto(proto) | [
"def",
"CopyToProto",
"(",
"self",
",",
"proto",
")",
":",
"# This function is overriden to give a better doc comment.",
"super",
"(",
"EnumDescriptor",
",",
"self",
")",
".",
"CopyToProto",
"(",
"proto",
")"
] | https://github.com/BSVino/DoubleAction/blob/c550b168a3e919926c198c30240f506538b92e75/mp/src/thirdparty/protobuf-2.3.0/python/google/protobuf/descriptor.py#L448-L455 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | wx/lib/agw/ribbon/bar.py | python | RibbonBar.RecalculateMinSize | (self) | Recalculates the :class:`RibbonBar` minimum size. | Recalculates the :class:`RibbonBar` minimum size. | [
"Recalculates",
"the",
":",
"class",
":",
"RibbonBar",
"minimum",
"size",
"."
] | def RecalculateMinSize(self):
""" Recalculates the :class:`RibbonBar` minimum size. """
min_size = wx.Size(-1, -1)
numtabs = len(self._pages)
if numtabs != 0:
min_size = wx.Size(*self._pages[0].page.GetMinSize())
for info in self._pages:
page_min = info.page.GetMinSize()
min_size.x = max(min_size.x, page_min.x)
min_size.y = max(min_size.y, page_min.y)
if min_size.y != -1:
# TODO: Decide on best course of action when min height is unspecified
# - should we specify it to the tab minimum, or leave it unspecified?
min_size.IncBy(0, self._tab_height)
self._minWidth = min_size.GetWidth()
self._minHeight = (self._arePanelsShown and [min_size.GetHeight()] or [self._tab_height])[0] | [
"def",
"RecalculateMinSize",
"(",
"self",
")",
":",
"min_size",
"=",
"wx",
".",
"Size",
"(",
"-",
"1",
",",
"-",
"1",
")",
"numtabs",
"=",
"len",
"(",
"self",
".",
"_pages",
")",
"if",
"numtabs",
"!=",
"0",
":",
"min_size",
"=",
"wx",
".",
"Size",
"(",
"*",
"self",
".",
"_pages",
"[",
"0",
"]",
".",
"page",
".",
"GetMinSize",
"(",
")",
")",
"for",
"info",
"in",
"self",
".",
"_pages",
":",
"page_min",
"=",
"info",
".",
"page",
".",
"GetMinSize",
"(",
")",
"min_size",
".",
"x",
"=",
"max",
"(",
"min_size",
".",
"x",
",",
"page_min",
".",
"x",
")",
"min_size",
".",
"y",
"=",
"max",
"(",
"min_size",
".",
"y",
",",
"page_min",
".",
"y",
")",
"if",
"min_size",
".",
"y",
"!=",
"-",
"1",
":",
"# TODO: Decide on best course of action when min height is unspecified",
"# - should we specify it to the tab minimum, or leave it unspecified?",
"min_size",
".",
"IncBy",
"(",
"0",
",",
"self",
".",
"_tab_height",
")",
"self",
".",
"_minWidth",
"=",
"min_size",
".",
"GetWidth",
"(",
")",
"self",
".",
"_minHeight",
"=",
"(",
"self",
".",
"_arePanelsShown",
"and",
"[",
"min_size",
".",
"GetHeight",
"(",
")",
"]",
"or",
"[",
"self",
".",
"_tab_height",
"]",
")",
"[",
"0",
"]"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/ribbon/bar.py#L1186-L1206 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/arrays/period.py | python | PeriodArray._format_native_types | (self, na_rep="NaT", date_format=None, **kwargs) | return values | actually format my specific types | actually format my specific types | [
"actually",
"format",
"my",
"specific",
"types"
] | def _format_native_types(self, na_rep="NaT", date_format=None, **kwargs):
"""
actually format my specific types
"""
values = self.astype(object)
if date_format:
formatter = lambda dt: dt.strftime(date_format)
else:
formatter = lambda dt: str(dt)
if self._hasnans:
mask = self._isnan
values[mask] = na_rep
imask = ~mask
values[imask] = np.array([formatter(dt) for dt in values[imask]])
else:
values = np.array([formatter(dt) for dt in values])
return values | [
"def",
"_format_native_types",
"(",
"self",
",",
"na_rep",
"=",
"\"NaT\"",
",",
"date_format",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"values",
"=",
"self",
".",
"astype",
"(",
"object",
")",
"if",
"date_format",
":",
"formatter",
"=",
"lambda",
"dt",
":",
"dt",
".",
"strftime",
"(",
"date_format",
")",
"else",
":",
"formatter",
"=",
"lambda",
"dt",
":",
"str",
"(",
"dt",
")",
"if",
"self",
".",
"_hasnans",
":",
"mask",
"=",
"self",
".",
"_isnan",
"values",
"[",
"mask",
"]",
"=",
"na_rep",
"imask",
"=",
"~",
"mask",
"values",
"[",
"imask",
"]",
"=",
"np",
".",
"array",
"(",
"[",
"formatter",
"(",
"dt",
")",
"for",
"dt",
"in",
"values",
"[",
"imask",
"]",
"]",
")",
"else",
":",
"values",
"=",
"np",
".",
"array",
"(",
"[",
"formatter",
"(",
"dt",
")",
"for",
"dt",
"in",
"values",
"]",
")",
"return",
"values"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/arrays/period.py#L558-L576 |
|
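The mask-then-format logic above can be mirrored on a plain object array; a sketch using public pandas pieces rather than the private `_format_native_types`:

```python
import numpy as np
import pandas as pd

values = np.array([pd.Period("2020-01", freq="M"), pd.NaT], dtype=object)
mask = pd.isna(values)                 # plays the role of self._isnan
out = values.copy()
out[mask] = "NaT"                      # na_rep for missing entries
out[~mask] = np.array([p.strftime("%Y-%m") for p in out[~mask]])
print(out)  # ['2020-01' 'NaT']
```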
waymo-research/waymo-open-dataset | 5de359f3429e1496761790770868296140161b66 | waymo_open_dataset/utils/frame_utils.py | python | convert_range_image_to_cartesian | (frame,
range_images,
range_image_top_pose,
ri_index=0,
keep_polar_features=False) | return cartesian_range_images | Convert range images from polar coordinates to Cartesian coordinates.
Args:
frame: open dataset frame
range_images: A dict of {laser_name, [range_image_first_return,
range_image_second_return]}.
range_image_top_pose: range image pixel pose for top lidar.
ri_index: 0 for the first return, 1 for the second return.
keep_polar_features: If true, keep the features from the polar range image
(i.e. range, intensity, and elongation) as the first features in the
output range image.
Returns:
dict of {laser_name, (H, W, D)} range images in Cartesian coordinates. D
will be 3 if keep_polar_features is False (x, y, z) and 6 if
keep_polar_features is True (range, intensity, elongation, x, y, z). | Convert range images from polar coordinates to Cartesian coordinates. | [
"Convert",
"range",
"images",
"from",
"polar",
"coordinates",
"to",
"Cartesian",
"coordinates",
"."
] | def convert_range_image_to_cartesian(frame,
range_images,
range_image_top_pose,
ri_index=0,
keep_polar_features=False):
"""Convert range images from polar coordinates to Cartesian coordinates.
Args:
frame: open dataset frame
range_images: A dict of {laser_name, [range_image_first_return,
range_image_second_return]}.
range_image_top_pose: range image pixel pose for top lidar.
ri_index: 0 for the first return, 1 for the second return.
keep_polar_features: If true, keep the features from the polar range image
(i.e. range, intensity, and elongation) as the first features in the
output range image.
Returns:
dict of {laser_name, (H, W, D)} range images in Cartesian coordinates. D
will be 3 if keep_polar_features is False (x, y, z) and 6 if
keep_polar_features is True (range, intensity, elongation, x, y, z).
"""
cartesian_range_images = {}
frame_pose = tf.convert_to_tensor(
value=np.reshape(np.array(frame.pose.transform), [4, 4]))
# [H, W, 6]
range_image_top_pose_tensor = tf.reshape(
tf.convert_to_tensor(value=range_image_top_pose.data),
range_image_top_pose.shape.dims)
# [H, W, 3, 3]
range_image_top_pose_tensor_rotation = transform_utils.get_rotation_matrix(
range_image_top_pose_tensor[..., 0], range_image_top_pose_tensor[..., 1],
range_image_top_pose_tensor[..., 2])
range_image_top_pose_tensor_translation = range_image_top_pose_tensor[..., 3:]
range_image_top_pose_tensor = transform_utils.get_transform(
range_image_top_pose_tensor_rotation,
range_image_top_pose_tensor_translation)
for c in frame.context.laser_calibrations:
range_image = range_images[c.name][ri_index]
if len(c.beam_inclinations) == 0: # pylint: disable=g-explicit-length-test
beam_inclinations = range_image_utils.compute_inclination(
tf.constant([c.beam_inclination_min, c.beam_inclination_max]),
height=range_image.shape.dims[0])
else:
beam_inclinations = tf.constant(c.beam_inclinations)
beam_inclinations = tf.reverse(beam_inclinations, axis=[-1])
extrinsic = np.reshape(np.array(c.extrinsic.transform), [4, 4])
range_image_tensor = tf.reshape(
tf.convert_to_tensor(value=range_image.data), range_image.shape.dims)
pixel_pose_local = None
frame_pose_local = None
if c.name == dataset_pb2.LaserName.TOP:
pixel_pose_local = range_image_top_pose_tensor
pixel_pose_local = tf.expand_dims(pixel_pose_local, axis=0)
frame_pose_local = tf.expand_dims(frame_pose, axis=0)
range_image_cartesian = range_image_utils.extract_point_cloud_from_range_image(
tf.expand_dims(range_image_tensor[..., 0], axis=0),
tf.expand_dims(extrinsic, axis=0),
tf.expand_dims(tf.convert_to_tensor(value=beam_inclinations), axis=0),
pixel_pose=pixel_pose_local,
frame_pose=frame_pose_local)
range_image_cartesian = tf.squeeze(range_image_cartesian, axis=0)
if keep_polar_features:
# If we want to keep the polar coordinate features of range, intensity,
# and elongation, concatenate them to be the initial dimensions of the
# returned Cartesian range image.
range_image_cartesian = tf.concat(
[range_image_tensor[..., 0:3], range_image_cartesian], axis=-1)
cartesian_range_images[c.name] = range_image_cartesian
return cartesian_range_images | [
"def",
"convert_range_image_to_cartesian",
"(",
"frame",
",",
"range_images",
",",
"range_image_top_pose",
",",
"ri_index",
"=",
"0",
",",
"keep_polar_features",
"=",
"False",
")",
":",
"cartesian_range_images",
"=",
"{",
"}",
"frame_pose",
"=",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"np",
".",
"reshape",
"(",
"np",
".",
"array",
"(",
"frame",
".",
"pose",
".",
"transform",
")",
",",
"[",
"4",
",",
"4",
"]",
")",
")",
"# [H, W, 6]",
"range_image_top_pose_tensor",
"=",
"tf",
".",
"reshape",
"(",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"range_image_top_pose",
".",
"data",
")",
",",
"range_image_top_pose",
".",
"shape",
".",
"dims",
")",
"# [H, W, 3, 3]",
"range_image_top_pose_tensor_rotation",
"=",
"transform_utils",
".",
"get_rotation_matrix",
"(",
"range_image_top_pose_tensor",
"[",
"...",
",",
"0",
"]",
",",
"range_image_top_pose_tensor",
"[",
"...",
",",
"1",
"]",
",",
"range_image_top_pose_tensor",
"[",
"...",
",",
"2",
"]",
")",
"range_image_top_pose_tensor_translation",
"=",
"range_image_top_pose_tensor",
"[",
"...",
",",
"3",
":",
"]",
"range_image_top_pose_tensor",
"=",
"transform_utils",
".",
"get_transform",
"(",
"range_image_top_pose_tensor_rotation",
",",
"range_image_top_pose_tensor_translation",
")",
"for",
"c",
"in",
"frame",
".",
"context",
".",
"laser_calibrations",
":",
"range_image",
"=",
"range_images",
"[",
"c",
".",
"name",
"]",
"[",
"ri_index",
"]",
"if",
"len",
"(",
"c",
".",
"beam_inclinations",
")",
"==",
"0",
":",
"# pylint: disable=g-explicit-length-test",
"beam_inclinations",
"=",
"range_image_utils",
".",
"compute_inclination",
"(",
"tf",
".",
"constant",
"(",
"[",
"c",
".",
"beam_inclination_min",
",",
"c",
".",
"beam_inclination_max",
"]",
")",
",",
"height",
"=",
"range_image",
".",
"shape",
".",
"dims",
"[",
"0",
"]",
")",
"else",
":",
"beam_inclinations",
"=",
"tf",
".",
"constant",
"(",
"c",
".",
"beam_inclinations",
")",
"beam_inclinations",
"=",
"tf",
".",
"reverse",
"(",
"beam_inclinations",
",",
"axis",
"=",
"[",
"-",
"1",
"]",
")",
"extrinsic",
"=",
"np",
".",
"reshape",
"(",
"np",
".",
"array",
"(",
"c",
".",
"extrinsic",
".",
"transform",
")",
",",
"[",
"4",
",",
"4",
"]",
")",
"range_image_tensor",
"=",
"tf",
".",
"reshape",
"(",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"range_image",
".",
"data",
")",
",",
"range_image",
".",
"shape",
".",
"dims",
")",
"pixel_pose_local",
"=",
"None",
"frame_pose_local",
"=",
"None",
"if",
"c",
".",
"name",
"==",
"dataset_pb2",
".",
"LaserName",
".",
"TOP",
":",
"pixel_pose_local",
"=",
"range_image_top_pose_tensor",
"pixel_pose_local",
"=",
"tf",
".",
"expand_dims",
"(",
"pixel_pose_local",
",",
"axis",
"=",
"0",
")",
"frame_pose_local",
"=",
"tf",
".",
"expand_dims",
"(",
"frame_pose",
",",
"axis",
"=",
"0",
")",
"range_image_cartesian",
"=",
"range_image_utils",
".",
"extract_point_cloud_from_range_image",
"(",
"tf",
".",
"expand_dims",
"(",
"range_image_tensor",
"[",
"...",
",",
"0",
"]",
",",
"axis",
"=",
"0",
")",
",",
"tf",
".",
"expand_dims",
"(",
"extrinsic",
",",
"axis",
"=",
"0",
")",
",",
"tf",
".",
"expand_dims",
"(",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"beam_inclinations",
")",
",",
"axis",
"=",
"0",
")",
",",
"pixel_pose",
"=",
"pixel_pose_local",
",",
"frame_pose",
"=",
"frame_pose_local",
")",
"range_image_cartesian",
"=",
"tf",
".",
"squeeze",
"(",
"range_image_cartesian",
",",
"axis",
"=",
"0",
")",
"if",
"keep_polar_features",
":",
"# If we want to keep the polar coordinate features of range, intensity,",
"# and elongation, concatenate them to be the initial dimensions of the",
"# returned Cartesian range image.",
"range_image_cartesian",
"=",
"tf",
".",
"concat",
"(",
"[",
"range_image_tensor",
"[",
"...",
",",
"0",
":",
"3",
"]",
",",
"range_image_cartesian",
"]",
",",
"axis",
"=",
"-",
"1",
")",
"cartesian_range_images",
"[",
"c",
".",
"name",
"]",
"=",
"range_image_cartesian",
"return",
"cartesian_range_images"
] | https://github.com/waymo-research/waymo-open-dataset/blob/5de359f3429e1496761790770868296140161b66/waymo_open_dataset/utils/frame_utils.py#L81-L158 |
|
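A hedged usage sketch for the row above: in practice the decoded range images and the top-lidar pixel pose come from a parse helper in the same frame_utils module. The file name below is a placeholder, and in some releases of the waymo-open-dataset package the parse helper additionally returns segmentation labels, so the unpacking may need adjusting.

import tensorflow as tf
from waymo_open_dataset import dataset_pb2
from waymo_open_dataset.utils import frame_utils

# Placeholder path; each record of a Waymo segment file holds one Frame proto.
records = tf.data.TFRecordDataset('segment-XXXX.tfrecord')
frame = dataset_pb2.Frame()
frame.ParseFromString(next(iter(records)).numpy())

range_images, camera_projections, range_image_top_pose = (
    frame_utils.parse_range_image_and_camera_projection(frame))
cartesian = frame_utils.convert_range_image_to_cartesian(
    frame, range_images, range_image_top_pose,
    ri_index=0, keep_polar_features=True)
# cartesian[laser_name] is (H, W, 6): range, intensity, elongation, x, y, z.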
eventql/eventql | 7ca0dbb2e683b525620ea30dc40540a22d5eb227 | deps/3rdparty/spidermonkey/mozjs/python/jsmin/jsmin/__init__.py | python | jsmin | (js, **kwargs) | return outs.getvalue() | returns a minified version of the javascript string | returns a minified version of the javascript string | [
"returns",
"a",
"minified",
"version",
"of",
"the",
"javascript",
"string"
] | def jsmin(js, **kwargs):
"""
returns a minified version of the javascript string
"""
if not is_3:
if cStringIO and not isinstance(js, unicode):
# strings can use cStringIO for a 3x performance
# improvement, but unicode (in python2) cannot
klass = cStringIO.StringIO
else:
klass = StringIO.StringIO
else:
klass = io.StringIO
ins = klass(js)
outs = klass()
JavascriptMinify(ins, outs, **kwargs).minify()
return outs.getvalue() | [
"def",
"jsmin",
"(",
"js",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"is_3",
":",
"if",
"cStringIO",
"and",
"not",
"isinstance",
"(",
"js",
",",
"unicode",
")",
":",
"# strings can use cStringIO for a 3x performance",
"# improvement, but unicode (in python2) cannot",
"klass",
"=",
"cStringIO",
".",
"StringIO",
"else",
":",
"klass",
"=",
"StringIO",
".",
"StringIO",
"else",
":",
"klass",
"=",
"io",
".",
"StringIO",
"ins",
"=",
"klass",
"(",
"js",
")",
"outs",
"=",
"klass",
"(",
")",
"JavascriptMinify",
"(",
"ins",
",",
"outs",
",",
"*",
"*",
"kwargs",
")",
".",
"minify",
"(",
")",
"return",
"outs",
".",
"getvalue",
"(",
")"
] | https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/jsmin/jsmin/__init__.py#L43-L59 |
|
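A small usage sketch for the function above; the exact whitespace of the minified output is an implementation detail, so the expected value in the comment is approximate.

from jsmin import jsmin

src = """
// strip this comment
function add(a, b) {
    return a + b;  // and this one
}
"""
print(jsmin(src))  # roughly: function add(a,b){return a+b;}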
xhzdeng/crpn | a5aef0f80dbe486103123f740c634fb01e6cc9a1 | lib/pycocotools/coco.py | python | COCO.download | ( self, tarDir = None, imgIds = [] ) | Download COCO images from mscoco.org server.
:param tarDir (str): COCO results directory name
imgIds (list): images to be downloaded
:return: | Download COCO images from mscoco.org server.
:param tarDir (str): COCO results directory name
imgIds (list): images to be downloaded
:return: | [
"Download",
"COCO",
"images",
"from",
"mscoco",
".",
"org",
"server",
".",
":",
"param",
"tarDir",
"(",
"str",
")",
":",
"COCO",
"results",
"directory",
"name",
"imgIds",
"(",
"list",
")",
":",
"images",
"to",
"be",
"downloaded",
":",
"return",
":"
] | def download( self, tarDir = None, imgIds = [] ):
'''
Download COCO images from mscoco.org server.
:param tarDir (str): COCO results directory name
imgIds (list): images to be downloaded
:return:
'''
if tarDir is None:
print 'Please specify target directory'
return -1
if len(imgIds) == 0:
imgs = self.imgs.values()
else:
imgs = self.loadImgs(imgIds)
N = len(imgs)
if not os.path.exists(tarDir):
os.makedirs(tarDir)
for i, img in enumerate(imgs):
tic = time.time()
fname = os.path.join(tarDir, img['file_name'])
if not os.path.exists(fname):
urllib.urlretrieve(img['coco_url'], fname)
print 'downloaded %d/%d images (t=%.1fs)'%(i, N, time.time()- tic) | [
"def",
"download",
"(",
"self",
",",
"tarDir",
"=",
"None",
",",
"imgIds",
"=",
"[",
"]",
")",
":",
"if",
"tarDir",
"is",
"None",
":",
"print",
"'Please specify target directory'",
"return",
"-",
"1",
"if",
"len",
"(",
"imgIds",
")",
"==",
"0",
":",
"imgs",
"=",
"self",
".",
"imgs",
".",
"values",
"(",
")",
"else",
":",
"imgs",
"=",
"self",
".",
"loadImgs",
"(",
"imgIds",
")",
"N",
"=",
"len",
"(",
"imgs",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"tarDir",
")",
":",
"os",
".",
"makedirs",
"(",
"tarDir",
")",
"for",
"i",
",",
"img",
"in",
"enumerate",
"(",
"imgs",
")",
":",
"tic",
"=",
"time",
".",
"time",
"(",
")",
"fname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tarDir",
",",
"img",
"[",
"'file_name'",
"]",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"fname",
")",
":",
"urllib",
".",
"urlretrieve",
"(",
"img",
"[",
"'coco_url'",
"]",
",",
"fname",
")",
"print",
"'downloaded %d/%d images (t=%.1fs)'",
"%",
"(",
"i",
",",
"N",
",",
"time",
".",
"time",
"(",
")",
"-",
"tic",
")"
] | https://github.com/xhzdeng/crpn/blob/a5aef0f80dbe486103123f740c634fb01e6cc9a1/lib/pycocotools/coco.py#L329-L351 |
||
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/contrib/learn/python/learn/dataframe/transform.py | python | TensorFlowTransform._check_output_tensors | (self, output_tensors) | Helper for `build(...)`; verifies the output of `_build_transform`.
Args:
output_tensors: value returned by a call to `_build_transform`.
Raises:
TypeError: `transform_output` is not a list.
ValueError: `transform_output` does not match `output_names`. | Helper for `build(...)`; verifies the output of `_build_transform`. | [
"Helper",
"for",
"build",
"(",
"...",
")",
";",
"verifies",
"the",
"output",
"of",
"_build_transform",
"."
] | def _check_output_tensors(self, output_tensors):
"""Helper for `build(...)`; verifies the output of `_build_transform`.
Args:
output_tensors: value returned by a call to `_build_transform`.
Raises:
TypeError: `transform_output` is not a list.
ValueError: `transform_output` does not match `output_names`.
"""
if not isinstance(output_tensors, self.return_type):
raise TypeError(
"Expected a NamedTuple of Tensors with elements %s; got %s." %
(self.output_names, type(output_tensors).__name__)) | [
"def",
"_check_output_tensors",
"(",
"self",
",",
"output_tensors",
")",
":",
"if",
"not",
"isinstance",
"(",
"output_tensors",
",",
"self",
".",
"return_type",
")",
":",
"raise",
"TypeError",
"(",
"\"Expected a NamedTuple of Tensors with elements %s; got %s.\"",
"%",
"(",
"self",
".",
"output_names",
",",
"type",
"(",
"output_tensors",
")",
".",
"__name__",
")",
")"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/learn/python/learn/dataframe/transform.py#L251-L264 |
||
borglab/gtsam | a5bee157efce6a0563704bce6a5d188c29817f39 | gtsam/3rdparty/GeographicLib/python/geographiclib/polygonarea.py | python | PolygonArea.AddEdge | (self, azi, s) | Add the next edge to the polygon
:param azi: the azimuth at the current point in degrees
:param s: the length of the edge in meters
This specifies the new vertex in terms of the edge from the current
vertex. | Add the next edge to the polygon | [
"Add",
"the",
"next",
"edge",
"to",
"the",
"polygon"
] | def AddEdge(self, azi, s):
"""Add the next edge to the polygon
:param azi: the azimuth at the current point in degrees
:param s: the length of the edge in meters
This specifies the new vertex in terms of the edge from the current
vertex.
"""
if self.num != 0:
_, lat, lon, _, _, _, _, _, S12 = self.earth._GenDirect(
self.lat1, self.lon1, azi, False, s, self._mask)
self._perimetersum.Add(s)
if not self.polyline:
self._areasum.Add(S12)
self._crossings += PolygonArea._transitdirect(self.lon1, lon)
self.lat1 = lat
self.lon1 = lon
self.num += 1 | [
"def",
"AddEdge",
"(",
"self",
",",
"azi",
",",
"s",
")",
":",
"if",
"self",
".",
"num",
"!=",
"0",
":",
"_",
",",
"lat",
",",
"lon",
",",
"_",
",",
"_",
",",
"_",
",",
"_",
",",
"_",
",",
"S12",
"=",
"self",
".",
"earth",
".",
"_GenDirect",
"(",
"self",
".",
"lat1",
",",
"self",
".",
"lon1",
",",
"azi",
",",
"False",
",",
"s",
",",
"self",
".",
"_mask",
")",
"self",
".",
"_perimetersum",
".",
"Add",
"(",
"s",
")",
"if",
"not",
"self",
".",
"polyline",
":",
"self",
".",
"_areasum",
".",
"Add",
"(",
"S12",
")",
"self",
".",
"_crossings",
"+=",
"PolygonArea",
".",
"_transitdirect",
"(",
"self",
".",
"lon1",
",",
"lon",
")",
"self",
".",
"lat1",
"=",
"lat",
"self",
".",
"lon1",
"=",
"lon",
"self",
".",
"num",
"+=",
"1"
] | https://github.com/borglab/gtsam/blob/a5bee157efce6a0563704bce6a5d188c29817f39/gtsam/3rdparty/GeographicLib/python/geographiclib/polygonarea.py#L139-L159 |
||
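A minimal sketch of how AddEdge is typically chained, assuming the standard geographiclib entry point; the first vertex must be fixed with AddPoint before edges can be added.

from geographiclib.geodesic import Geodesic

poly = Geodesic.WGS84.Polygon()
poly.AddPoint(52.0, 13.0)              # anchor vertex: lat, lon in degrees
poly.AddEdge(90, 10000)                # 10 km due east of the current vertex
poly.AddEdge(0, 10000)                 # then 10 km due north
num, perimeter, area = poly.Compute()  # closes the polygon back to the anchor
print(num, perimeter, area)            # 3 vertices, meters, square meters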
apache/trafodion | 8455c839ad6b6d7b6e04edda5715053095b78046 | install/python-installer/scripts/common.py | python | ParseJson.load | (self) | load json file to a dict | load json file to a dict | [
"load",
"json",
"file",
"to",
"a",
"dict"
] | def load(self):
""" load json file to a dict """
if not os.path.exists(self.__js_file): err_m('Cannot find json file %s' % self.__js_file)
with open(self.__js_file, 'r') as f:
tmparray = f.readlines()
content = ''
for t in tmparray:
content += t
try:
return defaultdict(str, json.loads(content))
except ValueError:
err_m('No json format found in config file %s' % self.__js_file) | [
"def",
"load",
"(",
"self",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"__js_file",
")",
":",
"err_m",
"(",
"'Cannot find json file %s'",
"%",
"self",
".",
"__js_file",
")",
"with",
"open",
"(",
"self",
".",
"__js_file",
",",
"'r'",
")",
"as",
"f",
":",
"tmparray",
"=",
"f",
".",
"readlines",
"(",
")",
"content",
"=",
"''",
"for",
"t",
"in",
"tmparray",
":",
"content",
"+=",
"t",
"try",
":",
"return",
"defaultdict",
"(",
"str",
",",
"json",
".",
"loads",
"(",
"content",
")",
")",
"except",
"ValueError",
":",
"err_m",
"(",
"'No json format found in config file %s'",
"%",
"self",
".",
"__js_file",
")"
] | https://github.com/apache/trafodion/blob/8455c839ad6b6d7b6e04edda5715053095b78046/install/python-installer/scripts/common.py#L554-L566 |
||
Samsung/veles | 95ed733c2e49bc011ad98ccf2416ecec23fbf352 | libVeles/cpplint.py | python | CleanseComments | (line) | return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line) | Removes //-comments and single-line C-style /* */ comments.
Args:
line: A line of C++ source.
Returns:
The line with single-line comments removed. | Removes //-comments and single-line C-style /* */ comments. | [
"Removes",
"//",
"-",
"comments",
"and",
"single",
"-",
"line",
"C",
"-",
"style",
"/",
"*",
"*",
"/",
"comments",
"."
] | def CleanseComments(line):
"""Removes //-comments and single-line C-style /* */ comments.
Args:
line: A line of C++ source.
Returns:
The line with single-line comments removed.
"""
commentpos = line.find('//')
if commentpos != -1 and not IsCppString(line[:commentpos]):
line = line[:commentpos].rstrip()
# get rid of /* ... */
return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line) | [
"def",
"CleanseComments",
"(",
"line",
")",
":",
"commentpos",
"=",
"line",
".",
"find",
"(",
"'//'",
")",
"if",
"commentpos",
"!=",
"-",
"1",
"and",
"not",
"IsCppString",
"(",
"line",
"[",
":",
"commentpos",
"]",
")",
":",
"line",
"=",
"line",
"[",
":",
"commentpos",
"]",
".",
"rstrip",
"(",
")",
"# get rid of /* ... */",
"return",
"_RE_PATTERN_CLEANSE_LINE_C_COMMENTS",
".",
"sub",
"(",
"''",
",",
"line",
")"
] | https://github.com/Samsung/veles/blob/95ed733c2e49bc011ad98ccf2416ecec23fbf352/libVeles/cpplint.py#L972-L985 |
|
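cpplint is usually invoked as a script, so importing it to exercise this helper directly is an assumption; the expected outputs in the comments follow from the code above.

import cpplint

print(cpplint.CleanseComments('int x = 1;  // trailing comment'))
# -> 'int x = 1;'
print(cpplint.CleanseComments('int y = /* inline */ 2;'))
# -> 'int y =  2;'  (the /* */ body is removed; surrounding spaces remain)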
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | configure.py | python | set_mpi_home | (environ_cp) | Set MPI_HOME. | Set MPI_HOME. | [
"Set",
"MPI_HOME",
"."
] | def set_mpi_home(environ_cp):
"""Set MPI_HOME."""
default_mpi_home = which('mpirun') or which('mpiexec') or ''
default_mpi_home = os.path.dirname(os.path.dirname(default_mpi_home))
ask_mpi_home = ('Please specify the MPI toolkit folder. [Default is %s]: '
) % default_mpi_home
while True:
mpi_home = get_from_env_or_user_or_default(environ_cp, 'MPI_HOME',
ask_mpi_home, default_mpi_home)
if os.path.exists(os.path.join(mpi_home, 'include')) and os.path.exists(
os.path.join(mpi_home, 'lib')):
break
print('Invalid path to the MPI Toolkit. %s or %s cannot be found' %
(os.path.join(mpi_home, 'include'),
os.path.exists(os.path.join(mpi_home, 'lib'))))
environ_cp['MPI_HOME'] = ''
# Set MPI_HOME
environ_cp['MPI_HOME'] = str(mpi_home) | [
"def",
"set_mpi_home",
"(",
"environ_cp",
")",
":",
"default_mpi_home",
"=",
"which",
"(",
"'mpirun'",
")",
"or",
"which",
"(",
"'mpiexec'",
")",
"or",
"''",
"default_mpi_home",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"default_mpi_home",
")",
")",
"ask_mpi_home",
"=",
"(",
"'Please specify the MPI toolkit folder. [Default is %s]: '",
")",
"%",
"default_mpi_home",
"while",
"True",
":",
"mpi_home",
"=",
"get_from_env_or_user_or_default",
"(",
"environ_cp",
",",
"'MPI_HOME'",
",",
"ask_mpi_home",
",",
"default_mpi_home",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"mpi_home",
",",
"'include'",
")",
")",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"mpi_home",
",",
"'lib'",
")",
")",
":",
"break",
"print",
"(",
"'Invalid path to the MPI Toolkit. %s or %s cannot be found'",
"%",
"(",
"os",
".",
"path",
".",
"join",
"(",
"mpi_home",
",",
"'include'",
")",
",",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"mpi_home",
",",
"'lib'",
")",
")",
")",
")",
"environ_cp",
"[",
"'MPI_HOME'",
"]",
"=",
"''",
"# Set MPI_HOME",
"environ_cp",
"[",
"'MPI_HOME'",
"]",
"=",
"str",
"(",
"mpi_home",
")"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/configure.py#L885-L906 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/turtle.py | python | read_docstrings | (lang) | Read in docstrings from lang-specific docstring dictionary.
Transfer docstrings, translated to lang, from a dictionary-file
to the methods of classes Screen and Turtle and - in revised form -
to the corresponding functions. | Read in docstrings from lang-specific docstring dictionary. | [
"Read",
"in",
"docstrings",
"from",
"lang",
"-",
"specific",
"docstring",
"dictionary",
"."
] | def read_docstrings(lang):
"""Read in docstrings from lang-specific docstring dictionary.
Transfer docstrings, translated to lang, from a dictionary-file
to the methods of classes Screen and Turtle and - in revised form -
to the corresponding functions.
"""
modname = "turtle_docstringdict_%(language)s" % {'language':lang.lower()}
module = __import__(modname)
docsdict = module.docsdict
for key in docsdict:
try:
# eval(key).im_func.__doc__ = docsdict[key]
eval(key).__doc__ = docsdict[key]
except Exception:
print("Bad docstring-entry: %s" % key) | [
"def",
"read_docstrings",
"(",
"lang",
")",
":",
"modname",
"=",
"\"turtle_docstringdict_%(language)s\"",
"%",
"{",
"'language'",
":",
"lang",
".",
"lower",
"(",
")",
"}",
"module",
"=",
"__import__",
"(",
"modname",
")",
"docsdict",
"=",
"module",
".",
"docsdict",
"for",
"key",
"in",
"docsdict",
":",
"try",
":",
"# eval(key).im_func.__doc__ = docsdict[key]",
"eval",
"(",
"key",
")",
".",
"__doc__",
"=",
"docsdict",
"[",
"key",
"]",
"except",
"Exception",
":",
"print",
"(",
"\"Bad docstring-entry: %s\"",
"%",
"key",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/turtle.py#L3854-L3869 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/datetime.py | python | datetime.timetz | (self) | return time(self.hour, self.minute, self.second, self.microsecond,
self._tzinfo, fold=self.fold) | Return the time part, with same tzinfo. | Return the time part, with same tzinfo. | [
"Return",
"the",
"time",
"part",
"with",
"same",
"tzinfo",
"."
] | def timetz(self):
"Return the time part, with same tzinfo."
return time(self.hour, self.minute, self.second, self.microsecond,
self._tzinfo, fold=self.fold) | [
"def",
"timetz",
"(",
"self",
")",
":",
"return",
"time",
"(",
"self",
".",
"hour",
",",
"self",
".",
"minute",
",",
"self",
".",
"second",
",",
"self",
".",
"microsecond",
",",
"self",
".",
"_tzinfo",
",",
"fold",
"=",
"self",
".",
"fold",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/datetime.py#L1764-L1767 |
|
mhammond/pywin32 | 44afd86ba8485194df93234639243252deeb40d5 | com/win32com/client/gencache.py | python | GetModuleForTypelib | (typelibCLSID, lcid, major, minor) | return mod | Get a Python module for a type library ID
Given the CLSID of a typelibrary, return an imported Python module,
else None
Params
typelibCLSID -- IID of the type library.
major -- Integer major version.
minor -- Integer minor version
lcid -- Integer LCID for the library. | Get a Python module for a type library ID | [
"Get",
"a",
"Python",
"module",
"for",
"a",
"type",
"library",
"ID"
] | def GetModuleForTypelib(typelibCLSID, lcid, major, minor):
"""Get a Python module for a type library ID
Given the CLSID of a typelibrary, return an imported Python module,
else None
Params
typelibCLSID -- IID of the type library.
major -- Integer major version.
minor -- Integer minor version
lcid -- Integer LCID for the library.
"""
modName = GetGeneratedFileName(typelibCLSID, lcid, major, minor)
mod = _GetModule(modName)
# If the import worked, it doesn't mean we have actually added this
# module to our cache though - check that here.
if "_in_gencache_" not in mod.__dict__:
AddModuleToCache(typelibCLSID, lcid, major, minor)
assert "_in_gencache_" in mod.__dict__
return mod | [
"def",
"GetModuleForTypelib",
"(",
"typelibCLSID",
",",
"lcid",
",",
"major",
",",
"minor",
")",
":",
"modName",
"=",
"GetGeneratedFileName",
"(",
"typelibCLSID",
",",
"lcid",
",",
"major",
",",
"minor",
")",
"mod",
"=",
"_GetModule",
"(",
"modName",
")",
"# If the import worked, it doesn't mean we have actually added this",
"# module to our cache though - check that here.",
"if",
"\"_in_gencache_\"",
"not",
"in",
"mod",
".",
"__dict__",
":",
"AddModuleToCache",
"(",
"typelibCLSID",
",",
"lcid",
",",
"major",
",",
"minor",
")",
"assert",
"\"_in_gencache_\"",
"in",
"mod",
".",
"__dict__",
"return",
"mod"
] | https://github.com/mhammond/pywin32/blob/44afd86ba8485194df93234639243252deeb40d5/com/win32com/client/gencache.py#L267-L286 |
|
eclipse/sumo | 7132a9b8b6eea734bdec38479026b4d8c4336d03 | tools/simpla/_platoonmanager.py | python | PlatoonManager.getPlatoonLeaders | (self) | return [pltn.getVehicles()[0] for pltn in self._platoons.values() if pltn.size() > 1] | getPlatoonLeaders() -> list(PVehicle)
Returns all vehicles currently leading a platoon (of size > 1).
These can be in PlatoonMode.LEADER or in PlatoonMode.CATCHUP | getPlatoonLeaders() -> list(PVehicle) | [
"getPlatoonLeaders",
"()",
"-",
">",
"list",
"(",
"PVehicle",
")"
] | def getPlatoonLeaders(self):
'''getPlatoonLeaders() -> list(PVehicle)
Returns all vehicles currently leading a platoon (of size > 1).
These can be in PlatoonMode.LEADER or in PlatoonMode.CATCHUP
'''
return [pltn.getVehicles()[0] for pltn in self._platoons.values() if pltn.size() > 1] | [
"def",
"getPlatoonLeaders",
"(",
"self",
")",
":",
"return",
"[",
"pltn",
".",
"getVehicles",
"(",
")",
"[",
"0",
"]",
"for",
"pltn",
"in",
"self",
".",
"_platoons",
".",
"values",
"(",
")",
"if",
"pltn",
".",
"size",
"(",
")",
">",
"1",
"]"
] | https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/simpla/_platoonmanager.py#L180-L186 |
|
forkineye/ESPixelStick | 22926f1c0d1131f1369fc7cad405689a095ae3cb | dist/bin/pyserial/serial/rfc2217.py | python | PortManager.telnet_send_option | (self, action, option) | Send DO, DONT, WILL, WONT. | Send DO, DONT, WILL, WONT. | [
"Send",
"DO",
"DONT",
"WILL",
"WONT",
"."
] | def telnet_send_option(self, action, option):
"""Send DO, DONT, WILL, WONT."""
self.connection.write(IAC + action + option) | [
"def",
"telnet_send_option",
"(",
"self",
",",
"action",
",",
"option",
")",
":",
"self",
".",
"connection",
".",
"write",
"(",
"IAC",
"+",
"action",
"+",
"option",
")"
] | https://github.com/forkineye/ESPixelStick/blob/22926f1c0d1131f1369fc7cad405689a095ae3cb/dist/bin/pyserial/serial/rfc2217.py#L993-L995 |
||
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | configure.py | python | set_tf_cuda_compute_capabilities | (environ_cp) | Set TF_CUDA_COMPUTE_CAPABILITIES. | Set TF_CUDA_COMPUTE_CAPABILITIES. | [
"Set",
"TF_CUDA_COMPUTE_CAPABILITIES",
"."
] | def set_tf_cuda_compute_capabilities(environ_cp):
"""Set TF_CUDA_COMPUTE_CAPABILITIES."""
while True:
native_cuda_compute_capabilities = get_native_cuda_compute_capabilities(
environ_cp)
if not native_cuda_compute_capabilities:
default_cuda_compute_capabilities = _DEFAULT_CUDA_COMPUTE_CAPABILITIES
else:
default_cuda_compute_capabilities = native_cuda_compute_capabilities
ask_cuda_compute_capabilities = (
'Please specify a list of comma-separated '
'Cuda compute capabilities you want to '
'build with.\nYou can find the compute '
'capability of your device at: '
'https://developer.nvidia.com/cuda-gpus.\nPlease'
' note that each additional compute '
'capability significantly increases your '
'build time and binary size. [Default is: %s]' %
default_cuda_compute_capabilities)
tf_cuda_compute_capabilities = get_from_env_or_user_or_default(
environ_cp, 'TF_CUDA_COMPUTE_CAPABILITIES',
ask_cuda_compute_capabilities, default_cuda_compute_capabilities)
# Check whether all capabilities from the input is valid
all_valid = True
for compute_capability in tf_cuda_compute_capabilities.split(','):
m = re.match('[0-9]+.[0-9]+', compute_capability)
if not m:
print('Invalid compute capability: ' % compute_capability)
all_valid = False
else:
ver = int(m.group(0).split('.')[0])
if ver < 3:
print('Only compute capabilities 3.0 or higher are supported.')
all_valid = False
if all_valid:
break
# Reset and Retry
environ_cp['TF_CUDA_COMPUTE_CAPABILITIES'] = ''
# Set TF_CUDA_COMPUTE_CAPABILITIES
environ_cp['TF_CUDA_COMPUTE_CAPABILITIES'] = tf_cuda_compute_capabilities
write_action_env_to_bazelrc('TF_CUDA_COMPUTE_CAPABILITIES',
tf_cuda_compute_capabilities) | [
"def",
"set_tf_cuda_compute_capabilities",
"(",
"environ_cp",
")",
":",
"while",
"True",
":",
"native_cuda_compute_capabilities",
"=",
"get_native_cuda_compute_capabilities",
"(",
"environ_cp",
")",
"if",
"not",
"native_cuda_compute_capabilities",
":",
"default_cuda_compute_capabilities",
"=",
"_DEFAULT_CUDA_COMPUTE_CAPABILITIES",
"else",
":",
"default_cuda_compute_capabilities",
"=",
"native_cuda_compute_capabilities",
"ask_cuda_compute_capabilities",
"=",
"(",
"'Please specify a list of comma-separated '",
"'Cuda compute capabilities you want to '",
"'build with.\\nYou can find the compute '",
"'capability of your device at: '",
"'https://developer.nvidia.com/cuda-gpus.\\nPlease'",
"' note that each additional compute '",
"'capability significantly increases your '",
"'build time and binary size. [Default is: %s]'",
"%",
"default_cuda_compute_capabilities",
")",
"tf_cuda_compute_capabilities",
"=",
"get_from_env_or_user_or_default",
"(",
"environ_cp",
",",
"'TF_CUDA_COMPUTE_CAPABILITIES'",
",",
"ask_cuda_compute_capabilities",
",",
"default_cuda_compute_capabilities",
")",
"# Check whether all capabilities from the input is valid",
"all_valid",
"=",
"True",
"for",
"compute_capability",
"in",
"tf_cuda_compute_capabilities",
".",
"split",
"(",
"','",
")",
":",
"m",
"=",
"re",
".",
"match",
"(",
"'[0-9]+.[0-9]+'",
",",
"compute_capability",
")",
"if",
"not",
"m",
":",
"print",
"(",
"'Invalid compute capability: '",
"%",
"compute_capability",
")",
"all_valid",
"=",
"False",
"else",
":",
"ver",
"=",
"int",
"(",
"m",
".",
"group",
"(",
"0",
")",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
")",
"if",
"ver",
"<",
"3",
":",
"print",
"(",
"'Only compute capabilities 3.0 or higher are supported.'",
")",
"all_valid",
"=",
"False",
"if",
"all_valid",
":",
"break",
"# Reset and Retry",
"environ_cp",
"[",
"'TF_CUDA_COMPUTE_CAPABILITIES'",
"]",
"=",
"''",
"# Set TF_CUDA_COMPUTE_CAPABILITIES",
"environ_cp",
"[",
"'TF_CUDA_COMPUTE_CAPABILITIES'",
"]",
"=",
"tf_cuda_compute_capabilities",
"write_action_env_to_bazelrc",
"(",
"'TF_CUDA_COMPUTE_CAPABILITIES'",
",",
"tf_cuda_compute_capabilities",
")"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/configure.py#L734-L779 |
||
Kitware/VTK | 5b4df4d90a4f31194d97d3c639dd38ea8f81e8b8 | Wrapping/Python/vtkmodules/numpy_interface/dataset_adapter.py | python | CompositeDataSetAttributes.keys | (self) | return self.ArrayNames | Returns the names of the arrays as a list. | Returns the names of the arrays as a list. | [
"Returns",
"the",
"names",
"of",
"the",
"arrays",
"as",
"a",
"list",
"."
] | def keys(self):
"""Returns the names of the arrays as a list."""
return self.ArrayNames | [
"def",
"keys",
"(",
"self",
")",
":",
"return",
"self",
".",
"ArrayNames"
] | https://github.com/Kitware/VTK/blob/5b4df4d90a4f31194d97d3c639dd38ea8f81e8b8/Wrapping/Python/vtkmodules/numpy_interface/dataset_adapter.py#L783-L785 |
|
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/tpu/tensor_tracer.py | python | TensorTracer.reason | (op_idx, details) | return '%d %s'%(op_idx, details) | Returns reason why the Op at op_idx is traced or not. | Returns reason why the Op at op_idx is traced or not. | [
"Returns",
"reason",
"why",
"the",
"Op",
"at",
"op_idx",
"is",
"traced",
"or",
"not",
"."
] | def reason(op_idx, details):
"""Returns reason why the Op at op_idx is traced or not."""
return '%d %s'%(op_idx, details) | [
"def",
"reason",
"(",
"op_idx",
",",
"details",
")",
":",
"return",
"'%d %s'",
"%",
"(",
"op_idx",
",",
"details",
")"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/tpu/tensor_tracer.py#L546-L549 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Tools/Python/3.7.10/windows/Lib/tkinter/tix.py | python | Grid.anchor_clear | (self) | Removes the selection anchor. | Removes the selection anchor. | [
"Removes",
"the",
"selection",
"anchor",
"."
] | def anchor_clear(self):
"""Removes the selection anchor."""
self.tk.call(self, 'anchor', 'clear') | [
"def",
"anchor_clear",
"(",
"self",
")",
":",
"self",
".",
"tk",
".",
"call",
"(",
"self",
",",
"'anchor'",
",",
"'clear'",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/tkinter/tix.py#L1797-L1799 |
||
google/shaka-packager | e1b0c7c45431327fd3ce193514a5407d07b39b22 | packager/third_party/protobuf/python/google/protobuf/internal/well_known_types.py | python | _FieldMaskTree.__init__ | (self, field_mask=None) | Initializes the tree by FieldMask. | Initializes the tree by FieldMask. | [
"Initializes",
"the",
"tree",
"by",
"FieldMask",
"."
] | def __init__(self, field_mask=None):
"""Initializes the tree by FieldMask."""
self._root = {}
if field_mask:
self.MergeFromFieldMask(field_mask) | [
"def",
"__init__",
"(",
"self",
",",
"field_mask",
"=",
"None",
")",
":",
"self",
".",
"_root",
"=",
"{",
"}",
"if",
"field_mask",
":",
"self",
".",
"MergeFromFieldMask",
"(",
"field_mask",
")"
] | https://github.com/google/shaka-packager/blob/e1b0c7c45431327fd3ce193514a5407d07b39b22/packager/third_party/protobuf/python/google/protobuf/internal/well_known_types.py#L549-L553 |
||
apache/incubator-mxnet | f03fb23f1d103fec9541b5ae59ee06b1734a51d9 | python/mxnet/image/image.py | python | ImageIter.next_sample | (self) | Helper function for reading in next sample. | Helper function for reading in next sample. | [
"Helper",
"function",
"for",
"reading",
"in",
"next",
"sample",
"."
] | def next_sample(self):
"""Helper function for reading in next sample."""
if self._allow_read is False:
raise StopIteration
if self.seq is not None:
if self.cur < self.num_image:
idx = self.seq[self.cur]
else:
if self.last_batch_handle != 'discard':
self.cur = 0
raise StopIteration
self.cur += 1
if self.imgrec is not None:
s = self.imgrec.read_idx(idx)
header, img = recordio.unpack(s)
if self.imglist is None:
return header.label, img
else:
return self.imglist[idx][0], img
else:
label, fname = self.imglist[idx]
return label, self.read_image(fname)
else:
s = self.imgrec.read()
if s is None:
if self.last_batch_handle != 'discard':
self.imgrec.reset()
raise StopIteration
header, img = recordio.unpack(s)
return header.label, img | [
"def",
"next_sample",
"(",
"self",
")",
":",
"if",
"self",
".",
"_allow_read",
"is",
"False",
":",
"raise",
"StopIteration",
"if",
"self",
".",
"seq",
"is",
"not",
"None",
":",
"if",
"self",
".",
"cur",
"<",
"self",
".",
"num_image",
":",
"idx",
"=",
"self",
".",
"seq",
"[",
"self",
".",
"cur",
"]",
"else",
":",
"if",
"self",
".",
"last_batch_handle",
"!=",
"'discard'",
":",
"self",
".",
"cur",
"=",
"0",
"raise",
"StopIteration",
"self",
".",
"cur",
"+=",
"1",
"if",
"self",
".",
"imgrec",
"is",
"not",
"None",
":",
"s",
"=",
"self",
".",
"imgrec",
".",
"read_idx",
"(",
"idx",
")",
"header",
",",
"img",
"=",
"recordio",
".",
"unpack",
"(",
"s",
")",
"if",
"self",
".",
"imglist",
"is",
"None",
":",
"return",
"header",
".",
"label",
",",
"img",
"else",
":",
"return",
"self",
".",
"imglist",
"[",
"idx",
"]",
"[",
"0",
"]",
",",
"img",
"else",
":",
"label",
",",
"fname",
"=",
"self",
".",
"imglist",
"[",
"idx",
"]",
"return",
"label",
",",
"self",
".",
"read_image",
"(",
"fname",
")",
"else",
":",
"s",
"=",
"self",
".",
"imgrec",
".",
"read",
"(",
")",
"if",
"s",
"is",
"None",
":",
"if",
"self",
".",
"last_batch_handle",
"!=",
"'discard'",
":",
"self",
".",
"imgrec",
".",
"reset",
"(",
")",
"raise",
"StopIteration",
"header",
",",
"img",
"=",
"recordio",
".",
"unpack",
"(",
"s",
")",
"return",
"header",
".",
"label",
",",
"img"
] | https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/image/image.py#L1459-L1488 |
||
assimp/assimp | 97c7e084c2f7f8c9355ea42f73605890481bddc5 | port/PyAssimp/scripts/transformations.py | python | inverse_matrix | (matrix) | return numpy.linalg.inv(matrix) | Return inverse of square transformation matrix.
>>> M0 = random_rotation_matrix()
>>> M1 = inverse_matrix(M0.T)
>>> numpy.allclose(M1, numpy.linalg.inv(M0.T))
True
>>> for size in range(1, 7):
... M0 = numpy.random.rand(size, size)
... M1 = inverse_matrix(M0)
... if not numpy.allclose(M1, numpy.linalg.inv(M0)): print size | Return inverse of square transformation matrix. | [
"Return",
"inverse",
"of",
"square",
"transformation",
"matrix",
"."
] | def inverse_matrix(matrix):
"""Return inverse of square transformation matrix.
>>> M0 = random_rotation_matrix()
>>> M1 = inverse_matrix(M0.T)
>>> numpy.allclose(M1, numpy.linalg.inv(M0.T))
True
>>> for size in range(1, 7):
... M0 = numpy.random.rand(size, size)
... M1 = inverse_matrix(M0)
... if not numpy.allclose(M1, numpy.linalg.inv(M0)): print size
"""
return numpy.linalg.inv(matrix) | [
"def",
"inverse_matrix",
"(",
"matrix",
")",
":",
"return",
"numpy",
".",
"linalg",
".",
"inv",
"(",
"matrix",
")"
] | https://github.com/assimp/assimp/blob/97c7e084c2f7f8c9355ea42f73605890481bddc5/port/PyAssimp/scripts/transformations.py#L1633-L1646 |
|
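A minimal check of the wrapper above; the import path is an assumption, since it depends on how the PyAssimp scripts are placed on sys.path:

import numpy
from transformations import inverse_matrix  # import path is an assumption

M = numpy.array([[2.0, 0.0],
                 [1.0, 3.0]])
M_inv = inverse_matrix(M)  # thin wrapper over numpy.linalg.inv
print(numpy.allclose(numpy.dot(M, M_inv), numpy.identity(2)))  # True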
lammps/lammps | b75c3065430a75b1b5543a10e10f46d9b4c91913 | python/lammps/pylammps.py | python | Atom.torque | (self) | return self.get("torque", self.index) | Return the total torque acting on the particle
:type: numpy.array (float, float, float) | Return the total torque acting on the particle | [
"Return",
"the",
"total",
"torque",
"acting",
"on",
"the",
"particle"
] | def torque(self):
"""
Return the total torque acting on the particle
:type: numpy.array (float, float, float)
"""
return self.get("torque", self.index) | [
"def",
"torque",
"(",
"self",
")",
":",
"return",
"self",
".",
"get",
"(",
"\"torque\"",
",",
"self",
".",
"index",
")"
] | https://github.com/lammps/lammps/blob/b75c3065430a75b1b5543a10e10f46d9b4c91913/python/lammps/pylammps.py#L259-L265 |
|
openthread/openthread | 9fcdbed9c526c70f1556d1ed84099c1535c7cd32 | third_party/mbedtls/repo/scripts/config.py | python | keep_in_baremetal | (name) | return True | Rules for symbols in the "baremetal" configuration. | Rules for symbols in the "baremetal" configuration. | [
"Rules",
"for",
"symbols",
"in",
"the",
"baremetal",
"configuration",
"."
] | def keep_in_baremetal(name):
"""Rules for symbols in the "baremetal" configuration."""
if name in EXCLUDE_FROM_BAREMETAL:
return False
return True | [
"def",
"keep_in_baremetal",
"(",
"name",
")",
":",
"if",
"name",
"in",
"EXCLUDE_FROM_BAREMETAL",
":",
"return",
"False",
"return",
"True"
] | https://github.com/openthread/openthread/blob/9fcdbed9c526c70f1556d1ed84099c1535c7cd32/third_party/mbedtls/repo/scripts/config.py#L258-L262 |
|
mantidproject/mantid | 03deeb89254ec4289edb8771e0188c2090a02f32 | scripts/Diffraction/isis_powder/abstract_inst.py | python | AbstractInst._generate_out_file_paths | (self, run_details) | return out_file_names | Generates the various output paths and file names to be used during saving or as workspace names
:param run_details: The run details associated with this run
:return: A dictionary containing the various output paths and generated output name | Generates the various output paths and file names to be used during saving or as workspace names
:param run_details: The run details associated with this run
:return: A dictionary containing the various output paths and generated output name | [
"Generates",
"the",
"various",
"output",
"paths",
"and",
"file",
"names",
"to",
"be",
"used",
"during",
"saving",
"or",
"as",
"workspace",
"names",
":",
"param",
"run_details",
":",
"The",
"run",
"details",
"associated",
"with",
"this",
"run",
":",
"return",
":",
"A",
"dictionary",
"containing",
"the",
"various",
"output",
"paths",
"and",
"generated",
"output",
"name"
] | def _generate_out_file_paths(self, run_details):
"""
Generates the various output paths and file names to be used during saving or as workspace names
:param run_details: The run details associated with this run
:return: A dictionary containing the various output paths and generated output name
"""
output_directory = os.path.join(self._output_dir, run_details.label, self._user_name)
output_directory = os.path.abspath(os.path.expanduser(output_directory))
dat_files_directory = output_directory
if self._inst_settings.dat_files_directory:
dat_files_directory = os.path.join(output_directory,
self._inst_settings.dat_files_directory)
file_type = "" if run_details.file_extension is None else run_details.file_extension.lstrip(
".")
out_file_names = {"output_folder": output_directory}
format_options = {
"inst": self._inst_prefix,
"instlow": self._inst_prefix.lower(),
"instshort": self._inst_prefix_short,
"runno": run_details.output_run_string,
"fileext": file_type,
"_fileext": "_" + file_type if file_type else "",
"suffix": run_details.output_suffix if run_details.output_suffix else ""
}
format_options = self._add_formatting_options(format_options)
output_formats = {
"nxs_filename": output_directory,
"gss_filename": output_directory,
"tof_xye_filename": dat_files_directory,
"dspacing_xye_filename": dat_files_directory
}
for key, output_dir in output_formats.items():
filepath = os.path.join(output_dir,
getattr(self._inst_settings, key).format(**format_options))
out_file_names[key] = filepath
out_file_names['output_name'] = os.path.splitext(
os.path.basename(out_file_names['nxs_filename']))[0]
return out_file_names | [
"def",
"_generate_out_file_paths",
"(",
"self",
",",
"run_details",
")",
":",
"output_directory",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_output_dir",
",",
"run_details",
".",
"label",
",",
"self",
".",
"_user_name",
")",
"output_directory",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"expanduser",
"(",
"output_directory",
")",
")",
"dat_files_directory",
"=",
"output_directory",
"if",
"self",
".",
"_inst_settings",
".",
"dat_files_directory",
":",
"dat_files_directory",
"=",
"os",
".",
"path",
".",
"join",
"(",
"output_directory",
",",
"self",
".",
"_inst_settings",
".",
"dat_files_directory",
")",
"file_type",
"=",
"\"\"",
"if",
"run_details",
".",
"file_extension",
"is",
"None",
"else",
"run_details",
".",
"file_extension",
".",
"lstrip",
"(",
"\".\"",
")",
"out_file_names",
"=",
"{",
"\"output_folder\"",
":",
"output_directory",
"}",
"format_options",
"=",
"{",
"\"inst\"",
":",
"self",
".",
"_inst_prefix",
",",
"\"instlow\"",
":",
"self",
".",
"_inst_prefix",
".",
"lower",
"(",
")",
",",
"\"instshort\"",
":",
"self",
".",
"_inst_prefix_short",
",",
"\"runno\"",
":",
"run_details",
".",
"output_run_string",
",",
"\"fileext\"",
":",
"file_type",
",",
"\"_fileext\"",
":",
"\"_\"",
"+",
"file_type",
"if",
"file_type",
"else",
"\"\"",
",",
"\"suffix\"",
":",
"run_details",
".",
"output_suffix",
"if",
"run_details",
".",
"output_suffix",
"else",
"\"\"",
"}",
"format_options",
"=",
"self",
".",
"_add_formatting_options",
"(",
"format_options",
")",
"output_formats",
"=",
"{",
"\"nxs_filename\"",
":",
"output_directory",
",",
"\"gss_filename\"",
":",
"output_directory",
",",
"\"tof_xye_filename\"",
":",
"dat_files_directory",
",",
"\"dspacing_xye_filename\"",
":",
"dat_files_directory",
"}",
"for",
"key",
",",
"output_dir",
"in",
"output_formats",
".",
"items",
"(",
")",
":",
"filepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"output_dir",
",",
"getattr",
"(",
"self",
".",
"_inst_settings",
",",
"key",
")",
".",
"format",
"(",
"*",
"*",
"format_options",
")",
")",
"out_file_names",
"[",
"key",
"]",
"=",
"filepath",
"out_file_names",
"[",
"'output_name'",
"]",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"out_file_names",
"[",
"'nxs_filename'",
"]",
")",
")",
"[",
"0",
"]",
"return",
"out_file_names"
] | https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/Diffraction/isis_powder/abstract_inst.py#L294-L334 |
|
acbull/Unbiased_LambdaMart | 7c39abe5caa18ca07df2d23c2db392916d92956c | evaluation/scripts/click_models.py | python | PositionBiasedModel.estimatePropensityWeightsForOneList | (self, click_list, use_non_clicked_data=False) | return propensity_weights | Estimate propensity for clicks in a list.
Parameters
----------
click_list : [type]
[description]
use_non_clicked_data : bool, optional
[description], by default False
Returns
-------
[type]
[description] | Estimate propensity for clicks in a list. | [
"Estimate",
"propensity",
"for",
"clicks",
"in",
"a",
"list",
"."
] | def estimatePropensityWeightsForOneList(self, click_list, use_non_clicked_data=False):
"""Estimate propensity for clicks in a list.
Parameters
----------
click_list : [type]
[description]
use_non_clicked_data : bool, optional
[description], by default False
Returns
-------
[type]
[description]
"""
propensity_weights = []
for r in range(len(click_list)):
pw = 0.0
if use_non_clicked_data | click_list[r] > 0:
pw = 1.0/self.getExamProb(r) * self.getExamProb(0)
propensity_weights.append(pw)
return propensity_weights | [
"def",
"estimatePropensityWeightsForOneList",
"(",
"self",
",",
"click_list",
",",
"use_non_clicked_data",
"=",
"False",
")",
":",
"propensity_weights",
"=",
"[",
"]",
"for",
"r",
"in",
"range",
"(",
"len",
"(",
"click_list",
")",
")",
":",
"pw",
"=",
"0.0",
"if",
"use_non_clicked_data",
"|",
"click_list",
"[",
"r",
"]",
">",
"0",
":",
"pw",
"=",
"1.0",
"/",
"self",
".",
"getExamProb",
"(",
"r",
")",
"*",
"self",
".",
"getExamProb",
"(",
"0",
")",
"propensity_weights",
".",
"append",
"(",
"pw",
")",
"return",
"propensity_weights"
] | https://github.com/acbull/Unbiased_LambdaMart/blob/7c39abe5caa18ca07df2d23c2db392916d92956c/evaluation/scripts/click_models.py#L188-L210 |
|
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/lib/histograms.py | python | _ptp | (x) | return _unsigned_subtract(x.max(), x.min()) | Peak-to-peak value of x.
This implementation avoids the problem of signed integer arrays having a
peak-to-peak value that cannot be represented with the array's data type.
This function returns an unsigned value for signed integer arrays. | Peak-to-peak value of x. | [
"Peak",
"-",
"to",
"-",
"peak",
"value",
"of",
"x",
"."
] | def _ptp(x):
"""Peak-to-peak value of x.
This implementation avoids the problem of signed integer arrays having a
peak-to-peak value that cannot be represented with the array's data type.
This function returns an unsigned value for signed integer arrays.
"""
return _unsigned_subtract(x.max(), x.min()) | [
"def",
"_ptp",
"(",
"x",
")",
":",
"return",
"_unsigned_subtract",
"(",
"x",
".",
"max",
"(",
")",
",",
"x",
".",
"min",
"(",
")",
")"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/lib/histograms.py#L25-L32 |
|
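The overflow this private helper guards against can be shown directly; the second call mirrors what _unsigned_subtract is understood to do internally (same-width unsigned dtype with unsafe casting):

import numpy as np

a = np.array([-128, 127], dtype=np.int8)
print(np.ptp(a))  # -1: the true span of 255 overflows int8
print(np.subtract(a.max(), a.min(), dtype=np.uint8, casting='unsafe'))  # 255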
ApolloAuto/apollo | 463fb82f9e979d02dcb25044e60931293ab2dba0 | modules/tools/record_analyzer/common/distribution_analyzer.py | python | DistributionAnalyzer.print_distribution_results | (self, data) | distribution analyzer | distribution analyzer | [
"distribution",
"analyzer"
] | def print_distribution_results(self, data):
"""distribution analyzer"""
if len(data) == 0:
print(PrintColors.FAIL + "No Data Generated!" + PrintColors.ENDC)
return
total = 0
for k, v in data.items():
total += v
for k, v in data.items():
percentage = "{0:.2f}".format((float(v) / total) * 100)
print(PrintColors.OKBLUE + k + " = " + str(v) +
"(" + percentage + "%)" + PrintColors.ENDC) | [
"def",
"print_distribution_results",
"(",
"self",
",",
"data",
")",
":",
"if",
"len",
"(",
"data",
")",
"==",
"0",
":",
"print",
"(",
"PrintColors",
".",
"FAIL",
"+",
"\"No Data Generated!\"",
"+",
"PrintColors",
".",
"ENDC",
")",
"return",
"total",
"=",
"0",
"for",
"k",
",",
"v",
"in",
"data",
".",
"items",
"(",
")",
":",
"total",
"+=",
"v",
"for",
"k",
",",
"v",
"in",
"data",
".",
"items",
"(",
")",
":",
"percentage",
"=",
"\"{0:.2f}\"",
".",
"format",
"(",
"(",
"float",
"(",
"v",
")",
"/",
"total",
")",
"*",
"100",
")",
"print",
"(",
"PrintColors",
".",
"OKBLUE",
"+",
"k",
"+",
"\" = \"",
"+",
"str",
"(",
"v",
")",
"+",
"\"(\"",
"+",
"percentage",
"+",
"\"%)\"",
"+",
"PrintColors",
".",
"ENDC",
")"
] | https://github.com/ApolloAuto/apollo/blob/463fb82f9e979d02dcb25044e60931293ab2dba0/modules/tools/record_analyzer/common/distribution_analyzer.py#L25-L38 |
||
mongodb/mongo | d8ff665343ad29cf286ee2cf4a1960d29371937b | buildscripts/idl/idl/struct_types.py | python | StructTypeInfoBase.get_op_msg_request_deserializer_method | (self) | Get the protected OpMsg deserializer method for a struct. | Get the protected OpMsg deserializer method for a struct. | [
"Get",
"the",
"protected",
"OpMsg",
"deserializer",
"method",
"for",
"a",
"struct",
"."
] | def get_op_msg_request_deserializer_method(self):
# type: () -> Optional[MethodInfo]
"""Get the protected OpMsg deserializer method for a struct."""
# pylint: disable=invalid-name
pass | [
"def",
"get_op_msg_request_deserializer_method",
"(",
"self",
")",
":",
"# type: () -> Optional[MethodInfo]",
"# pylint: disable=invalid-name",
"pass"
] | https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/idl/idl/struct_types.py#L207-L211 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/msw/richtext.py | python | RichTextBuffer_FindHandlerByFilename | (*args, **kwargs) | return _richtext.RichTextBuffer_FindHandlerByFilename(*args, **kwargs) | RichTextBuffer_FindHandlerByFilename(String filename, int imageType) -> RichTextFileHandler | RichTextBuffer_FindHandlerByFilename(String filename, int imageType) -> RichTextFileHandler | [
"RichTextBuffer_FindHandlerByFilename",
"(",
"String",
"filename",
"int",
"imageType",
")",
"-",
">",
"RichTextFileHandler"
] | def RichTextBuffer_FindHandlerByFilename(*args, **kwargs):
"""RichTextBuffer_FindHandlerByFilename(String filename, int imageType) -> RichTextFileHandler"""
return _richtext.RichTextBuffer_FindHandlerByFilename(*args, **kwargs) | [
"def",
"RichTextBuffer_FindHandlerByFilename",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextBuffer_FindHandlerByFilename",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/richtext.py#L2679-L2681 |
|
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/ops/script_ops.py | python | FuncRegistry._next_unique_token | (self) | return "pyfunc_%d" % uid | Returns a unique token. | Returns a unique token. | [
"Returns",
"a",
"unique",
"token",
"."
] | def _next_unique_token(self):
"""Returns a unique token."""
with self._lock:
uid = self._unique_id
self._unique_id += 1
return "pyfunc_%d" % uid | [
"def",
"_next_unique_token",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock",
":",
"uid",
"=",
"self",
".",
"_unique_id",
"self",
".",
"_unique_id",
"+=",
"1",
"return",
"\"pyfunc_%d\"",
"%",
"uid"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/script_ops.py#L100-L105 |
|
nest/nest-simulator | f2623eb78518cdbd55e77e0ed486bf1111bcb62f | pynest/nest/lib/hl_api_models.py | python | GetDefaults | (model, keys=None, output='') | return result | Return default parameters of the given model, specified by a string.
Parameters
----------
model : str
Name of the model
keys : str or list, optional
String or a list of strings naming model properties. `GetDefaults` then
returns a single value or a list of values belonging to the keys
given.
output : str, optional
Whether the returned data should be in JSON format
(``output='json'``). Default is ''.
Returns
-------
dict
A dictionary of default parameters.
type
If keys is a string, the corresponding default parameter is returned.
list
If keys is a list of strings, a list of corresponding default parameters
is returned.
str :
If `output` is ``json``, returns parameters in JSON format.
Raises
------
TypeError | Return default parameters of the given model, specified by a string. | [
"Return",
"default",
"parameters",
"of",
"the",
"given",
"model",
"specified",
"by",
"a",
"string",
"."
] | def GetDefaults(model, keys=None, output=''):
"""Return default parameters of the given model, specified by a string.
Parameters
----------
model : str
Name of the model
keys : str or list, optional
String or a list of strings naming model properties. `GetDefaults` then
returns a single value or a list of values belonging to the keys
given.
output : str, optional
Whether the returned data should be in JSON format
(``output='json'``). Default is ''.
Returns
-------
dict
A dictionary of default parameters.
type
If keys is a string, the corresponding default parameter is returned.
list
If keys is a list of strings, a list of corresponding default parameters
is returned.
str :
If `output` is ``json``, returns parameters in JSON format.
Raises
------
TypeError
"""
if keys is None:
cmd = "/{0} GetDefaults".format(model)
elif is_literal(keys):
cmd = '/{0} GetDefaults /{1} get'.format(model, keys)
elif is_iterable(keys):
keys_str = " ".join("/{0}".format(x) for x in keys)
cmd = "/{0} GetDefaults [ {1} ] {{ 1 index exch get }}"\
.format(model, keys_str) + " Map exch pop"
else:
raise TypeError("keys should be either a string or an iterable")
sr(cmd)
result = spp()
if output == 'json':
result = to_json(result)
return result | [
"def",
"GetDefaults",
"(",
"model",
",",
"keys",
"=",
"None",
",",
"output",
"=",
"''",
")",
":",
"if",
"keys",
"is",
"None",
":",
"cmd",
"=",
"\"/{0} GetDefaults\"",
".",
"format",
"(",
"model",
")",
"elif",
"is_literal",
"(",
"keys",
")",
":",
"cmd",
"=",
"'/{0} GetDefaults /{1} get'",
".",
"format",
"(",
"model",
",",
"keys",
")",
"elif",
"is_iterable",
"(",
"keys",
")",
":",
"keys_str",
"=",
"\" \"",
".",
"join",
"(",
"\"/{0}\"",
".",
"format",
"(",
"x",
")",
"for",
"x",
"in",
"keys",
")",
"cmd",
"=",
"\"/{0} GetDefaults [ {1} ] {{ 1 index exch get }}\"",
".",
"format",
"(",
"model",
",",
"keys_str",
")",
"+",
"\" Map exch pop\"",
"else",
":",
"raise",
"TypeError",
"(",
"\"keys should be either a string or an iterable\"",
")",
"sr",
"(",
"cmd",
")",
"result",
"=",
"spp",
"(",
")",
"if",
"output",
"==",
"'json'",
":",
"result",
"=",
"to_json",
"(",
"result",
")",
"return",
"result"
] | https://github.com/nest/nest-simulator/blob/f2623eb78518cdbd55e77e0ed486bf1111bcb62f/pynest/nest/lib/hl_api_models.py#L138-L188 |
|
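A usage sketch against a stock NEST model; the numeric values in the comments are typical defaults and may differ between NEST releases:

import nest

defaults = nest.GetDefaults('iaf_psc_alpha')                  # full dict
tau_m = nest.GetDefaults('iaf_psc_alpha', 'tau_m')            # e.g. 10.0 (ms)
c_m, e_l = nest.GetDefaults('iaf_psc_alpha', ['C_m', 'E_L'])  # e.g. 250.0, -70.0
as_json = nest.GetDefaults('iaf_psc_alpha', output='json')    # JSON string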
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/python/ops/math_grad.py | python | _ComplexAbsGrad | (op, grad) | return (math_ops.complex(grad, array_ops.zeros_like(grad)) *
math_ops.sign(op.inputs[0])) | Returns the gradient of ComplexAbs. | Returns the gradient of ComplexAbs. | [
"Returns",
"the",
"gradient",
"of",
"ComplexAbs",
"."
] | def _ComplexAbsGrad(op, grad):
"""Returns the gradient of ComplexAbs."""
# TODO(b/27786104): The cast to complex could be removed once arithmetic
# supports mixtures of complex64 and real values.
return (math_ops.complex(grad, array_ops.zeros_like(grad)) *
math_ops.sign(op.inputs[0])) | [
"def",
"_ComplexAbsGrad",
"(",
"op",
",",
"grad",
")",
":",
"# TODO(b/27786104): The cast to complex could be removed once arithmetic",
"# supports mixtures of complex64 and real values.",
"return",
"(",
"math_ops",
".",
"complex",
"(",
"grad",
",",
"array_ops",
".",
"zeros_like",
"(",
"grad",
")",
")",
"*",
"math_ops",
".",
"sign",
"(",
"op",
".",
"inputs",
"[",
"0",
"]",
")",
")"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/math_grad.py#L1025-L1030 |
|
timi-liuliang/echo | 40a5a24d430eee4118314459ab7e03afcb3b8719 | thirdparty/protobuf/python/google/protobuf/internal/python_message.py | python | _Listener.__init__ | (self, parent_message) | Args:
parent_message: The message whose _Modified() method we should call when
we receive Modified() messages. | Args:
parent_message: The message whose _Modified() method we should call when
we receive Modified() messages. | [
"Args",
":",
"parent_message",
":",
"The",
"message",
"whose",
"_Modified",
"()",
"method",
"we",
"should",
"call",
"when",
"we",
"receive",
"Modified",
"()",
"messages",
"."
] | def __init__(self, parent_message):
"""Args:
parent_message: The message whose _Modified() method we should call when
we receive Modified() messages.
"""
# This listener establishes a back reference from a child (contained) object
# to its parent (containing) object. We make this a weak reference to avoid
# creating cyclic garbage when the client finishes with the 'parent' object
# in the tree.
if isinstance(parent_message, weakref.ProxyType):
self._parent_message_weakref = parent_message
else:
self._parent_message_weakref = weakref.proxy(parent_message)
# As an optimization, we also indicate directly on the listener whether
# or not the parent message is dirty. This way we can avoid traversing
# up the tree in the common case.
self.dirty = False | [
"def",
"__init__",
"(",
"self",
",",
"parent_message",
")",
":",
"# This listener establishes a back reference from a child (contained) object",
"# to its parent (containing) object. We make this a weak reference to avoid",
"# creating cyclic garbage when the client finishes with the 'parent' object",
"# in the tree.",
"if",
"isinstance",
"(",
"parent_message",
",",
"weakref",
".",
"ProxyType",
")",
":",
"self",
".",
"_parent_message_weakref",
"=",
"parent_message",
"else",
":",
"self",
".",
"_parent_message_weakref",
"=",
"weakref",
".",
"proxy",
"(",
"parent_message",
")",
"# As an optimization, we also indicate directly on the listener whether",
"# or not the parent message is dirty. This way we can avoid traversing",
"# up the tree in the common case.",
"self",
".",
"dirty",
"=",
"False"
] | https://github.com/timi-liuliang/echo/blob/40a5a24d430eee4118314459ab7e03afcb3b8719/thirdparty/protobuf/python/google/protobuf/internal/python_message.py#L1092-L1109 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/gtk/stc.py | python | StyledTextCtrl.StyleSetWeight | (*args, **kwargs) | return _stc.StyledTextCtrl_StyleSetWeight(*args, **kwargs) | StyleSetWeight(self, int style, int weight) | StyleSetWeight(self, int style, int weight) | [
"StyleSetWeight",
"(",
"self",
"int",
"style",
"int",
"weight",
")"
] | def StyleSetWeight(*args, **kwargs):
"""StyleSetWeight(self, int style, int weight)"""
return _stc.StyledTextCtrl_StyleSetWeight(*args, **kwargs) | [
"def",
"StyleSetWeight",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_stc",
".",
"StyledTextCtrl_StyleSetWeight",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/stc.py#L2707-L2709 |
|
FreeCAD/FreeCAD | ba42231b9c6889b89e064d6d563448ed81e376ec | src/Mod/Draft/draftguitools/gui_scale.py | python | Scale.pickRef | (self) | Pick a point of reference. | Pick a point of reference. | [
"Pick",
"a",
"point",
"of",
"reference",
"."
] | def pickRef(self):
"""Pick a point of reference."""
self.pickmode = True
if self.node:
self.node = self.node[:1] # remove previous picks
_msg(translate("draft", "Pick reference distance from base point"))
self.call = self.view.addEventCallback("SoEvent", self.action) | [
"def",
"pickRef",
"(",
"self",
")",
":",
"self",
".",
"pickmode",
"=",
"True",
"if",
"self",
".",
"node",
":",
"self",
".",
"node",
"=",
"self",
".",
"node",
"[",
":",
"1",
"]",
"# remove previous picks",
"_msg",
"(",
"translate",
"(",
"\"draft\"",
",",
"\"Pick reference distance from base point\"",
")",
")",
"self",
".",
"call",
"=",
"self",
".",
"view",
".",
"addEventCallback",
"(",
"\"SoEvent\"",
",",
"self",
".",
"action",
")"
] | https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftguitools/gui_scale.py#L130-L136 |
||
benoitsteiner/tensorflow-opencl | cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5 | tensorflow/python/eager/tape.py | python | Tape.export | (self) | return pywrap_tensorflow.TFE_Py_TapeExport(self._tape) | Exports the internal state of this tape.
Returns:
tensor_tape: a map from tensor_id(tensor) to <identifier for op>
responsible for generating that tensor.
op_tape: a map from <identifier for op> to TapeEntry for that op. | Exports the internal state of this tape. | [
"Exports",
"the",
"internal",
"state",
"of",
"this",
"tape",
"."
] | def export(self):
"""Exports the internal state of this tape.
Returns:
tensor_tape: a map from tensor_id(tensor) to <identifier for op>
responsible for generating that tensor.
op_tape: a map from <identifier for op> to TapeEntry for that op.
"""
return pywrap_tensorflow.TFE_Py_TapeExport(self._tape) | [
"def",
"export",
"(",
"self",
")",
":",
"return",
"pywrap_tensorflow",
".",
"TFE_Py_TapeExport",
"(",
"self",
".",
"_tape",
")"
] | https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/eager/tape.py#L102-L110 |
|
baidu-research/tensorflow-allreduce | 66d5b855e90b0949e9fa5cca5599fd729a70e874 | tensorflow/contrib/rnn/python/ops/rnn_cell.py | python | GridLSTMCell.__init__ | (self, num_units, use_peepholes=False,
share_time_frequency_weights=False,
cell_clip=None, initializer=None,
num_unit_shards=1, forget_bias=1.0,
feature_size=None, frequency_skip=None,
num_frequency_blocks=None,
start_freqindex_list=None,
end_freqindex_list=None,
couple_input_forget_gates=False,
state_is_tuple=True,
reuse=None) | Initialize the parameters for an LSTM cell.
Args:
num_units: int, The number of units in the LSTM cell
use_peepholes: (optional) bool, default False. Set True to enable
diagonal/peephole connections.
share_time_frequency_weights: (optional) bool, default False. Set True to
enable shared cell weights between time and frequency LSTMs.
cell_clip: (optional) A float value, default None, if provided the cell
state is clipped by this value prior to the cell output activation.
initializer: (optional) The initializer to use for the weight and
projection matrices, default None.
num_unit_shards: (optional) int, default 1, How to split the weight
matrix. If > 1,the weight matrix is stored across num_unit_shards.
forget_bias: (optional) float, default 1.0, The initial bias of the
forget gates, used to reduce the scale of forgetting at the beginning
of the training.
feature_size: (optional) int, default None, The size of the input feature
the LSTM spans over.
frequency_skip: (optional) int, default None, The amount the LSTM filter
is shifted by in frequency.
num_frequency_blocks: [required] A list of frequency blocks needed to
cover the whole input feature splitting defined by start_freqindex_list
and end_freqindex_list.
start_freqindex_list: [optional], list of ints, default None, The
starting frequency index for each frequency block.
end_freqindex_list: [optional], list of ints, default None. The ending
frequency index for each frequency block.
couple_input_forget_gates: (optional) bool, default False, Whether to
couple the input and forget gates, i.e. f_gate = 1.0 - i_gate, to reduce
model parameters and computation cost.
state_is_tuple: If True, accepted and returned states are 2-tuples of
the `c_state` and `m_state`. By default (False), they are concatenated
along the column axis. This default behavior will soon be deprecated.
reuse: (optional) Python boolean describing whether to reuse variables
in an existing scope. If not `True`, and the existing scope already has
the given variables, an error is raised.
Raises:
ValueError: if the num_frequency_blocks list is not specified | Initialize the parameters for an LSTM cell. | [
"Initialize",
"the",
"parameters",
"for",
"an",
"LSTM",
"cell",
"."
] | def __init__(self, num_units, use_peepholes=False,
share_time_frequency_weights=False,
cell_clip=None, initializer=None,
num_unit_shards=1, forget_bias=1.0,
feature_size=None, frequency_skip=None,
num_frequency_blocks=None,
start_freqindex_list=None,
end_freqindex_list=None,
couple_input_forget_gates=False,
state_is_tuple=True,
reuse=None):
"""Initialize the parameters for an LSTM cell.
Args:
num_units: int, The number of units in the LSTM cell
use_peepholes: (optional) bool, default False. Set True to enable
diagonal/peephole connections.
share_time_frequency_weights: (optional) bool, default False. Set True to
enable shared cell weights between time and frequency LSTMs.
cell_clip: (optional) A float value, default None, if provided the cell
state is clipped by this value prior to the cell output activation.
initializer: (optional) The initializer to use for the weight and
projection matrices, default None.
num_unit_shards: (optional) int, default 1, How to split the weight
matrix. If > 1,the weight matrix is stored across num_unit_shards.
forget_bias: (optional) float, default 1.0, The initial bias of the
forget gates, used to reduce the scale of forgetting at the beginning
of the training.
feature_size: (optional) int, default None, The size of the input feature
the LSTM spans over.
frequency_skip: (optional) int, default None, The amount the LSTM filter
is shifted by in frequency.
num_frequency_blocks: [required] A list of frequency blocks needed to
cover the whole input feature splitting defined by start_freqindex_list
and end_freqindex_list.
start_freqindex_list: [optional], list of ints, default None, The
starting frequency index for each frequency block.
end_freqindex_list: [optional], list of ints, default None. The ending
frequency index for each frequency block.
couple_input_forget_gates: (optional) bool, default False, Whether to
couple the input and forget gates, i.e. f_gate = 1.0 - i_gate, to reduce
model parameters and computation cost.
state_is_tuple: If True, accepted and returned states are 2-tuples of
the `c_state` and `m_state`. By default (False), they are concatenated
along the column axis. This default behavior will soon be deprecated.
reuse: (optional) Python boolean describing whether to reuse variables
in an existing scope. If not `True`, and the existing scope already has
the given variables, an error is raised.
Raises:
ValueError: if the num_frequency_blocks list is not specified
"""
super(GridLSTMCell, self).__init__(_reuse=reuse)
if not state_is_tuple:
logging.warn("%s: Using a concatenated state is slower and will soon be "
"deprecated. Use state_is_tuple=True.", self)
self._num_units = num_units
self._use_peepholes = use_peepholes
self._share_time_frequency_weights = share_time_frequency_weights
self._couple_input_forget_gates = couple_input_forget_gates
self._state_is_tuple = state_is_tuple
self._cell_clip = cell_clip
self._initializer = initializer
self._num_unit_shards = num_unit_shards
self._forget_bias = forget_bias
self._feature_size = feature_size
self._frequency_skip = frequency_skip
self._start_freqindex_list = start_freqindex_list
self._end_freqindex_list = end_freqindex_list
self._num_frequency_blocks = num_frequency_blocks
self._total_blocks = 0
self._reuse = reuse
if self._num_frequency_blocks is None:
raise ValueError("Must specify num_frequency_blocks")
for block_index in range(len(self._num_frequency_blocks)):
self._total_blocks += int(self._num_frequency_blocks[block_index])
if state_is_tuple:
state_names = ""
for block_index in range(len(self._num_frequency_blocks)):
for freq_index in range(self._num_frequency_blocks[block_index]):
name_prefix = "state_f%02d_b%02d" % (freq_index, block_index)
state_names += ("%s_c, %s_m," % (name_prefix, name_prefix))
self._state_tuple_type = collections.namedtuple(
"GridLSTMStateTuple", state_names.strip(","))
self._state_size = self._state_tuple_type(
*([num_units, num_units] * self._total_blocks))
else:
self._state_tuple_type = None
self._state_size = num_units * self._total_blocks * 2
self._output_size = num_units * self._total_blocks * 2 | [
"def",
"__init__",
"(",
"self",
",",
"num_units",
",",
"use_peepholes",
"=",
"False",
",",
"share_time_frequency_weights",
"=",
"False",
",",
"cell_clip",
"=",
"None",
",",
"initializer",
"=",
"None",
",",
"num_unit_shards",
"=",
"1",
",",
"forget_bias",
"=",
"1.0",
",",
"feature_size",
"=",
"None",
",",
"frequency_skip",
"=",
"None",
",",
"num_frequency_blocks",
"=",
"None",
",",
"start_freqindex_list",
"=",
"None",
",",
"end_freqindex_list",
"=",
"None",
",",
"couple_input_forget_gates",
"=",
"False",
",",
"state_is_tuple",
"=",
"True",
",",
"reuse",
"=",
"None",
")",
":",
"super",
"(",
"GridLSTMCell",
",",
"self",
")",
".",
"__init__",
"(",
"_reuse",
"=",
"reuse",
")",
"if",
"not",
"state_is_tuple",
":",
"logging",
".",
"warn",
"(",
"\"%s: Using a concatenated state is slower and will soon be \"",
"\"deprecated. Use state_is_tuple=True.\"",
",",
"self",
")",
"self",
".",
"_num_units",
"=",
"num_units",
"self",
".",
"_use_peepholes",
"=",
"use_peepholes",
"self",
".",
"_share_time_frequency_weights",
"=",
"share_time_frequency_weights",
"self",
".",
"_couple_input_forget_gates",
"=",
"couple_input_forget_gates",
"self",
".",
"_state_is_tuple",
"=",
"state_is_tuple",
"self",
".",
"_cell_clip",
"=",
"cell_clip",
"self",
".",
"_initializer",
"=",
"initializer",
"self",
".",
"_num_unit_shards",
"=",
"num_unit_shards",
"self",
".",
"_forget_bias",
"=",
"forget_bias",
"self",
".",
"_feature_size",
"=",
"feature_size",
"self",
".",
"_frequency_skip",
"=",
"frequency_skip",
"self",
".",
"_start_freqindex_list",
"=",
"start_freqindex_list",
"self",
".",
"_end_freqindex_list",
"=",
"end_freqindex_list",
"self",
".",
"_num_frequency_blocks",
"=",
"num_frequency_blocks",
"self",
".",
"_total_blocks",
"=",
"0",
"self",
".",
"_reuse",
"=",
"reuse",
"if",
"self",
".",
"_num_frequency_blocks",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Must specify num_frequency_blocks\"",
")",
"for",
"block_index",
"in",
"range",
"(",
"len",
"(",
"self",
".",
"_num_frequency_blocks",
")",
")",
":",
"self",
".",
"_total_blocks",
"+=",
"int",
"(",
"self",
".",
"_num_frequency_blocks",
"[",
"block_index",
"]",
")",
"if",
"state_is_tuple",
":",
"state_names",
"=",
"\"\"",
"for",
"block_index",
"in",
"range",
"(",
"len",
"(",
"self",
".",
"_num_frequency_blocks",
")",
")",
":",
"for",
"freq_index",
"in",
"range",
"(",
"self",
".",
"_num_frequency_blocks",
"[",
"block_index",
"]",
")",
":",
"name_prefix",
"=",
"\"state_f%02d_b%02d\"",
"%",
"(",
"freq_index",
",",
"block_index",
")",
"state_names",
"+=",
"(",
"\"%s_c, %s_m,\"",
"%",
"(",
"name_prefix",
",",
"name_prefix",
")",
")",
"self",
".",
"_state_tuple_type",
"=",
"collections",
".",
"namedtuple",
"(",
"\"GridLSTMStateTuple\"",
",",
"state_names",
".",
"strip",
"(",
"\",\"",
")",
")",
"self",
".",
"_state_size",
"=",
"self",
".",
"_state_tuple_type",
"(",
"*",
"(",
"[",
"num_units",
",",
"num_units",
"]",
"*",
"self",
".",
"_total_blocks",
")",
")",
"else",
":",
"self",
".",
"_state_tuple_type",
"=",
"None",
"self",
".",
"_state_size",
"=",
"num_units",
"*",
"self",
".",
"_total_blocks",
"*",
"2",
"self",
".",
"_output_size",
"=",
"num_units",
"*",
"self",
".",
"_total_blocks",
"*",
"2"
] | https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/rnn/python/ops/rnn_cell.py#L442-L531 |
||
aws/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/llvmlite/binding/executionengine.py | python | ExecutionEngine._find_module_ptr | (self, module_ptr) | return None | Find the ModuleRef corresponding to the given pointer. | Find the ModuleRef corresponding to the given pointer. | [
"Find",
"the",
"ModuleRef",
"corresponding",
"to",
"the",
"given",
"pointer",
"."
] | def _find_module_ptr(self, module_ptr):
"""
Find the ModuleRef corresponding to the given pointer.
"""
ptr = cast(module_ptr, c_void_p).value
for module in self._modules:
if cast(module._ptr, c_void_p).value == ptr:
return module
return None | [
"def",
"_find_module_ptr",
"(",
"self",
",",
"module_ptr",
")",
":",
"ptr",
"=",
"cast",
"(",
"module_ptr",
",",
"c_void_p",
")",
".",
"value",
"for",
"module",
"in",
"self",
".",
"_modules",
":",
"if",
"cast",
"(",
"module",
".",
"_ptr",
",",
"c_void_p",
")",
".",
"value",
"==",
"ptr",
":",
"return",
"module",
"return",
"None"
] | https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/llvmlite/binding/executionengine.py#L136-L144 |
|
SFTtech/openage | d6a08c53c48dc1e157807471df92197f6ca9e04d | openage/convert/processor/conversion/aoc/upgrade_ability_subprocessor.py | python | AoCUpgradeAbilitySubprocessor.selectable_ability | (converter_group, line, container_obj_ref, diff=None) | return patches | Creates a patch for the Selectable ability of a line.
:param converter_group: Group that gets the patch.
:type converter_group: ...dataformat.converter_object.ConverterObjectGroup
:param line: Unit/Building line that has the ability.
:type line: ...dataformat.converter_object.ConverterObjectGroup
:param container_obj_ref: Reference of the raw API object the patch is nested in.
:type container_obj_ref: str
:param diff: A diff between two ConvertObject instances.
:type diff: ...dataformat.converter_object.ConverterObject
:returns: The forward references for the generated patches.
:rtype: list | Creates a patch for the Selectable ability of a line. | [
"Creates",
"a",
"patch",
"for",
"the",
"Selectable",
"ability",
"of",
"a",
"line",
"."
] | def selectable_ability(converter_group, line, container_obj_ref, diff=None):
"""
Creates a patch for the Selectable ability of a line.
:param converter_group: Group that gets the patch.
:type converter_group: ...dataformat.converter_object.ConverterObjectGroup
:param line: Unit/Building line that has the ability.
:type line: ...dataformat.converter_object.ConverterObjectGroup
:param container_obj_ref: Reference of the raw API object the patch is nested in.
:type container_obj_ref: str
:param diff: A diff between two ConvertObject instances.
:type diff: ...dataformat.converter_object.ConverterObject
:returns: The forward references for the generated patches.
:rtype: list
"""
head_unit_id = line.get_head_unit_id()
tech_id = converter_group.get_id()
dataset = line.data
patches = []
name_lookup_dict = internal_name_lookups.get_entity_lookups(dataset.game_version)
tech_lookup_dict = internal_name_lookups.get_tech_lookups(dataset.game_version)
game_entity_name = name_lookup_dict[head_unit_id][0]
# First patch: Sound for the SelectableSelf ability
changed = False
if diff:
diff_selection_sound = diff["selection_sound_id"]
if not isinstance(diff_selection_sound, NoDiffMember):
changed = True
if isinstance(line, GenieUnitLineGroup):
ability_name = "SelectableSelf"
else:
ability_name = "Selectable"
if changed:
patch_target_ref = f"{game_entity_name}.{ability_name}"
patch_target_forward_ref = ForwardRef(line, patch_target_ref)
# Wrapper
wrapper_name = f"Change{game_entity_name}{ability_name}Wrapper"
wrapper_ref = f"{container_obj_ref}.{wrapper_name}"
wrapper_raw_api_object = RawAPIObject(wrapper_ref,
wrapper_name,
dataset.nyan_api_objects)
wrapper_raw_api_object.add_raw_parent("engine.util.patch.Patch")
if isinstance(line, GenieBuildingLineGroup):
# Store building upgrades next to their game entity definition,
# not in the Age up techs.
wrapper_raw_api_object.set_location("data/game_entity/generic/%s/"
% (name_lookup_dict[head_unit_id][1]))
wrapper_raw_api_object.set_filename(f"{tech_lookup_dict[tech_id][1]}_upgrade")
else:
wrapper_raw_api_object.set_location(ForwardRef(converter_group, container_obj_ref))
# Nyan patch
nyan_patch_name = f"Change{game_entity_name}{ability_name}"
nyan_patch_ref = f"{container_obj_ref}.{wrapper_name}.{nyan_patch_name}"
nyan_patch_location = ForwardRef(converter_group, wrapper_ref)
nyan_patch_raw_api_object = RawAPIObject(nyan_patch_ref,
nyan_patch_name,
dataset.nyan_api_objects,
nyan_patch_location)
nyan_patch_raw_api_object.add_raw_parent("engine.util.patch.NyanPatch")
nyan_patch_raw_api_object.set_patch_target(patch_target_forward_ref)
# Change sound
diff_selection_sound_id = diff_selection_sound.get_value()
sounds_set = []
if diff_selection_sound_id > -1:
# Patch the new sound in
sound_forward_ref = AoCUpgradeAbilitySubprocessor.create_sound(converter_group,
diff_selection_sound_id,
nyan_patch_ref,
ability_name,
"select_")
sounds_set.append(sound_forward_ref)
nyan_patch_raw_api_object.add_raw_patch_member("sounds",
sounds_set,
"engine.ability.property.type.CommandSound",
MemberOperator.ASSIGN)
patch_forward_ref = ForwardRef(converter_group, nyan_patch_ref)
wrapper_raw_api_object.add_raw_member("patch",
patch_forward_ref,
"engine.util.patch.Patch")
converter_group.add_raw_api_object(wrapper_raw_api_object)
converter_group.add_raw_api_object(nyan_patch_raw_api_object)
wrapper_forward_ref = ForwardRef(converter_group, wrapper_ref)
patches.append(wrapper_forward_ref)
# Second patch: Selection box
changed = False
if diff:
diff_radius_x = diff["selection_shape_x"]
diff_radius_y = diff["selection_shape_y"]
if any(not isinstance(value, NoDiffMember) for value in (diff_radius_x,
diff_radius_y)):
changed = True
if changed:
patch_target_ref = f"{game_entity_name}.{ability_name}.Rectangle"
patch_target_forward_ref = ForwardRef(line, patch_target_ref)
# Wrapper
wrapper_name = f"Change{game_entity_name}{ability_name}RectangleWrapper"
wrapper_ref = f"{container_obj_ref}.{wrapper_name}"
wrapper_raw_api_object = RawAPIObject(wrapper_ref,
wrapper_name,
dataset.nyan_api_objects)
wrapper_raw_api_object.add_raw_parent("engine.util.patch.Patch")
if isinstance(line, GenieBuildingLineGroup):
# Store building upgrades next to their game entity definition,
# not in the Age up techs.
wrapper_raw_api_object.set_location("data/game_entity/generic/%s/"
% (name_lookup_dict[head_unit_id][1]))
wrapper_raw_api_object.set_filename(f"{tech_lookup_dict[tech_id][1]}_upgrade")
else:
wrapper_raw_api_object.set_location(ForwardRef(converter_group, container_obj_ref))
# Nyan patch
nyan_patch_name = f"Change{game_entity_name}{ability_name}Rectangle"
nyan_patch_ref = f"{container_obj_ref}.{wrapper_name}.{nyan_patch_name}"
nyan_patch_location = ForwardRef(converter_group, wrapper_ref)
nyan_patch_raw_api_object = RawAPIObject(nyan_patch_ref,
nyan_patch_name,
dataset.nyan_api_objects,
nyan_patch_location)
nyan_patch_raw_api_object.add_raw_parent("engine.util.patch.NyanPatch")
nyan_patch_raw_api_object.set_patch_target(patch_target_forward_ref)
if not isinstance(diff_radius_x, NoDiffMember):
diff_width_value = diff_radius_x.get_value()
nyan_patch_raw_api_object.add_raw_patch_member("width",
diff_width_value,
"engine.util.selection_box.type.Rectangle",
MemberOperator.ADD)
if not isinstance(diff_radius_y, NoDiffMember):
diff_height_value = diff_radius_y.get_value()
nyan_patch_raw_api_object.add_raw_patch_member("height",
diff_height_value,
"engine.util.selection_box.type.Rectangle",
MemberOperator.ADD)
patch_forward_ref = ForwardRef(converter_group, nyan_patch_ref)
wrapper_raw_api_object.add_raw_member("patch",
patch_forward_ref,
"engine.util.patch.Patch")
converter_group.add_raw_api_object(wrapper_raw_api_object)
converter_group.add_raw_api_object(nyan_patch_raw_api_object)
wrapper_forward_ref = ForwardRef(converter_group, wrapper_ref)
patches.append(wrapper_forward_ref)
return patches | [
"def",
"selectable_ability",
"(",
"converter_group",
",",
"line",
",",
"container_obj_ref",
",",
"diff",
"=",
"None",
")",
":",
"head_unit_id",
"=",
"line",
".",
"get_head_unit_id",
"(",
")",
"tech_id",
"=",
"converter_group",
".",
"get_id",
"(",
")",
"dataset",
"=",
"line",
".",
"data",
"patches",
"=",
"[",
"]",
"name_lookup_dict",
"=",
"internal_name_lookups",
".",
"get_entity_lookups",
"(",
"dataset",
".",
"game_version",
")",
"tech_lookup_dict",
"=",
"internal_name_lookups",
".",
"get_tech_lookups",
"(",
"dataset",
".",
"game_version",
")",
"game_entity_name",
"=",
"name_lookup_dict",
"[",
"head_unit_id",
"]",
"[",
"0",
"]",
"# First patch: Sound for the SelectableSelf ability",
"changed",
"=",
"False",
"if",
"diff",
":",
"diff_selection_sound",
"=",
"diff",
"[",
"\"selection_sound_id\"",
"]",
"if",
"not",
"isinstance",
"(",
"diff_selection_sound",
",",
"NoDiffMember",
")",
":",
"changed",
"=",
"True",
"if",
"isinstance",
"(",
"line",
",",
"GenieUnitLineGroup",
")",
":",
"ability_name",
"=",
"\"SelectableSelf\"",
"else",
":",
"ability_name",
"=",
"\"Selectable\"",
"if",
"changed",
":",
"patch_target_ref",
"=",
"f\"{game_entity_name}.{ability_name}\"",
"patch_target_forward_ref",
"=",
"ForwardRef",
"(",
"line",
",",
"patch_target_ref",
")",
"# Wrapper",
"wrapper_name",
"=",
"f\"Change{game_entity_name}{ability_name}Wrapper\"",
"wrapper_ref",
"=",
"f\"{container_obj_ref}.{wrapper_name}\"",
"wrapper_raw_api_object",
"=",
"RawAPIObject",
"(",
"wrapper_ref",
",",
"wrapper_name",
",",
"dataset",
".",
"nyan_api_objects",
")",
"wrapper_raw_api_object",
".",
"add_raw_parent",
"(",
"\"engine.util.patch.Patch\"",
")",
"if",
"isinstance",
"(",
"line",
",",
"GenieBuildingLineGroup",
")",
":",
"# Store building upgrades next to their game entity definition,",
"# not in the Age up techs.",
"wrapper_raw_api_object",
".",
"set_location",
"(",
"\"data/game_entity/generic/%s/\"",
"%",
"(",
"name_lookup_dict",
"[",
"head_unit_id",
"]",
"[",
"1",
"]",
")",
")",
"wrapper_raw_api_object",
".",
"set_filename",
"(",
"f\"{tech_lookup_dict[tech_id][1]}_upgrade\"",
")",
"else",
":",
"wrapper_raw_api_object",
".",
"set_location",
"(",
"ForwardRef",
"(",
"converter_group",
",",
"container_obj_ref",
")",
")",
"# Nyan patch",
"nyan_patch_name",
"=",
"f\"Change{game_entity_name}{ability_name}\"",
"nyan_patch_ref",
"=",
"f\"{container_obj_ref}.{wrapper_name}.{nyan_patch_name}\"",
"nyan_patch_location",
"=",
"ForwardRef",
"(",
"converter_group",
",",
"wrapper_ref",
")",
"nyan_patch_raw_api_object",
"=",
"RawAPIObject",
"(",
"nyan_patch_ref",
",",
"nyan_patch_name",
",",
"dataset",
".",
"nyan_api_objects",
",",
"nyan_patch_location",
")",
"nyan_patch_raw_api_object",
".",
"add_raw_parent",
"(",
"\"engine.util.patch.NyanPatch\"",
")",
"nyan_patch_raw_api_object",
".",
"set_patch_target",
"(",
"patch_target_forward_ref",
")",
"# Change sound",
"diff_selection_sound_id",
"=",
"diff_selection_sound",
".",
"get_value",
"(",
")",
"sounds_set",
"=",
"[",
"]",
"if",
"diff_selection_sound_id",
">",
"-",
"1",
":",
"# Patch the new sound in",
"sound_forward_ref",
"=",
"AoCUpgradeAbilitySubprocessor",
".",
"create_sound",
"(",
"converter_group",
",",
"diff_selection_sound_id",
",",
"nyan_patch_ref",
",",
"ability_name",
",",
"\"select_\"",
")",
"sounds_set",
".",
"append",
"(",
"sound_forward_ref",
")",
"nyan_patch_raw_api_object",
".",
"add_raw_patch_member",
"(",
"\"sounds\"",
",",
"sounds_set",
",",
"\"engine.ability.property.type.CommandSound\"",
",",
"MemberOperator",
".",
"ASSIGN",
")",
"patch_forward_ref",
"=",
"ForwardRef",
"(",
"converter_group",
",",
"nyan_patch_ref",
")",
"wrapper_raw_api_object",
".",
"add_raw_member",
"(",
"\"patch\"",
",",
"patch_forward_ref",
",",
"\"engine.util.patch.Patch\"",
")",
"converter_group",
".",
"add_raw_api_object",
"(",
"wrapper_raw_api_object",
")",
"converter_group",
".",
"add_raw_api_object",
"(",
"nyan_patch_raw_api_object",
")",
"wrapper_forward_ref",
"=",
"ForwardRef",
"(",
"converter_group",
",",
"wrapper_ref",
")",
"patches",
".",
"append",
"(",
"wrapper_forward_ref",
")",
"# Second patch: Selection box",
"changed",
"=",
"False",
"if",
"diff",
":",
"diff_radius_x",
"=",
"diff",
"[",
"\"selection_shape_x\"",
"]",
"diff_radius_y",
"=",
"diff",
"[",
"\"selection_shape_y\"",
"]",
"if",
"any",
"(",
"not",
"isinstance",
"(",
"value",
",",
"NoDiffMember",
")",
"for",
"value",
"in",
"(",
"diff_radius_x",
",",
"diff_radius_y",
")",
")",
":",
"changed",
"=",
"True",
"if",
"changed",
":",
"patch_target_ref",
"=",
"f\"{game_entity_name}.{ability_name}.Rectangle\"",
"patch_target_forward_ref",
"=",
"ForwardRef",
"(",
"line",
",",
"patch_target_ref",
")",
"# Wrapper",
"wrapper_name",
"=",
"f\"Change{game_entity_name}{ability_name}RectangleWrapper\"",
"wrapper_ref",
"=",
"f\"{container_obj_ref}.{wrapper_name}\"",
"wrapper_raw_api_object",
"=",
"RawAPIObject",
"(",
"wrapper_ref",
",",
"wrapper_name",
",",
"dataset",
".",
"nyan_api_objects",
")",
"wrapper_raw_api_object",
".",
"add_raw_parent",
"(",
"\"engine.util.patch.Patch\"",
")",
"if",
"isinstance",
"(",
"line",
",",
"GenieBuildingLineGroup",
")",
":",
"# Store building upgrades next to their game entity definition,",
"# not in the Age up techs.",
"wrapper_raw_api_object",
".",
"set_location",
"(",
"\"data/game_entity/generic/%s/\"",
"%",
"(",
"name_lookup_dict",
"[",
"head_unit_id",
"]",
"[",
"1",
"]",
")",
")",
"wrapper_raw_api_object",
".",
"set_filename",
"(",
"f\"{tech_lookup_dict[tech_id][1]}_upgrade\"",
")",
"else",
":",
"wrapper_raw_api_object",
".",
"set_location",
"(",
"ForwardRef",
"(",
"converter_group",
",",
"container_obj_ref",
")",
")",
"# Nyan patch",
"nyan_patch_name",
"=",
"f\"Change{game_entity_name}{ability_name}Rectangle\"",
"nyan_patch_ref",
"=",
"f\"{container_obj_ref}.{wrapper_name}.{nyan_patch_name}\"",
"nyan_patch_location",
"=",
"ForwardRef",
"(",
"converter_group",
",",
"wrapper_ref",
")",
"nyan_patch_raw_api_object",
"=",
"RawAPIObject",
"(",
"nyan_patch_ref",
",",
"nyan_patch_name",
",",
"dataset",
".",
"nyan_api_objects",
",",
"nyan_patch_location",
")",
"nyan_patch_raw_api_object",
".",
"add_raw_parent",
"(",
"\"engine.util.patch.NyanPatch\"",
")",
"nyan_patch_raw_api_object",
".",
"set_patch_target",
"(",
"patch_target_forward_ref",
")",
"if",
"not",
"isinstance",
"(",
"diff_radius_x",
",",
"NoDiffMember",
")",
":",
"diff_width_value",
"=",
"diff_radius_x",
".",
"get_value",
"(",
")",
"nyan_patch_raw_api_object",
".",
"add_raw_patch_member",
"(",
"\"width\"",
",",
"diff_width_value",
",",
"\"engine.util.selection_box.type.Rectangle\"",
",",
"MemberOperator",
".",
"ADD",
")",
"if",
"not",
"isinstance",
"(",
"diff_radius_y",
",",
"NoDiffMember",
")",
":",
"diff_height_value",
"=",
"diff_radius_y",
".",
"get_value",
"(",
")",
"nyan_patch_raw_api_object",
".",
"add_raw_patch_member",
"(",
"\"height\"",
",",
"diff_height_value",
",",
"\"engine.util.selection_box.type.Rectangle\"",
",",
"MemberOperator",
".",
"ADD",
")",
"patch_forward_ref",
"=",
"ForwardRef",
"(",
"converter_group",
",",
"nyan_patch_ref",
")",
"wrapper_raw_api_object",
".",
"add_raw_member",
"(",
"\"patch\"",
",",
"patch_forward_ref",
",",
"\"engine.util.patch.Patch\"",
")",
"converter_group",
".",
"add_raw_api_object",
"(",
"wrapper_raw_api_object",
")",
"converter_group",
".",
"add_raw_api_object",
"(",
"nyan_patch_raw_api_object",
")",
"wrapper_forward_ref",
"=",
"ForwardRef",
"(",
"converter_group",
",",
"wrapper_ref",
")",
"patches",
".",
"append",
"(",
"wrapper_forward_ref",
")",
"return",
"patches"
] | https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/convert/processor/conversion/aoc/upgrade_ability_subprocessor.py#L1228-L1397 |
|
bingwin/MicroChat | 81d9a71a212c1cbca5bba497ec42659a7d25dccf | mars/lint/cpplint.py | python | IsBlankLine | (line) | return not line or line.isspace() | Returns true if the given line is blank.
We consider a line to be blank if the line is empty or consists of
only white spaces.
Args:
line: A line of a string.
Returns:
True, if the given line is blank. | Returns true if the given line is blank. | [
"Returns",
"true",
"if",
"the",
"given",
"line",
"is",
"blank",
"."
] | def IsBlankLine(line):
"""Returns true if the given line is blank.
We consider a line to be blank if the line is empty or consists of
only white spaces.
Args:
line: A line of a string.
Returns:
True, if the given line is blank.
"""
return not line or line.isspace() | [
"def",
"IsBlankLine",
"(",
"line",
")",
":",
"return",
"not",
"line",
"or",
"line",
".",
"isspace",
"(",
")"
] | https://github.com/bingwin/MicroChat/blob/81d9a71a212c1cbca5bba497ec42659a7d25dccf/mars/lint/cpplint.py#L2818-L2830 |
|
FEniCS/dolfinx | 3dfdf038cccdb70962865b58a63bf29c2e55ec6e | python/dolfinx/fem/forms.py | python | form | (form: typing.Union[ufl.Form, typing.Iterable[ufl.Form]], dtype: np.dtype = PETSc.ScalarType,
form_compiler_parameters: dict = {}, jit_parameters: dict = {}) | return _create_form(form) | Create a DOLFINx Form or an array of Forms
Args:
form: A UFL form or list(s) of UFL forms
dtype: Scalar type to use for the compiled form
form_compiler_parameters: See :func:`ffcx_jit <dolfinx.jit.ffcx_jit>`
jit_parameters:See :func:`ffcx_jit <dolfinx.jit.ffcx_jit>`
Returns:
Compiled finite element Form
Notes:
This function is responsible for the compilation of a UFL form
(using FFCx) and attaching coefficients and domains specific
data to the underlying C++ form. It dynamically create a
:class:`Form` instance with an appropriate base class for the
scalar type, e.g. `_cpp.fem.Form_float64`. | Create a DOLFINx Form or an array of Forms | [
"Create",
"a",
"DOLFINx",
"Form",
"or",
"an",
"array",
"of",
"Forms"
] | def form(form: typing.Union[ufl.Form, typing.Iterable[ufl.Form]], dtype: np.dtype = PETSc.ScalarType,
form_compiler_parameters: dict = {}, jit_parameters: dict = {}) -> FormMetaClass:
"""Create a DOLFINx Form or an array of Forms
Args:
form: A UFL form or list(s) of UFL forms
dtype: Scalar type to use for the compiled form
form_compiler_parameters: See :func:`ffcx_jit <dolfinx.jit.ffcx_jit>`
jit_parameters:See :func:`ffcx_jit <dolfinx.jit.ffcx_jit>`
Returns:
Compiled finite element Form
Notes:
This function is responsible for the compilation of a UFL form
(using FFCx) and attaching coefficients and domains specific
data to the underlying C++ form. It dynamically create a
:class:`Form` instance with an appropriate base class for the
scalar type, e.g. `_cpp.fem.Form_float64`.
"""
if dtype == np.float32:
ftype = _cpp.fem.Form_float32
form_compiler_parameters["scalar_type"] = "float"
elif dtype == np.float64:
ftype = _cpp.fem.Form_float64
form_compiler_parameters["scalar_type"] = "double"
elif dtype == np.complex128:
ftype = _cpp.fem.Form_complex128
form_compiler_parameters["scalar_type"] = "double _Complex"
else:
raise NotImplementedError(f"Type {dtype} not supported.")
formcls = type("Form", (FormMetaClass, ftype), {})
def _form(form):
""""Compile a single UFL form"""
# Extract subdomain data from UFL form
sd = form.subdomain_data()
subdomains, = list(sd.values()) # Assuming single domain
domain, = list(sd.keys()) # Assuming single domain
mesh = domain.ufl_cargo()
if mesh is None:
raise RuntimeError("Expecting to find a Mesh in the form.")
ufcx_form, module, code = jit.ffcx_jit(mesh.comm, form,
form_compiler_parameters=form_compiler_parameters,
jit_parameters=jit_parameters)
# For each argument in form extract its function space
V = [arg.ufl_function_space()._cpp_object for arg in form.arguments()]
# Prepare coefficients data. For every coefficient in form take its
# C++ object.
original_coefficients = form.coefficients()
coeffs = [original_coefficients[ufcx_form.original_coefficient_position[i]
]._cpp_object for i in range(ufcx_form.num_coefficients)]
constants = [c._cpp_object for c in form.constants()]
# Subdomain markers (possibly None for some dimensions)
subdomains = {_cpp.fem.IntegralType.cell: subdomains.get("cell"),
_cpp.fem.IntegralType.exterior_facet: subdomains.get("exterior_facet"),
_cpp.fem.IntegralType.interior_facet: subdomains.get("interior_facet"),
_cpp.fem.IntegralType.vertex: subdomains.get("vertex")}
return formcls(ufcx_form, V, coeffs, constants, subdomains, mesh, code)
def _create_form(form):
"""Recursively convert ufl.Forms to dolfinx.fem.Form, otherwise
return form argument"""
if isinstance(form, ufl.Form):
return _form(form)
elif isinstance(form, collections.Iterable):
return list(map(lambda sub_form: _create_form(sub_form), form))
return form
return _create_form(form) | [
"def",
"form",
"(",
"form",
":",
"typing",
".",
"Union",
"[",
"ufl",
".",
"Form",
",",
"typing",
".",
"Iterable",
"[",
"ufl",
".",
"Form",
"]",
"]",
",",
"dtype",
":",
"np",
".",
"dtype",
"=",
"PETSc",
".",
"ScalarType",
",",
"form_compiler_parameters",
":",
"dict",
"=",
"{",
"}",
",",
"jit_parameters",
":",
"dict",
"=",
"{",
"}",
")",
"->",
"FormMetaClass",
":",
"if",
"dtype",
"==",
"np",
".",
"float32",
":",
"ftype",
"=",
"_cpp",
".",
"fem",
".",
"Form_float32",
"form_compiler_parameters",
"[",
"\"scalar_type\"",
"]",
"=",
"\"float\"",
"elif",
"dtype",
"==",
"np",
".",
"float64",
":",
"ftype",
"=",
"_cpp",
".",
"fem",
".",
"Form_float64",
"form_compiler_parameters",
"[",
"\"scalar_type\"",
"]",
"=",
"\"double\"",
"elif",
"dtype",
"==",
"np",
".",
"complex128",
":",
"ftype",
"=",
"_cpp",
".",
"fem",
".",
"Form_complex128",
"form_compiler_parameters",
"[",
"\"scalar_type\"",
"]",
"=",
"\"double _Complex\"",
"else",
":",
"raise",
"NotImplementedError",
"(",
"f\"Type {dtype} not supported.\"",
")",
"formcls",
"=",
"type",
"(",
"\"Form\"",
",",
"(",
"FormMetaClass",
",",
"ftype",
")",
",",
"{",
"}",
")",
"def",
"_form",
"(",
"form",
")",
":",
"\"\"\"\"Compile a single UFL form\"\"\"",
"# Extract subdomain data from UFL form",
"sd",
"=",
"form",
".",
"subdomain_data",
"(",
")",
"subdomains",
",",
"=",
"list",
"(",
"sd",
".",
"values",
"(",
")",
")",
"# Assuming single domain",
"domain",
",",
"=",
"list",
"(",
"sd",
".",
"keys",
"(",
")",
")",
"# Assuming single domain",
"mesh",
"=",
"domain",
".",
"ufl_cargo",
"(",
")",
"if",
"mesh",
"is",
"None",
":",
"raise",
"RuntimeError",
"(",
"\"Expecting to find a Mesh in the form.\"",
")",
"ufcx_form",
",",
"module",
",",
"code",
"=",
"jit",
".",
"ffcx_jit",
"(",
"mesh",
".",
"comm",
",",
"form",
",",
"form_compiler_parameters",
"=",
"form_compiler_parameters",
",",
"jit_parameters",
"=",
"jit_parameters",
")",
"# For each argument in form extract its function space",
"V",
"=",
"[",
"arg",
".",
"ufl_function_space",
"(",
")",
".",
"_cpp_object",
"for",
"arg",
"in",
"form",
".",
"arguments",
"(",
")",
"]",
"# Prepare coefficients data. For every coefficient in form take its",
"# C++ object.",
"original_coefficients",
"=",
"form",
".",
"coefficients",
"(",
")",
"coeffs",
"=",
"[",
"original_coefficients",
"[",
"ufcx_form",
".",
"original_coefficient_position",
"[",
"i",
"]",
"]",
".",
"_cpp_object",
"for",
"i",
"in",
"range",
"(",
"ufcx_form",
".",
"num_coefficients",
")",
"]",
"constants",
"=",
"[",
"c",
".",
"_cpp_object",
"for",
"c",
"in",
"form",
".",
"constants",
"(",
")",
"]",
"# Subdomain markers (possibly None for some dimensions)",
"subdomains",
"=",
"{",
"_cpp",
".",
"fem",
".",
"IntegralType",
".",
"cell",
":",
"subdomains",
".",
"get",
"(",
"\"cell\"",
")",
",",
"_cpp",
".",
"fem",
".",
"IntegralType",
".",
"exterior_facet",
":",
"subdomains",
".",
"get",
"(",
"\"exterior_facet\"",
")",
",",
"_cpp",
".",
"fem",
".",
"IntegralType",
".",
"interior_facet",
":",
"subdomains",
".",
"get",
"(",
"\"interior_facet\"",
")",
",",
"_cpp",
".",
"fem",
".",
"IntegralType",
".",
"vertex",
":",
"subdomains",
".",
"get",
"(",
"\"vertex\"",
")",
"}",
"return",
"formcls",
"(",
"ufcx_form",
",",
"V",
",",
"coeffs",
",",
"constants",
",",
"subdomains",
",",
"mesh",
",",
"code",
")",
"def",
"_create_form",
"(",
"form",
")",
":",
"\"\"\"Recursively convert ufl.Forms to dolfinx.fem.Form, otherwise\n return form argument\"\"\"",
"if",
"isinstance",
"(",
"form",
",",
"ufl",
".",
"Form",
")",
":",
"return",
"_form",
"(",
"form",
")",
"elif",
"isinstance",
"(",
"form",
",",
"collections",
".",
"Iterable",
")",
":",
"return",
"list",
"(",
"map",
"(",
"lambda",
"sub_form",
":",
"_create_form",
"(",
"sub_form",
")",
",",
"form",
")",
")",
"return",
"form",
"return",
"_create_form",
"(",
"form",
")"
] | https://github.com/FEniCS/dolfinx/blob/3dfdf038cccdb70962865b58a63bf29c2e55ec6e/python/dolfinx/fem/forms.py#L63-L140 |
|
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/pandas/py2/pandas/core/indexes/datetimelike.py | python | DatetimeIndexOpsMixin._create_comparison_method | (cls, op) | return wrapper | Create a comparison method that dispatches to ``cls.values``. | Create a comparison method that dispatches to ``cls.values``. | [
"Create",
"a",
"comparison",
"method",
"that",
"dispatches",
"to",
"cls",
".",
"values",
"."
] | def _create_comparison_method(cls, op):
"""
Create a comparison method that dispatches to ``cls.values``.
"""
def wrapper(self, other):
if isinstance(other, ABCSeries):
# the arrays defer to Series for comparison ops but the indexes
# don't, so we have to unwrap here.
other = other._values
result = op(self._data, maybe_unwrap_index(other))
return result
wrapper.__doc__ = op.__doc__
wrapper.__name__ = '__{}__'.format(op.__name__)
return wrapper | [
"def",
"_create_comparison_method",
"(",
"cls",
",",
"op",
")",
":",
"def",
"wrapper",
"(",
"self",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"ABCSeries",
")",
":",
"# the arrays defer to Series for comparison ops but the indexes",
"# don't, so we have to unwrap here.",
"other",
"=",
"other",
".",
"_values",
"result",
"=",
"op",
"(",
"self",
".",
"_data",
",",
"maybe_unwrap_index",
"(",
"other",
")",
")",
"return",
"result",
"wrapper",
".",
"__doc__",
"=",
"op",
".",
"__doc__",
"wrapper",
".",
"__name__",
"=",
"'__{}__'",
".",
"format",
"(",
"op",
".",
"__name__",
")",
"return",
"wrapper"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/indexes/datetimelike.py#L107-L122 |
|
LiquidPlayer/LiquidCore | 9405979363f2353ac9a71ad8ab59685dd7f919c9 | deps/node-10.15.3/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py | python | MakefileWriter.Absolutify | (self, path) | return os.path.normpath(os.path.join(self.path, path)) | Convert a subdirectory-relative path into a base-relative path.
Skips over paths that contain variables. | Convert a subdirectory-relative path into a base-relative path.
Skips over paths that contain variables. | [
"Convert",
"a",
"subdirectory",
"-",
"relative",
"path",
"into",
"a",
"base",
"-",
"relative",
"path",
".",
"Skips",
"over",
"paths",
"that",
"contain",
"variables",
"."
] | def Absolutify(self, path):
"""Convert a subdirectory-relative path into a base-relative path.
Skips over paths that contain variables."""
if '$(' in path:
# Don't call normpath in this case, as it might collapse the
# path too aggressively if it features '..'. However it's still
# important to strip trailing slashes.
return path.rstrip('/')
return os.path.normpath(os.path.join(self.path, path)) | [
"def",
"Absolutify",
"(",
"self",
",",
"path",
")",
":",
"if",
"'$('",
"in",
"path",
":",
"# Don't call normpath in this case, as it might collapse the",
"# path too aggressively if it features '..'. However it's still",
"# important to strip trailing slashes.",
"return",
"path",
".",
"rstrip",
"(",
"'/'",
")",
"return",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"path",
",",
"path",
")",
")"
] | https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py#L1928-L1936 |
|
echronos/echronos | c996f1d2c8af6c6536205eb319c1bf1d4d84569c | external_tools/ply_info/example/ansic/cparse.py | python | p_shift_expression_1 | (t) | shift_expression : additive_expression | shift_expression : additive_expression | [
"shift_expression",
":",
"additive_expression"
] | def p_shift_expression_1(t):
'shift_expression : additive_expression'
pass | [
"def",
"p_shift_expression_1",
"(",
"t",
")",
":",
"pass"
] | https://github.com/echronos/echronos/blob/c996f1d2c8af6c6536205eb319c1bf1d4d84569c/external_tools/ply_info/example/ansic/cparse.py#L703-L705 |
||
tensorflow/tensorflow | 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e | tensorflow/python/ops/ragged/ragged_tensor_shape.py | python | RaggedTensorDynamicShape.rank | (self) | The number of dimensions in this shape, or None if unknown. | The number of dimensions in this shape, or None if unknown. | [
"The",
"number",
"of",
"dimensions",
"in",
"this",
"shape",
"or",
"None",
"if",
"unknown",
"."
] | def rank(self):
"""The number of dimensions in this shape, or None if unknown."""
inner_ndims = tensor_shape.dimension_value(self._inner_dim_sizes.shape[0])
if inner_ndims is None:
return None
else:
return len(self._partitioned_dim_sizes) + inner_ndims | [
"def",
"rank",
"(",
"self",
")",
":",
"inner_ndims",
"=",
"tensor_shape",
".",
"dimension_value",
"(",
"self",
".",
"_inner_dim_sizes",
".",
"shape",
"[",
"0",
"]",
")",
"if",
"inner_ndims",
"is",
"None",
":",
"return",
"None",
"else",
":",
"return",
"len",
"(",
"self",
".",
"_partitioned_dim_sizes",
")",
"+",
"inner_ndims"
] | https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/ragged/ragged_tensor_shape.py#L215-L221 |
||
catboost/catboost | 167f64f237114a4d10b2b4ee42adb4569137debe | contrib/python/packaging/py2/packaging/tags.py | python | _abi3_applies | (python_version) | return len(python_version) > 1 and tuple(python_version) >= (3, 2) | Determine if the Python version supports abi3.
PEP 384 was first implemented in Python 3.2. | Determine if the Python version supports abi3. | [
"Determine",
"if",
"the",
"Python",
"version",
"supports",
"abi3",
"."
] | def _abi3_applies(python_version):
# type: (PythonVersion) -> bool
"""
Determine if the Python version supports abi3.
PEP 384 was first implemented in Python 3.2.
"""
return len(python_version) > 1 and tuple(python_version) >= (3, 2) | [
"def",
"_abi3_applies",
"(",
"python_version",
")",
":",
"# type: (PythonVersion) -> bool",
"return",
"len",
"(",
"python_version",
")",
">",
"1",
"and",
"tuple",
"(",
"python_version",
")",
">=",
"(",
"3",
",",
"2",
")"
] | https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/packaging/py2/packaging/tags.py#L188-L195 |
|
timi-liuliang/echo | 40a5a24d430eee4118314459ab7e03afcb3b8719 | thirdparty/protobuf/python/mox.py | python | Reset | (*args) | Reset mocks.
Args:
# args is any number of mocks to be reset. | Reset mocks. | [
"Reset",
"mocks",
"."
] | def Reset(*args):
"""Reset mocks.
Args:
# args is any number of mocks to be reset.
"""
for mock in args:
mock._Reset() | [
"def",
"Reset",
"(",
"*",
"args",
")",
":",
"for",
"mock",
"in",
"args",
":",
"mock",
".",
"_Reset",
"(",
")"
] | https://github.com/timi-liuliang/echo/blob/40a5a24d430eee4118314459ab7e03afcb3b8719/thirdparty/protobuf/python/mox.py#L257-L265 |
||
wxWidgets/wxPython-Classic | 19571e1ae65f1ac445f5491474121998c97a1bf0 | src/osx_cocoa/richtext.py | python | RichTextCtrl.DeleteSelection | (*args, **kwargs) | return _richtext.RichTextCtrl_DeleteSelection(*args, **kwargs) | DeleteSelection(self)
Remove the current selection. | DeleteSelection(self) | [
"DeleteSelection",
"(",
"self",
")"
] | def DeleteSelection(*args, **kwargs):
"""
DeleteSelection(self)
Remove the current selection.
"""
return _richtext.RichTextCtrl_DeleteSelection(*args, **kwargs) | [
"def",
"DeleteSelection",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextCtrl_DeleteSelection",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/richtext.py#L3228-L3234 |
|
thalium/icebox | 99d147d5b9269222225443ce171b4fd46d8985d4 | third_party/virtualbox/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py | python | SimpleParser.parse | (self) | return self.printErrors() | Parses the given file.
Returns number or errors.
Raises exception on fatal trouble. | Parses the given file.
Returns number or errors.
Raises exception on fatal trouble. | [
"Parses",
"the",
"given",
"file",
".",
"Returns",
"number",
"or",
"errors",
".",
"Raises",
"exception",
"on",
"fatal",
"trouble",
"."
] | def parse(self):
"""
Parses the given file.
Returns number or errors.
Raises exception on fatal trouble.
"""
#self.debug('Parsing %s' % (self.sSrcFile,));
while self.iLine < len(self.asLines):
sLine = self.asLines[self.iLine];
self.iLine += 1;
# We only look for comments, so only lines with a slash might possibly
# influence the parser state.
offSlash = sLine.find('/');
if offSlash >= 0:
if offSlash + 1 >= len(sLine) or sLine[offSlash + 1] != '/' or self.iState != self.kiCode:
offLine = 0;
while offLine < len(sLine):
if self.iState == self.kiCode:
offHit = sLine.find('/*', offLine); # only multiline comments for now.
if offHit >= 0:
self.checkCodeForMacro(sLine[offLine:offHit]);
self.sComment = '';
self.iCommentLine = self.iLine;
self.iState = self.kiCommentMulti;
offLine = offHit + 2;
else:
self.checkCodeForMacro(sLine[offLine:]);
offLine = len(sLine);
elif self.iState == self.kiCommentMulti:
offHit = sLine.find('*/', offLine);
if offHit >= 0:
self.sComment += sLine[offLine:offHit];
self.iState = self.kiCode;
offLine = offHit + 2;
self.parseComment();
else:
self.sComment += sLine[offLine:];
offLine = len(sLine);
else:
assert False;
# C++ line comment.
elif offSlash > 0:
self.checkCodeForMacro(sLine[:offSlash]);
# No slash, but append the line if in multi-line comment.
elif self.iState == self.kiCommentMulti:
#self.debug('line %d: multi' % (self.iLine,));
self.sComment += sLine;
# No slash, but check code line for relevant macro.
elif self.iState == self.kiCode and sLine.find('IEMOP_') >= 0:
#self.debug('line %d: macro' % (self.iLine,));
self.checkCodeForMacro(sLine);
# If the line is a '}' in the first position, complete the instructions.
elif self.iState == self.kiCode and sLine[0] == '}':
#self.debug('line %d: }' % (self.iLine,));
self.doneInstructions();
self.doneInstructions();
self.debug('%3s stubs out of %3s instructions in %s'
% (self.cTotalStubs, self.cTotalInstr, os.path.basename(self.sSrcFile),));
return self.printErrors(); | [
"def",
"parse",
"(",
"self",
")",
":",
"#self.debug('Parsing %s' % (self.sSrcFile,));",
"while",
"self",
".",
"iLine",
"<",
"len",
"(",
"self",
".",
"asLines",
")",
":",
"sLine",
"=",
"self",
".",
"asLines",
"[",
"self",
".",
"iLine",
"]",
"self",
".",
"iLine",
"+=",
"1",
"# We only look for comments, so only lines with a slash might possibly",
"# influence the parser state.",
"offSlash",
"=",
"sLine",
".",
"find",
"(",
"'/'",
")",
"if",
"offSlash",
">=",
"0",
":",
"if",
"offSlash",
"+",
"1",
">=",
"len",
"(",
"sLine",
")",
"or",
"sLine",
"[",
"offSlash",
"+",
"1",
"]",
"!=",
"'/'",
"or",
"self",
".",
"iState",
"!=",
"self",
".",
"kiCode",
":",
"offLine",
"=",
"0",
"while",
"offLine",
"<",
"len",
"(",
"sLine",
")",
":",
"if",
"self",
".",
"iState",
"==",
"self",
".",
"kiCode",
":",
"offHit",
"=",
"sLine",
".",
"find",
"(",
"'/*'",
",",
"offLine",
")",
"# only multiline comments for now.",
"if",
"offHit",
">=",
"0",
":",
"self",
".",
"checkCodeForMacro",
"(",
"sLine",
"[",
"offLine",
":",
"offHit",
"]",
")",
"self",
".",
"sComment",
"=",
"''",
"self",
".",
"iCommentLine",
"=",
"self",
".",
"iLine",
"self",
".",
"iState",
"=",
"self",
".",
"kiCommentMulti",
"offLine",
"=",
"offHit",
"+",
"2",
"else",
":",
"self",
".",
"checkCodeForMacro",
"(",
"sLine",
"[",
"offLine",
":",
"]",
")",
"offLine",
"=",
"len",
"(",
"sLine",
")",
"elif",
"self",
".",
"iState",
"==",
"self",
".",
"kiCommentMulti",
":",
"offHit",
"=",
"sLine",
".",
"find",
"(",
"'*/'",
",",
"offLine",
")",
"if",
"offHit",
">=",
"0",
":",
"self",
".",
"sComment",
"+=",
"sLine",
"[",
"offLine",
":",
"offHit",
"]",
"self",
".",
"iState",
"=",
"self",
".",
"kiCode",
"offLine",
"=",
"offHit",
"+",
"2",
"self",
".",
"parseComment",
"(",
")",
"else",
":",
"self",
".",
"sComment",
"+=",
"sLine",
"[",
"offLine",
":",
"]",
"offLine",
"=",
"len",
"(",
"sLine",
")",
"else",
":",
"assert",
"False",
"# C++ line comment.",
"elif",
"offSlash",
">",
"0",
":",
"self",
".",
"checkCodeForMacro",
"(",
"sLine",
"[",
":",
"offSlash",
"]",
")",
"# No slash, but append the line if in multi-line comment.",
"elif",
"self",
".",
"iState",
"==",
"self",
".",
"kiCommentMulti",
":",
"#self.debug('line %d: multi' % (self.iLine,));",
"self",
".",
"sComment",
"+=",
"sLine",
"# No slash, but check code line for relevant macro.",
"elif",
"self",
".",
"iState",
"==",
"self",
".",
"kiCode",
"and",
"sLine",
".",
"find",
"(",
"'IEMOP_'",
")",
">=",
"0",
":",
"#self.debug('line %d: macro' % (self.iLine,));",
"self",
".",
"checkCodeForMacro",
"(",
"sLine",
")",
"# If the line is a '}' in the first position, complete the instructions.",
"elif",
"self",
".",
"iState",
"==",
"self",
".",
"kiCode",
"and",
"sLine",
"[",
"0",
"]",
"==",
"'}'",
":",
"#self.debug('line %d: }' % (self.iLine,));",
"self",
".",
"doneInstructions",
"(",
")",
"self",
".",
"doneInstructions",
"(",
")",
"self",
".",
"debug",
"(",
"'%3s stubs out of %3s instructions in %s'",
"%",
"(",
"self",
".",
"cTotalStubs",
",",
"self",
".",
"cTotalInstr",
",",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"sSrcFile",
")",
",",
")",
")",
"return",
"self",
".",
"printErrors",
"(",
")"
] | https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py#L3228-L3293 |
|
PaddlePaddle/Paddle | 1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c | python/paddle/fluid/metrics.py | python | MetricBase.__init__ | (self, name) | The constructor of the metric class.
Args:
name(str): The name of metric instance. such as, "accuracy".
It can be used to distinguish different metric instances in a model.
Returns:
The constructed class instance.
Return types:
The MetricBase or its succeed classes | The constructor of the metric class. | [
"The",
"constructor",
"of",
"the",
"metric",
"class",
"."
] | def __init__(self, name):
"""
The constructor of the metric class.
Args:
name(str): The name of metric instance. such as, "accuracy".
It can be used to distinguish different metric instances in a model.
Returns:
The constructed class instance.
Return types:
The MetricBase or its succeed classes
"""
self._name = str(name) if name != None else self.__class__.__name__ | [
"def",
"__init__",
"(",
"self",
",",
"name",
")",
":",
"self",
".",
"_name",
"=",
"str",
"(",
"name",
")",
"if",
"name",
"!=",
"None",
"else",
"self",
".",
"__class__",
".",
"__name__"
] | https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/metrics.py#L87-L102 |
||
gimli-org/gimli | 17aa2160de9b15ababd9ef99e89b1bc3277bbb23 | pygimli/_version.py | python | render_pep440_post | (pieces) | return rendered | TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0] | TAG[.postDISTANCE[.dev0]+gHEX] . | [
"TAG",
"[",
".",
"postDISTANCE",
"[",
".",
"dev0",
"]",
"+",
"gHEX",
"]",
"."
] | def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered | [
"def",
"render_pep440_post",
"(",
"pieces",
")",
":",
"if",
"pieces",
"[",
"\"closest-tag\"",
"]",
":",
"rendered",
"=",
"pieces",
"[",
"\"closest-tag\"",
"]",
"if",
"pieces",
"[",
"\"distance\"",
"]",
"or",
"pieces",
"[",
"\"dirty\"",
"]",
":",
"rendered",
"+=",
"\".post%d\"",
"%",
"pieces",
"[",
"\"distance\"",
"]",
"if",
"pieces",
"[",
"\"dirty\"",
"]",
":",
"rendered",
"+=",
"\".dev0\"",
"rendered",
"+=",
"plus_or_dot",
"(",
"pieces",
")",
"rendered",
"+=",
"\"g%s\"",
"%",
"pieces",
"[",
"\"short\"",
"]",
"else",
":",
"# exception #1",
"rendered",
"=",
"\"0.post%d\"",
"%",
"pieces",
"[",
"\"distance\"",
"]",
"if",
"pieces",
"[",
"\"dirty\"",
"]",
":",
"rendered",
"+=",
"\".dev0\"",
"rendered",
"+=",
"\"+g%s\"",
"%",
"pieces",
"[",
"\"short\"",
"]",
"return",
"rendered"
] | https://github.com/gimli-org/gimli/blob/17aa2160de9b15ababd9ef99e89b1bc3277bbb23/pygimli/_version.py#L363-L387 |
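The version-rendering branches above can be exercised directly; the `pieces` dicts below are hand-built examples of what versioneer's VCS scan would produce, and `plus_or_dot` is reconstructed to match its documented behaviour:

def plus_or_dot(pieces):
    # A second local-version segment joins with '.', the first with '+'.
    return "." if "+" in (pieces.get("closest-tag") or "") else "+"

def render_pep440_post(pieces):
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
    else:
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        rendered += "+g%s" % pieces["short"]
    return rendered

clean = {"closest-tag": "1.2", "distance": 3, "dirty": False, "short": "abc1234"}
untagged = {"closest-tag": None, "distance": 5, "dirty": True, "short": "abc1234"}
print(render_pep440_post(clean))     # 1.2.post3+gabc1234
print(render_pep440_post(untagged))  # 0.post5.dev0+gabc1234
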