nwo
stringlengths 5
86
| sha
stringlengths 40
40
| path
stringlengths 4
189
| language
stringclasses 1
value | identifier
stringlengths 1
94
| parameters
stringlengths 2
4.03k
| argument_list
stringclasses 1
value | return_statement
stringlengths 0
11.5k
| docstring
stringlengths 1
33.2k
| docstring_summary
stringlengths 0
5.15k
| docstring_tokens
list | function
stringlengths 34
151k
| function_tokens
list | url
stringlengths 90
278
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/AWSPythonSDK/1.5.8/docutils/frontend.py
|
python
|
read_config_file
|
(option, opt, value, parser)
|
Read a configuration file during option processing. (Option callback.)
|
Read a configuration file during option processing. (Option callback.)
|
[
"Read",
"a",
"configuration",
"file",
"during",
"option",
"processing",
".",
"(",
"Option",
"callback",
".",
")"
] |
def read_config_file(option, opt, value, parser):
    """
    Read a configuration file during option processing.  (Option callback.)

    `value` is the path of the configuration file to read; the settings
    parsed from it are merged into `parser.values`.  A parse failure is
    reported via `parser.error()` (which, for optparse parsers, prints a
    usage message and exits the program).
    """
    try:
        new_settings = parser.get_config_file_settings(value)
    except ValueError as error:
        # Fixed: the original used the Python-2-only `except ValueError, error:`
        # comma form, which is a SyntaxError on Python 3.  The `as` form works
        # on Python 2.6+ and all Python 3 versions.
        parser.error(error)
    parser.values.update(new_settings, parser)
|
[
"def",
"read_config_file",
"(",
"option",
",",
"opt",
",",
"value",
",",
"parser",
")",
":",
"try",
":",
"new_settings",
"=",
"parser",
".",
"get_config_file_settings",
"(",
"value",
")",
"except",
"ValueError",
",",
"error",
":",
"parser",
".",
"error",
"(",
"error",
")",
"parser",
".",
"values",
".",
"update",
"(",
"new_settings",
",",
"parser",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/docutils/frontend.py#L59-L67
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/aui.py
|
python
|
AuiDockArt.DrawPaneButton
|
(*args, **kwargs)
|
return _aui.AuiDockArt_DrawPaneButton(*args, **kwargs)
|
DrawPaneButton(self, DC dc, Window window, int button, int buttonState,
Rect rect, AuiPaneInfo pane)
|
DrawPaneButton(self, DC dc, Window window, int button, int buttonState,
Rect rect, AuiPaneInfo pane)
|
[
"DrawPaneButton",
"(",
"self",
"DC",
"dc",
"Window",
"window",
"int",
"button",
"int",
"buttonState",
"Rect",
"rect",
"AuiPaneInfo",
"pane",
")"
] |
def DrawPaneButton(*args, **kwargs):
    """
    DrawPaneButton(self, DC dc, Window window, int button, int buttonState,
        Rect rect, AuiPaneInfo pane)

    SWIG-generated thin wrapper: forwards all arguments unchanged to the
    native C++ implementation in the `_aui` extension module.
    """
    return _aui.AuiDockArt_DrawPaneButton(*args, **kwargs)
|
[
"def",
"DrawPaneButton",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_aui",
".",
"AuiDockArt_DrawPaneButton",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/aui.py#L1018-L1023
|
|
cyberbotics/webots
|
af7fa7d68dcf7b4550f1f2e132092b41e83698fc
|
projects/humans/c3d/controllers/c3d_viewer/c3d.py
|
python
|
Manager.get
|
(self, group, default=None)
|
return group
|
Get a group or parameter.
Parameters
----------
group : str
If this string contains a period (.), then the part before the
period will be used to retrieve a group, and the part after the
period will be used to retrieve a parameter from that group. If this
string does not contain a period, then just a group will be
returned.
default : any
Return this value if the named group and parameter are not found.
Returns
-------
value : :class:`Group` or :class:`Param`
Either a group or parameter with the specified name(s). If neither
is found, returns the default value.
|
Get a group or parameter.
|
[
"Get",
"a",
"group",
"or",
"parameter",
"."
] |
def get(self, group, default=None):
    '''Look up a group or parameter by name.

    Parameters
    ----------
    group : str or int
        An integer is used directly as a group key.  A string may name just
        a group, or a group and a parameter separated by a period (.) or a
        colon (:); string lookup is case-insensitive (names are uppercased).
    default : any
        Value to return when the named group or parameter is not found.

    Returns
    -------
    value : :class:`Group` or :class:`Param`
        The matching group or parameter, or ``default`` if neither exists.
    '''
    # Integer keys bypass the string parsing entirely.
    if isinstance(group, int):
        return self.groups.get(group, default)
    name = group.upper()
    param = None
    # A '.' or ':' separates the group name from a parameter name;
    # only the first separator is significant.
    if '.' in name:
        name, param = name.split('.', 1)
    if ':' in name:
        name, param = name.split(':', 1)
    if name not in self.groups:
        return default
    match = self.groups[name]
    if param is None:
        return match
    return match.get(param, default)
|
[
"def",
"get",
"(",
"self",
",",
"group",
",",
"default",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"group",
",",
"int",
")",
":",
"return",
"self",
".",
"groups",
".",
"get",
"(",
"group",
",",
"default",
")",
"group",
"=",
"group",
".",
"upper",
"(",
")",
"param",
"=",
"None",
"if",
"'.'",
"in",
"group",
":",
"group",
",",
"param",
"=",
"group",
".",
"split",
"(",
"'.'",
",",
"1",
")",
"if",
"':'",
"in",
"group",
":",
"group",
",",
"param",
"=",
"group",
".",
"split",
"(",
"':'",
",",
"1",
")",
"if",
"group",
"not",
"in",
"self",
".",
"groups",
":",
"return",
"default",
"group",
"=",
"self",
".",
"groups",
"[",
"group",
"]",
"if",
"param",
"is",
"not",
"None",
":",
"return",
"group",
".",
"get",
"(",
"param",
",",
"default",
")",
"return",
"group"
] |
https://github.com/cyberbotics/webots/blob/af7fa7d68dcf7b4550f1f2e132092b41e83698fc/projects/humans/c3d/controllers/c3d_viewer/c3d.py#L597-L630
|
|
google/fhir
|
d77f57706c1a168529b0b87ca7ccb1c0113e83c2
|
py/google/fhir/json_format/_json_printer.py
|
python
|
JsonPrinter._print_message_field
|
(self, field_name: str,
field: descriptor.FieldDescriptor,
value: Any)
|
Prints singular and repeated fields from a message.
|
Prints singular and repeated fields from a message.
|
[
"Prints",
"singular",
"and",
"repeated",
"fields",
"from",
"a",
"message",
"."
] |
def _print_message_field(self, field_name: str,
                         field: descriptor.FieldDescriptor,
                         value: Any) -> None:
    """Prints a message-valued field (singular or repeated) under its name."""
    # Emit the field name first; the value(s) follow under it.
    self.generator.add_field(field_name)
    if not proto_utils.field_is_repeated(field):
        self._print(cast(message.Message, value))
    else:
        # Repeated field: print each element with the singular printer.
        self._print_list(cast(List[Any], value), self._print)
|
[
"def",
"_print_message_field",
"(",
"self",
",",
"field_name",
":",
"str",
",",
"field",
":",
"descriptor",
".",
"FieldDescriptor",
",",
"value",
":",
"Any",
")",
"->",
"None",
":",
"self",
".",
"generator",
".",
"add_field",
"(",
"field_name",
")",
"if",
"proto_utils",
".",
"field_is_repeated",
"(",
"field",
")",
":",
"self",
".",
"_print_list",
"(",
"cast",
"(",
"List",
"[",
"Any",
"]",
",",
"value",
")",
",",
"self",
".",
"_print",
")",
"else",
":",
"self",
".",
"_print",
"(",
"cast",
"(",
"message",
".",
"Message",
",",
"value",
")",
")"
] |
https://github.com/google/fhir/blob/d77f57706c1a168529b0b87ca7ccb1c0113e83c2/py/google/fhir/json_format/_json_printer.py#L303-L311
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_core.py
|
python
|
GBPosition.SetCol
|
(*args, **kwargs)
|
return _core_.GBPosition_SetCol(*args, **kwargs)
|
SetCol(self, int col)
|
SetCol(self, int col)
|
[
"SetCol",
"(",
"self",
"int",
"col",
")"
] |
def SetCol(*args, **kwargs):
    """SetCol(self, int col)

    SWIG-generated wrapper: delegates to the native `_core_` extension module.
    """
    return _core_.GBPosition_SetCol(*args, **kwargs)
|
[
"def",
"SetCol",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"GBPosition_SetCol",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L15584-L15586
|
|
FreeCAD/FreeCAD
|
ba42231b9c6889b89e064d6d563448ed81e376ec
|
src/Mod/Draft/draftguitools/gui_upgrade.py
|
python
|
Upgrade.proceed
|
(self)
|
Proceed with execution of the command after selection.
|
Proceed with execution of the command after selection.
|
[
"Proceed",
"with",
"execution",
"of",
"the",
"command",
"after",
"selection",
"."
] |
def proceed(self):
    """Proceed with execution of the command after selection."""
    if Gui.Selection.getSelection():
        Gui.addModule("Draft")
        # Assemble the Draft.upgrade call as a single source string;
        # adjacent literals concatenate to the exact same command text.
        command = ('Draft.upgrade'
                   '('
                   'FreeCADGui.Selection.getSelection(), '
                   'delete=True'
                   ')')
        commands = ['_objs_ = ' + command,
                    'FreeCAD.ActiveDocument.recompute()']
        self.commit(translate("draft", "Upgrade"),
                    commands)
    # finish() runs whether or not anything was selected.
    self.finish()
|
[
"def",
"proceed",
"(",
"self",
")",
":",
"if",
"Gui",
".",
"Selection",
".",
"getSelection",
"(",
")",
":",
"Gui",
".",
"addModule",
"(",
"\"Draft\"",
")",
"_cmd",
"=",
"'Draft.upgrade'",
"_cmd",
"+=",
"'('",
"_cmd",
"+=",
"'FreeCADGui.Selection.getSelection(), '",
"_cmd",
"+=",
"'delete=True'",
"_cmd",
"+=",
"')'",
"_cmd_list",
"=",
"[",
"'_objs_ = '",
"+",
"_cmd",
",",
"'FreeCAD.ActiveDocument.recompute()'",
"]",
"self",
".",
"commit",
"(",
"translate",
"(",
"\"draft\"",
",",
"\"Upgrade\"",
")",
",",
"_cmd_list",
")",
"self",
".",
"finish",
"(",
")"
] |
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftguitools/gui_upgrade.py#L74-L87
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_windows.py
|
python
|
CalculateLayoutEvent.GetFlags
|
(*args, **kwargs)
|
return _windows_.CalculateLayoutEvent_GetFlags(*args, **kwargs)
|
GetFlags(self) -> int
|
GetFlags(self) -> int
|
[
"GetFlags",
"(",
"self",
")",
"-",
">",
"int"
] |
def GetFlags(*args, **kwargs):
    """GetFlags(self) -> int

    SWIG-generated wrapper: delegates to the native `_windows_` extension module.
    """
    return _windows_.CalculateLayoutEvent_GetFlags(*args, **kwargs)
|
[
"def",
"GetFlags",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"CalculateLayoutEvent_GetFlags",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_windows.py#L2015-L2017
|
|
tensorflow/tensorflow
|
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
|
tensorflow/python/ops/structured/structured_tensor.py
|
python
|
StructuredTensor._from_pylist_of_value
|
(cls, pyval, typespec, path_so_far)
|
Converts python list `pyval` to a Tensor or RaggedTensor with rank>1.
|
Converts python list `pyval` to a Tensor or RaggedTensor with rank>1.
|
[
"Converts",
"python",
"list",
"pyval",
"to",
"a",
"Tensor",
"or",
"RaggedTensor",
"with",
"rank",
">",
"1",
"."
] |
def _from_pylist_of_value(cls, pyval, typespec, path_so_far):
  """Converts python list `pyval` to a Tensor or RaggedTensor with rank>1.

  Args:
    pyval: Nested python list of values.
    typespec: Expected spec for the result: `None` (parse as a ragged
      constant), `tensor_spec.TensorSpec`, `ragged_tensor.RaggedTensorSpec`,
      or `StructuredTensorSpec`.
    path_so_far: Field path to `pyval`, used only in error messages.

  Raises:
    ValueError: If `pyval` cannot be parsed, or does not match `typespec`.
  """
  # No spec: best-effort parse as a ragged constant.
  if typespec is None:
    try:
      return ragged_factory_ops.constant(pyval)
    except Exception as exc:
      raise ValueError('Error parsing path %r' % (path_so_far,)) from exc
  elif isinstance(typespec, tensor_spec.TensorSpec):
    try:
      result = constant_op.constant(pyval, typespec.dtype)
    except Exception as exc:
      raise ValueError('Error parsing path %r' % (path_so_far,)) from exc
    # The dense constant must also satisfy the spec's (possibly partial) shape.
    if not typespec.shape.is_compatible_with(result.shape):
      raise ValueError('Value at %r does not match typespec: %r vs %r' %
                       (path_so_far, typespec, pyval))
    return result
  elif isinstance(typespec, ragged_tensor.RaggedTensorSpec):
    # Private spec attributes carry dtype/ragged-rank/splits/inner-shape info.
    # pylint: disable=protected-access
    try:
      return ragged_factory_ops.constant(
          pyval,
          dtype=typespec._dtype,
          ragged_rank=typespec._ragged_rank,
          row_splits_dtype=typespec._row_splits_dtype,
          inner_shape=typespec._shape[typespec._ragged_rank + 1:])
    except Exception as exc:
      raise ValueError('Error parsing path %r' % (path_so_far,)) from exc
  elif isinstance(typespec, StructuredTensorSpec):
    # A StructuredTensor spec is only satisfiable here by a nest of empty
    # lists; its nesting depth becomes the rank.
    empty_rank = _pyval_empty_list_depth(pyval)
    if empty_rank is None:
      raise ValueError('Value at %r does not match typespec: %r vs %r' %
                       (path_so_far, typespec, pyval))
    else:
      return cls._from_pylist_of_dict(pyval, set(), empty_rank, typespec,
                                      path_so_far)
  else:
    raise ValueError('Value at %r does not match typespec: %r vs %r' %
                     (path_so_far, typespec, pyval))
|
[
"def",
"_from_pylist_of_value",
"(",
"cls",
",",
"pyval",
",",
"typespec",
",",
"path_so_far",
")",
":",
"if",
"typespec",
"is",
"None",
":",
"try",
":",
"return",
"ragged_factory_ops",
".",
"constant",
"(",
"pyval",
")",
"except",
"Exception",
"as",
"exc",
":",
"raise",
"ValueError",
"(",
"'Error parsing path %r'",
"%",
"(",
"path_so_far",
",",
")",
")",
"from",
"exc",
"elif",
"isinstance",
"(",
"typespec",
",",
"tensor_spec",
".",
"TensorSpec",
")",
":",
"try",
":",
"result",
"=",
"constant_op",
".",
"constant",
"(",
"pyval",
",",
"typespec",
".",
"dtype",
")",
"except",
"Exception",
"as",
"exc",
":",
"raise",
"ValueError",
"(",
"'Error parsing path %r'",
"%",
"(",
"path_so_far",
",",
")",
")",
"from",
"exc",
"if",
"not",
"typespec",
".",
"shape",
".",
"is_compatible_with",
"(",
"result",
".",
"shape",
")",
":",
"raise",
"ValueError",
"(",
"'Value at %r does not match typespec: %r vs %r'",
"%",
"(",
"path_so_far",
",",
"typespec",
",",
"pyval",
")",
")",
"return",
"result",
"elif",
"isinstance",
"(",
"typespec",
",",
"ragged_tensor",
".",
"RaggedTensorSpec",
")",
":",
"# pylint: disable=protected-access",
"try",
":",
"return",
"ragged_factory_ops",
".",
"constant",
"(",
"pyval",
",",
"dtype",
"=",
"typespec",
".",
"_dtype",
",",
"ragged_rank",
"=",
"typespec",
".",
"_ragged_rank",
",",
"row_splits_dtype",
"=",
"typespec",
".",
"_row_splits_dtype",
",",
"inner_shape",
"=",
"typespec",
".",
"_shape",
"[",
"typespec",
".",
"_ragged_rank",
"+",
"1",
":",
"]",
")",
"except",
"Exception",
"as",
"exc",
":",
"raise",
"ValueError",
"(",
"'Error parsing path %r'",
"%",
"(",
"path_so_far",
",",
")",
")",
"from",
"exc",
"elif",
"isinstance",
"(",
"typespec",
",",
"StructuredTensorSpec",
")",
":",
"empty_rank",
"=",
"_pyval_empty_list_depth",
"(",
"pyval",
")",
"if",
"empty_rank",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Value at %r does not match typespec: %r vs %r'",
"%",
"(",
"path_so_far",
",",
"typespec",
",",
"pyval",
")",
")",
"else",
":",
"return",
"cls",
".",
"_from_pylist_of_dict",
"(",
"pyval",
",",
"set",
"(",
")",
",",
"empty_rank",
",",
"typespec",
",",
"path_so_far",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Value at %r does not match typespec: %r vs %r'",
"%",
"(",
"path_so_far",
",",
"typespec",
",",
"pyval",
")",
")"
] |
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/structured/structured_tensor.py#L987-L1024
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/html.py
|
python
|
HtmlParser.GetSource
|
(*args, **kwargs)
|
return _html.HtmlParser_GetSource(*args, **kwargs)
|
GetSource(self) -> String
|
GetSource(self) -> String
|
[
"GetSource",
"(",
"self",
")",
"-",
">",
"String"
] |
def GetSource(*args, **kwargs):
    """GetSource(self) -> String

    SWIG-generated wrapper: delegates to the native `_html` extension module.
    """
    return _html.HtmlParser_GetSource(*args, **kwargs)
|
[
"def",
"GetSource",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_html",
".",
"HtmlParser_GetSource",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/html.py#L225-L227
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/scipy/py3/scipy/stats/morestats.py
|
python
|
probplot
|
(x, sparams=(), dist='norm', fit=True, plot=None, rvalue=False)
|
Calculate quantiles for a probability plot, and optionally show the plot.
Generates a probability plot of sample data against the quantiles of a
specified theoretical distribution (the normal distribution by default).
`probplot` optionally calculates a best-fit line for the data and plots the
results using Matplotlib or a given plot function.
Parameters
----------
x : array_like
Sample/response data from which `probplot` creates the plot.
sparams : tuple, optional
Distribution-specific shape parameters (shape parameters plus location
and scale).
dist : str or stats.distributions instance, optional
Distribution or distribution function name. The default is 'norm' for a
normal probability plot. Objects that look enough like a
stats.distributions instance (i.e. they have a ``ppf`` method) are also
accepted.
fit : bool, optional
Fit a least-squares regression (best-fit) line to the sample data if
True (default).
plot : object, optional
If given, plots the quantiles and least squares fit.
`plot` is an object that has to have methods "plot" and "text".
The `matplotlib.pyplot` module or a Matplotlib Axes object can be used,
or a custom object with the same methods.
Default is None, which means that no plot is created.
Returns
-------
(osm, osr) : tuple of ndarrays
Tuple of theoretical quantiles (osm, or order statistic medians) and
ordered responses (osr). `osr` is simply sorted input `x`.
For details on how `osm` is calculated see the Notes section.
(slope, intercept, r) : tuple of floats, optional
Tuple containing the result of the least-squares fit, if that is
performed by `probplot`. `r` is the square root of the coefficient of
determination. If ``fit=False`` and ``plot=None``, this tuple is not
returned.
Notes
-----
Even if `plot` is given, the figure is not shown or saved by `probplot`;
``plt.show()`` or ``plt.savefig('figname.png')`` should be used after
calling `probplot`.
`probplot` generates a probability plot, which should not be confused with
a Q-Q or a P-P plot. Statsmodels has more extensive functionality of this
type, see ``statsmodels.api.ProbPlot``.
The formula used for the theoretical quantiles (horizontal axis of the
probability plot) is Filliben's estimate::
quantiles = dist.ppf(val), for
0.5**(1/n), for i = n
val = (i - 0.3175) / (n + 0.365), for i = 2, ..., n-1
1 - 0.5**(1/n), for i = 1
where ``i`` indicates the i-th ordered value and ``n`` is the total number
of values.
Examples
--------
>>> from scipy import stats
>>> import matplotlib.pyplot as plt
>>> nsample = 100
>>> np.random.seed(7654321)
A t distribution with small degrees of freedom:
>>> ax1 = plt.subplot(221)
>>> x = stats.t.rvs(3, size=nsample)
>>> res = stats.probplot(x, plot=plt)
A t distribution with larger degrees of freedom:
>>> ax2 = plt.subplot(222)
>>> x = stats.t.rvs(25, size=nsample)
>>> res = stats.probplot(x, plot=plt)
A mixture of two normal distributions with broadcasting:
>>> ax3 = plt.subplot(223)
>>> x = stats.norm.rvs(loc=[0,5], scale=[1,1.5],
... size=(nsample//2,2)).ravel()
>>> res = stats.probplot(x, plot=plt)
A standard normal distribution:
>>> ax4 = plt.subplot(224)
>>> x = stats.norm.rvs(loc=0, scale=1, size=nsample)
>>> res = stats.probplot(x, plot=plt)
Produce a new figure with a loggamma distribution, using the ``dist`` and
``sparams`` keywords:
>>> fig = plt.figure()
>>> ax = fig.add_subplot(111)
>>> x = stats.loggamma.rvs(c=2.5, size=500)
>>> res = stats.probplot(x, dist=stats.loggamma, sparams=(2.5,), plot=ax)
>>> ax.set_title("Probplot for loggamma dist with shape parameter 2.5")
Show the results with Matplotlib:
>>> plt.show()
|
Calculate quantiles for a probability plot, and optionally show the plot.
|
[
"Calculate",
"quantiles",
"for",
"a",
"probability",
"plot",
"and",
"optionally",
"show",
"the",
"plot",
"."
] |
def probplot(x, sparams=(), dist='norm', fit=True, plot=None, rvalue=False):
    """
    Calculate quantiles for a probability plot, and optionally show the plot.
    Generates a probability plot of sample data against the quantiles of a
    specified theoretical distribution (the normal distribution by default).
    `probplot` optionally calculates a best-fit line for the data and plots the
    results using Matplotlib or a given plot function.
    Parameters
    ----------
    x : array_like
        Sample/response data from which `probplot` creates the plot.
    sparams : tuple, optional
        Distribution-specific shape parameters (shape parameters plus location
        and scale).
    dist : str or stats.distributions instance, optional
        Distribution or distribution function name. The default is 'norm' for a
        normal probability plot. Objects that look enough like a
        stats.distributions instance (i.e. they have a ``ppf`` method) are also
        accepted.
    fit : bool, optional
        Fit a least-squares regression (best-fit) line to the sample data if
        True (default).
    plot : object, optional
        If given, plots the quantiles and least squares fit.
        `plot` is an object that has to have methods "plot" and "text".
        The `matplotlib.pyplot` module or a Matplotlib Axes object can be used,
        or a custom object with the same methods.
        Default is None, which means that no plot is created.
    rvalue : bool, optional
        If `plot` is provided and `fit` is True, setting `rvalue` to True
        includes the coefficient of determination on the plot.
        Default is False.
    Returns
    -------
    (osm, osr) : tuple of ndarrays
        Tuple of theoretical quantiles (osm, or order statistic medians) and
        ordered responses (osr). `osr` is simply sorted input `x`.
        For details on how `osm` is calculated see the Notes section.
    (slope, intercept, r) : tuple of floats, optional
        Tuple containing the result of the least-squares fit, if that is
        performed by `probplot`. `r` is the square root of the coefficient of
        determination. If ``fit=False`` and ``plot=None``, this tuple is not
        returned.
    Notes
    -----
    Even if `plot` is given, the figure is not shown or saved by `probplot`;
    ``plt.show()`` or ``plt.savefig('figname.png')`` should be used after
    calling `probplot`.
    `probplot` generates a probability plot, which should not be confused with
    a Q-Q or a P-P plot. Statsmodels has more extensive functionality of this
    type, see ``statsmodels.api.ProbPlot``.
    The formula used for the theoretical quantiles (horizontal axis of the
    probability plot) is Filliben's estimate::
        quantiles = dist.ppf(val), for
                0.5**(1/n),                  for i = n
          val = (i - 0.3175) / (n + 0.365),  for i = 2, ..., n-1
                1 - 0.5**(1/n),              for i = 1
    where ``i`` indicates the i-th ordered value and ``n`` is the total number
    of values.
    Examples
    --------
    >>> from scipy import stats
    >>> import matplotlib.pyplot as plt
    >>> nsample = 100
    >>> np.random.seed(7654321)
    A t distribution with small degrees of freedom:
    >>> ax1 = plt.subplot(221)
    >>> x = stats.t.rvs(3, size=nsample)
    >>> res = stats.probplot(x, plot=plt)
    A t distribution with larger degrees of freedom:
    >>> ax2 = plt.subplot(222)
    >>> x = stats.t.rvs(25, size=nsample)
    >>> res = stats.probplot(x, plot=plt)
    A mixture of two normal distributions with broadcasting:
    >>> ax3 = plt.subplot(223)
    >>> x = stats.norm.rvs(loc=[0,5], scale=[1,1.5],
    ...                    size=(nsample//2,2)).ravel()
    >>> res = stats.probplot(x, plot=plt)
    A standard normal distribution:
    >>> ax4 = plt.subplot(224)
    >>> x = stats.norm.rvs(loc=0, scale=1, size=nsample)
    >>> res = stats.probplot(x, plot=plt)
    Produce a new figure with a loggamma distribution, using the ``dist`` and
    ``sparams`` keywords:
    >>> fig = plt.figure()
    >>> ax = fig.add_subplot(111)
    >>> x = stats.loggamma.rvs(c=2.5, size=500)
    >>> res = stats.probplot(x, dist=stats.loggamma, sparams=(2.5,), plot=ax)
    >>> ax.set_title("Probplot for loggamma dist with shape parameter 2.5")
    Show the results with Matplotlib:
    >>> plt.show()
    """
    x = np.asarray(x)
    # A fit is needed either when requested explicitly or when plotting
    # (the plot draws the best-fit line).
    _perform_fit = fit or (plot is not None)
    if x.size == 0:
        # Degenerate input: return empty arrays (plus NaN fit statistics
        # when a fit was requested) instead of failing downstream.
        if _perform_fit:
            return (x, x), (np.nan, np.nan, 0.0)
        else:
            return x, x
    osm_uniform = _calc_uniform_order_statistic_medians(len(x))
    dist = _parse_dist_kw(dist, enforce_subclass=False)
    # Normalize `sparams` to a tuple so it can be star-expanded into ppf.
    if sparams is None:
        sparams = ()
    if isscalar(sparams):
        sparams = (sparams,)
    if not isinstance(sparams, tuple):
        sparams = tuple(sparams)
    # Theoretical quantiles: order statistic medians mapped through the
    # distribution's inverse CDF, plotted against the sorted sample.
    osm = dist.ppf(osm_uniform, *sparams)
    osr = sort(x)
    if _perform_fit:
        # perform a linear least squares fit.
        slope, intercept, r, prob, sterrest = stats.linregress(osm, osr)
    if plot is not None:
        plot.plot(osm, osr, 'bo', osm, slope*osm + intercept, 'r-')
        _add_axis_labels_title(plot, xlabel='Theoretical quantiles',
                               ylabel='Ordered Values',
                               title='Probability Plot')
        # Add R^2 value to the plot as text
        if rvalue:
            xmin = amin(osm)
            xmax = amax(osm)
            ymin = amin(x)
            ymax = amax(x)
            # Place the annotation near the lower-right of the data extent.
            posx = xmin + 0.70 * (xmax - xmin)
            posy = ymin + 0.01 * (ymax - ymin)
            plot.text(posx, posy, "$R^2=%1.4f$" % r**2)
    if fit:
        return (osm, osr), (slope, intercept, r)
    else:
        return osm, osr
|
[
"def",
"probplot",
"(",
"x",
",",
"sparams",
"=",
"(",
")",
",",
"dist",
"=",
"'norm'",
",",
"fit",
"=",
"True",
",",
"plot",
"=",
"None",
",",
"rvalue",
"=",
"False",
")",
":",
"x",
"=",
"np",
".",
"asarray",
"(",
"x",
")",
"_perform_fit",
"=",
"fit",
"or",
"(",
"plot",
"is",
"not",
"None",
")",
"if",
"x",
".",
"size",
"==",
"0",
":",
"if",
"_perform_fit",
":",
"return",
"(",
"x",
",",
"x",
")",
",",
"(",
"np",
".",
"nan",
",",
"np",
".",
"nan",
",",
"0.0",
")",
"else",
":",
"return",
"x",
",",
"x",
"osm_uniform",
"=",
"_calc_uniform_order_statistic_medians",
"(",
"len",
"(",
"x",
")",
")",
"dist",
"=",
"_parse_dist_kw",
"(",
"dist",
",",
"enforce_subclass",
"=",
"False",
")",
"if",
"sparams",
"is",
"None",
":",
"sparams",
"=",
"(",
")",
"if",
"isscalar",
"(",
"sparams",
")",
":",
"sparams",
"=",
"(",
"sparams",
",",
")",
"if",
"not",
"isinstance",
"(",
"sparams",
",",
"tuple",
")",
":",
"sparams",
"=",
"tuple",
"(",
"sparams",
")",
"osm",
"=",
"dist",
".",
"ppf",
"(",
"osm_uniform",
",",
"*",
"sparams",
")",
"osr",
"=",
"sort",
"(",
"x",
")",
"if",
"_perform_fit",
":",
"# perform a linear least squares fit.",
"slope",
",",
"intercept",
",",
"r",
",",
"prob",
",",
"sterrest",
"=",
"stats",
".",
"linregress",
"(",
"osm",
",",
"osr",
")",
"if",
"plot",
"is",
"not",
"None",
":",
"plot",
".",
"plot",
"(",
"osm",
",",
"osr",
",",
"'bo'",
",",
"osm",
",",
"slope",
"*",
"osm",
"+",
"intercept",
",",
"'r-'",
")",
"_add_axis_labels_title",
"(",
"plot",
",",
"xlabel",
"=",
"'Theoretical quantiles'",
",",
"ylabel",
"=",
"'Ordered Values'",
",",
"title",
"=",
"'Probability Plot'",
")",
"# Add R^2 value to the plot as text",
"if",
"rvalue",
":",
"xmin",
"=",
"amin",
"(",
"osm",
")",
"xmax",
"=",
"amax",
"(",
"osm",
")",
"ymin",
"=",
"amin",
"(",
"x",
")",
"ymax",
"=",
"amax",
"(",
"x",
")",
"posx",
"=",
"xmin",
"+",
"0.70",
"*",
"(",
"xmax",
"-",
"xmin",
")",
"posy",
"=",
"ymin",
"+",
"0.01",
"*",
"(",
"ymax",
"-",
"ymin",
")",
"plot",
".",
"text",
"(",
"posx",
",",
"posy",
",",
"\"$R^2=%1.4f$\"",
"%",
"r",
"**",
"2",
")",
"if",
"fit",
":",
"return",
"(",
"osm",
",",
"osr",
")",
",",
"(",
"slope",
",",
"intercept",
",",
"r",
")",
"else",
":",
"return",
"osm",
",",
"osr"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/stats/morestats.py#L474-L627
|
||
nsnam/ns-3-dev-git
|
efdb2e21f45c0a87a60b47c547b68fa140a7b686
|
utils/grid.py
|
python
|
GraphicRenderer.get_selection_rectangle
|
(self)
|
return(x_start, y_start, x_end - x_start, y_height)
|
! Get Selection Rectangle
@param self this object
@return rectangle
|
! Get Selection Rectangle
|
[
"!",
"Get",
"Selection",
"Rectangle"
] |
def get_selection_rectangle(self):
    """! Get Selection Rectangle
    @param self this object
    @return rectangle as (x, y, width, height)
    """
    # Horizontal extent comes from the bottom scale's pixel positions
    # for the selected range.
    x_left = self.__bot_scale.get_position(self.__r_start)
    x_right = self.__bot_scale.get_position(self.__r_end)
    # Vertical offset: everything stacked above the bottom scale, plus
    # a fixed 20-pixel margin.
    y_top = (self.__top_legend.get_height()
             + self.__data.get_height()
             + self.__mid_scale.get_height()
             + 20)
    height = self.__bot_scale.get_height() + 20
    return (x_left, y_top, x_right - x_left, height)
|
[
"def",
"get_selection_rectangle",
"(",
"self",
")",
":",
"y_start",
"=",
"self",
".",
"__top_legend",
".",
"get_height",
"(",
")",
"+",
"self",
".",
"__data",
".",
"get_height",
"(",
")",
"+",
"self",
".",
"__mid_scale",
".",
"get_height",
"(",
")",
"+",
"20",
"y_height",
"=",
"self",
".",
"__bot_scale",
".",
"get_height",
"(",
")",
"+",
"20",
"x_start",
"=",
"self",
".",
"__bot_scale",
".",
"get_position",
"(",
"self",
".",
"__r_start",
")",
"x_end",
"=",
"self",
".",
"__bot_scale",
".",
"get_position",
"(",
"self",
".",
"__r_end",
")",
"return",
"(",
"x_start",
",",
"y_start",
",",
"x_end",
"-",
"x_start",
",",
"y_height",
")"
] |
https://github.com/nsnam/ns-3-dev-git/blob/efdb2e21f45c0a87a60b47c547b68fa140a7b686/utils/grid.py#L1039-L1048
|
|
macchina-io/macchina.io
|
ef24ba0e18379c3dd48fb84e6dbf991101cb8db0
|
platform/JS/V8/tools/gyp/pylib/gyp/input.py
|
python
|
BuildTargetsDict
|
(data)
|
return targets
|
Builds a dict mapping fully-qualified target names to their target dicts.
|data| is a dict mapping loaded build files by pathname relative to the
current directory. Values in |data| are build file contents. For each
|data| value with a "targets" key, the value of the "targets" key is taken
as a list containing target dicts. Each target's fully-qualified name is
constructed from the pathname of the build file (|data| key) and its
"target_name" property. These fully-qualified names are used as the keys
in the returned dict. These keys provide access to the target dicts,
the dicts in the "targets" lists.
|
Builds a dict mapping fully-qualified target names to their target dicts.
|
[
"Builds",
"a",
"dict",
"mapping",
"fully",
"-",
"qualified",
"target",
"names",
"to",
"their",
"target",
"dicts",
"."
] |
def BuildTargetsDict(data):
  """Builds a dict mapping fully-qualified target names to their target dicts.

  |data| maps build-file pathnames (relative to the current directory) to
  loaded build-file contents.  Every "targets" list found in those contents
  contributes its target dicts; each one is keyed by the fully-qualified
  name derived from its build file's pathname, its "target_name" property
  and its "toolset".

  Raises:
    GypError: if two targets resolve to the same fully-qualified name.
  """
  by_qualified_name = {}
  for build_file in data['target_build_files']:
    # Build files without a "targets" key contribute nothing.
    for target in data[build_file].get('targets', []):
      qualified_name = gyp.common.QualifiedTarget(build_file,
                                                  target['target_name'],
                                                  target['toolset'])
      if qualified_name in by_qualified_name:
        raise GypError('Duplicate target definitions for ' + qualified_name)
      by_qualified_name[qualified_name] = target
  return by_qualified_name
|
[
"def",
"BuildTargetsDict",
"(",
"data",
")",
":",
"targets",
"=",
"{",
"}",
"for",
"build_file",
"in",
"data",
"[",
"'target_build_files'",
"]",
":",
"for",
"target",
"in",
"data",
"[",
"build_file",
"]",
".",
"get",
"(",
"'targets'",
",",
"[",
"]",
")",
":",
"target_name",
"=",
"gyp",
".",
"common",
".",
"QualifiedTarget",
"(",
"build_file",
",",
"target",
"[",
"'target_name'",
"]",
",",
"target",
"[",
"'toolset'",
"]",
")",
"if",
"target_name",
"in",
"targets",
":",
"raise",
"GypError",
"(",
"'Duplicate target definitions for '",
"+",
"target_name",
")",
"targets",
"[",
"target_name",
"]",
"=",
"target",
"return",
"targets"
] |
https://github.com/macchina-io/macchina.io/blob/ef24ba0e18379c3dd48fb84e6dbf991101cb8db0/platform/JS/V8/tools/gyp/pylib/gyp/input.py#L1333-L1356
|
|
ApolloAuto/apollo-platform
|
86d9dc6743b496ead18d597748ebabd34a513289
|
ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/oldnumeric/ma.py
|
python
|
_MaskedPrintOption.set_display
|
(self, s)
|
set_display(s) sets what prints for masked values.
|
set_display(s) sets what prints for masked values.
|
[
"set_display",
"(",
"s",
")",
"sets",
"what",
"prints",
"for",
"masked",
"values",
"."
] |
def set_display(self, s):
    """set_display(s) sets what prints for masked values."""
    # Remember the replacement text shown in place of masked entries.
    self._display = s
|
[
"def",
"set_display",
"(",
"self",
",",
"s",
")",
":",
"self",
".",
"_display",
"=",
"s"
] |
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/oldnumeric/ma.py#L69-L71
|
||
ceph/ceph
|
959663007321a369c83218414a29bd9dbc8bda3a
|
qa/tasks/rgw_multisite.py
|
python
|
create_zone
|
(ctx, cluster, gateways, creds, zonegroup, config)
|
return zone
|
create a zone with the given configuration
|
create a zone with the given configuration
|
[
"create",
"a",
"zone",
"with",
"the",
"given",
"configuration"
] |
def create_zone(ctx, cluster, gateways, creds, zonegroup, config):
""" create a zone with the given configuration """
zone = multisite.Zone(config['name'], zonegroup, cluster)
if config.pop('is_pubsub', False):
zone = PSZone(config['name'], zonegroup, cluster)
else:
zone = RadosZone(config['name'], zonegroup, cluster)
# collect Gateways for the zone's endpoints
endpoints = config.get('endpoints')
if not endpoints:
raise ConfigError('no \'endpoints\' for zone %s' % config['name'])
zone.gateways = [gateways[role] for role in endpoints]
for gateway in zone.gateways:
gateway.set_zone(zone)
# format the gateway endpoints
endpoints = [g.endpoint() for g in zone.gateways]
args = is_default_arg(config)
args += is_master_arg(config)
args += creds.credential_args()
if len(endpoints):
args += ['--endpoints', ','.join(endpoints)]
zone.create(cluster, args)
zonegroup.zones.append(zone)
create_zone_pools(ctx, zone)
if ctx.rgw.compression_type:
configure_zone_compression(zone, ctx.rgw.compression_type)
zonegroup.zones_by_type.setdefault(zone.tier_type(), []).append(zone)
if zone.is_read_only():
zonegroup.ro_zones.append(zone)
else:
zonegroup.rw_zones.append(zone)
return zone
|
[
"def",
"create_zone",
"(",
"ctx",
",",
"cluster",
",",
"gateways",
",",
"creds",
",",
"zonegroup",
",",
"config",
")",
":",
"zone",
"=",
"multisite",
".",
"Zone",
"(",
"config",
"[",
"'name'",
"]",
",",
"zonegroup",
",",
"cluster",
")",
"if",
"config",
".",
"pop",
"(",
"'is_pubsub'",
",",
"False",
")",
":",
"zone",
"=",
"PSZone",
"(",
"config",
"[",
"'name'",
"]",
",",
"zonegroup",
",",
"cluster",
")",
"else",
":",
"zone",
"=",
"RadosZone",
"(",
"config",
"[",
"'name'",
"]",
",",
"zonegroup",
",",
"cluster",
")",
"# collect Gateways for the zone's endpoints",
"endpoints",
"=",
"config",
".",
"get",
"(",
"'endpoints'",
")",
"if",
"not",
"endpoints",
":",
"raise",
"ConfigError",
"(",
"'no \\'endpoints\\' for zone %s'",
"%",
"config",
"[",
"'name'",
"]",
")",
"zone",
".",
"gateways",
"=",
"[",
"gateways",
"[",
"role",
"]",
"for",
"role",
"in",
"endpoints",
"]",
"for",
"gateway",
"in",
"zone",
".",
"gateways",
":",
"gateway",
".",
"set_zone",
"(",
"zone",
")",
"# format the gateway endpoints",
"endpoints",
"=",
"[",
"g",
".",
"endpoint",
"(",
")",
"for",
"g",
"in",
"zone",
".",
"gateways",
"]",
"args",
"=",
"is_default_arg",
"(",
"config",
")",
"args",
"+=",
"is_master_arg",
"(",
"config",
")",
"args",
"+=",
"creds",
".",
"credential_args",
"(",
")",
"if",
"len",
"(",
"endpoints",
")",
":",
"args",
"+=",
"[",
"'--endpoints'",
",",
"','",
".",
"join",
"(",
"endpoints",
")",
"]",
"zone",
".",
"create",
"(",
"cluster",
",",
"args",
")",
"zonegroup",
".",
"zones",
".",
"append",
"(",
"zone",
")",
"create_zone_pools",
"(",
"ctx",
",",
"zone",
")",
"if",
"ctx",
".",
"rgw",
".",
"compression_type",
":",
"configure_zone_compression",
"(",
"zone",
",",
"ctx",
".",
"rgw",
".",
"compression_type",
")",
"zonegroup",
".",
"zones_by_type",
".",
"setdefault",
"(",
"zone",
".",
"tier_type",
"(",
")",
",",
"[",
"]",
")",
".",
"append",
"(",
"zone",
")",
"if",
"zone",
".",
"is_read_only",
"(",
")",
":",
"zonegroup",
".",
"ro_zones",
".",
"append",
"(",
"zone",
")",
"else",
":",
"zonegroup",
".",
"rw_zones",
".",
"append",
"(",
"zone",
")",
"return",
"zone"
] |
https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/qa/tasks/rgw_multisite.py#L373-L411
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/distlib/resources.py
|
python
|
Resource.as_stream
|
(self)
|
return self.finder.get_stream(self)
|
Get the resource as a stream.
This is not a property to make it obvious that it returns a new stream
each time.
|
Get the resource as a stream.
|
[
"Get",
"the",
"resource",
"as",
"a",
"stream",
"."
] |
def as_stream(self):
"""
Get the resource as a stream.
This is not a property to make it obvious that it returns a new stream
each time.
"""
return self.finder.get_stream(self)
|
[
"def",
"as_stream",
"(",
"self",
")",
":",
"return",
"self",
".",
"finder",
".",
"get_stream",
"(",
"self",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/distlib/resources.py#L86-L93
|
|
apiaryio/snowcrash
|
b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3
|
tools/gyp/pylib/gyp/generator/analyzer.py
|
python
|
_AddSources
|
(sources, base_path, base_path_components, result)
|
Extracts valid sources from |sources| and adds them to |result|. Each
source file is relative to |base_path|, but may contain '..'. To make
resolving '..' easier |base_path_components| contains each of the
directories in |base_path|. Additionally each source may contain variables.
Such sources are ignored as it is assumed dependencies on them are expressed
and tracked in some other means.
|
Extracts valid sources from |sources| and adds them to |result|. Each
source file is relative to |base_path|, but may contain '..'. To make
resolving '..' easier |base_path_components| contains each of the
directories in |base_path|. Additionally each source may contain variables.
Such sources are ignored as it is assumed dependencies on them are expressed
and tracked in some other means.
|
[
"Extracts",
"valid",
"sources",
"from",
"|sources|",
"and",
"adds",
"them",
"to",
"|result|",
".",
"Each",
"source",
"file",
"is",
"relative",
"to",
"|base_path|",
"but",
"may",
"contain",
"..",
".",
"To",
"make",
"resolving",
"..",
"easier",
"|base_path_components|",
"contains",
"each",
"of",
"the",
"directories",
"in",
"|base_path|",
".",
"Additionally",
"each",
"source",
"may",
"contain",
"variables",
".",
"Such",
"sources",
"are",
"ignored",
"as",
"it",
"is",
"assumed",
"dependencies",
"on",
"them",
"are",
"expressed",
"and",
"tracked",
"in",
"some",
"other",
"means",
"."
] |
def _AddSources(sources, base_path, base_path_components, result):
"""Extracts valid sources from |sources| and adds them to |result|. Each
source file is relative to |base_path|, but may contain '..'. To make
resolving '..' easier |base_path_components| contains each of the
directories in |base_path|. Additionally each source may contain variables.
Such sources are ignored as it is assumed dependencies on them are expressed
and tracked in some other means."""
# NOTE: gyp paths are always posix style.
for source in sources:
if not len(source) or source.startswith('!!!') or source.startswith('$'):
continue
# variable expansion may lead to //.
org_source = source
source = source[0] + source[1:].replace('//', '/')
if source.startswith('../'):
source = _ResolveParent(source, base_path_components)
if len(source):
result.append(source)
continue
result.append(base_path + source)
if debug:
print 'AddSource', org_source, result[len(result) - 1]
|
[
"def",
"_AddSources",
"(",
"sources",
",",
"base_path",
",",
"base_path_components",
",",
"result",
")",
":",
"# NOTE: gyp paths are always posix style.",
"for",
"source",
"in",
"sources",
":",
"if",
"not",
"len",
"(",
"source",
")",
"or",
"source",
".",
"startswith",
"(",
"'!!!'",
")",
"or",
"source",
".",
"startswith",
"(",
"'$'",
")",
":",
"continue",
"# variable expansion may lead to //.",
"org_source",
"=",
"source",
"source",
"=",
"source",
"[",
"0",
"]",
"+",
"source",
"[",
"1",
":",
"]",
".",
"replace",
"(",
"'//'",
",",
"'/'",
")",
"if",
"source",
".",
"startswith",
"(",
"'../'",
")",
":",
"source",
"=",
"_ResolveParent",
"(",
"source",
",",
"base_path_components",
")",
"if",
"len",
"(",
"source",
")",
":",
"result",
".",
"append",
"(",
"source",
")",
"continue",
"result",
".",
"append",
"(",
"base_path",
"+",
"source",
")",
"if",
"debug",
":",
"print",
"'AddSource'",
",",
"org_source",
",",
"result",
"[",
"len",
"(",
"result",
")",
"-",
"1",
"]"
] |
https://github.com/apiaryio/snowcrash/blob/b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3/tools/gyp/pylib/gyp/generator/analyzer.py#L137-L158
|
||
eventql/eventql
|
7ca0dbb2e683b525620ea30dc40540a22d5eb227
|
deps/3rdparty/spidermonkey/mozjs/media/webrtc/trunk/tools/gyp/pylib/gyp/generator/make.py
|
python
|
EscapeCppDefine
|
(s)
|
return s.replace('#', r'\#')
|
Escapes a CPP define so that it will reach the compiler unaltered.
|
Escapes a CPP define so that it will reach the compiler unaltered.
|
[
"Escapes",
"a",
"CPP",
"define",
"so",
"that",
"it",
"will",
"reach",
"the",
"compiler",
"unaltered",
"."
] |
def EscapeCppDefine(s):
"""Escapes a CPP define so that it will reach the compiler unaltered."""
s = EscapeShellArgument(s)
s = EscapeMakeVariableExpansion(s)
# '#' characters must be escaped even embedded in a string, else Make will
# treat it as the start of a comment.
return s.replace('#', r'\#')
|
[
"def",
"EscapeCppDefine",
"(",
"s",
")",
":",
"s",
"=",
"EscapeShellArgument",
"(",
"s",
")",
"s",
"=",
"EscapeMakeVariableExpansion",
"(",
"s",
")",
"# '#' characters must be escaped even embedded in a string, else Make will",
"# treat it as the start of a comment.",
"return",
"s",
".",
"replace",
"(",
"'#'",
",",
"r'\\#'",
")"
] |
https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/media/webrtc/trunk/tools/gyp/pylib/gyp/generator/make.py#L586-L592
|
|
TheLegendAli/DeepLab-Context
|
fb04e9e2fc2682490ad9f60533b9d6c4c0e0479c
|
scripts/cpp_lint.py
|
python
|
FileInfo.FullName
|
(self)
|
return os.path.abspath(self._filename).replace('\\', '/')
|
Make Windows paths like Unix.
|
Make Windows paths like Unix.
|
[
"Make",
"Windows",
"paths",
"like",
"Unix",
"."
] |
def FullName(self):
"""Make Windows paths like Unix."""
return os.path.abspath(self._filename).replace('\\', '/')
|
[
"def",
"FullName",
"(",
"self",
")",
":",
"return",
"os",
".",
"path",
".",
"abspath",
"(",
"self",
".",
"_filename",
")",
".",
"replace",
"(",
"'\\\\'",
",",
"'/'",
")"
] |
https://github.com/TheLegendAli/DeepLab-Context/blob/fb04e9e2fc2682490ad9f60533b9d6c4c0e0479c/scripts/cpp_lint.py#L881-L883
|
|
su2code/SU2
|
72b2fa977b64b9683a388920f05298a40d39e5c5
|
SU2_PY/SU2/util/ordered_bunch.py
|
python
|
OrderedBunch.__delattr__
|
(self, k)
|
Deletes attribute k if it exists, otherwise deletes key k. A KeyError
raised by deleting the key--such as when the key is missing--will
propagate as an AttributeError instead.
>>> b = OrderedBunch(lol=42)
>>> del b.values
Traceback (most recent call last):
...
AttributeError: 'OrderedBunch' object attribute 'values' is read-only
>>> del b.lol
>>> b.lol
Traceback (most recent call last):
...
AttributeError: lol
|
Deletes attribute k if it exists, otherwise deletes key k. A KeyError
raised by deleting the key--such as when the key is missing--will
propagate as an AttributeError instead.
>>> b = OrderedBunch(lol=42)
>>> del b.values
Traceback (most recent call last):
...
AttributeError: 'OrderedBunch' object attribute 'values' is read-only
>>> del b.lol
>>> b.lol
Traceback (most recent call last):
...
AttributeError: lol
|
[
"Deletes",
"attribute",
"k",
"if",
"it",
"exists",
"otherwise",
"deletes",
"key",
"k",
".",
"A",
"KeyError",
"raised",
"by",
"deleting",
"the",
"key",
"--",
"such",
"as",
"when",
"the",
"key",
"is",
"missing",
"--",
"will",
"propagate",
"as",
"an",
"AttributeError",
"instead",
".",
">>>",
"b",
"=",
"OrderedBunch",
"(",
"lol",
"=",
"42",
")",
">>>",
"del",
"b",
".",
"values",
"Traceback",
"(",
"most",
"recent",
"call",
"last",
")",
":",
"...",
"AttributeError",
":",
"OrderedBunch",
"object",
"attribute",
"values",
"is",
"read",
"-",
"only",
">>>",
"del",
"b",
".",
"lol",
">>>",
"b",
".",
"lol",
"Traceback",
"(",
"most",
"recent",
"call",
"last",
")",
":",
"...",
"AttributeError",
":",
"lol"
] |
def __delattr__(self, k):
""" Deletes attribute k if it exists, otherwise deletes key k. A KeyError
raised by deleting the key--such as when the key is missing--will
propagate as an AttributeError instead.
>>> b = OrderedBunch(lol=42)
>>> del b.values
Traceback (most recent call last):
...
AttributeError: 'OrderedBunch' object attribute 'values' is read-only
>>> del b.lol
>>> b.lol
Traceback (most recent call last):
...
AttributeError: lol
"""
try:
# Throws exception if not in prototype chain
object.__getattribute__(self, k)
except AttributeError:
try:
del self[k]
except KeyError:
raise AttributeError(k)
else:
object.__delattr__(self, k)
|
[
"def",
"__delattr__",
"(",
"self",
",",
"k",
")",
":",
"try",
":",
"# Throws exception if not in prototype chain",
"object",
".",
"__getattribute__",
"(",
"self",
",",
"k",
")",
"except",
"AttributeError",
":",
"try",
":",
"del",
"self",
"[",
"k",
"]",
"except",
"KeyError",
":",
"raise",
"AttributeError",
"(",
"k",
")",
"else",
":",
"object",
".",
"__delattr__",
"(",
"self",
",",
"k",
")"
] |
https://github.com/su2code/SU2/blob/72b2fa977b64b9683a388920f05298a40d39e5c5/SU2_PY/SU2/util/ordered_bunch.py#L169-L194
|
||
SoarGroup/Soar
|
a1c5e249499137a27da60533c72969eef3b8ab6b
|
scons/scons-local-4.1.0/SCons/Tool/dvipdf.py
|
python
|
DviPdfPsFunction
|
(XXXDviAction, target = None, source= None, env=None)
|
return result
|
A builder for DVI files that sets the TEXPICTS environment
variable before running dvi2ps or dvipdf.
|
A builder for DVI files that sets the TEXPICTS environment
variable before running dvi2ps or dvipdf.
|
[
"A",
"builder",
"for",
"DVI",
"files",
"that",
"sets",
"the",
"TEXPICTS",
"environment",
"variable",
"before",
"running",
"dvi2ps",
"or",
"dvipdf",
"."
] |
def DviPdfPsFunction(XXXDviAction, target = None, source= None, env=None):
"""A builder for DVI files that sets the TEXPICTS environment
variable before running dvi2ps or dvipdf."""
try:
abspath = source[0].attributes.path
except AttributeError :
abspath = ''
saved_env = SCons.Scanner.LaTeX.modify_env_var(env, 'TEXPICTS', abspath)
result = XXXDviAction(target, source, env)
if saved_env is _null:
try:
del env['ENV']['TEXPICTS']
except KeyError:
pass # was never set
else:
env['ENV']['TEXPICTS'] = saved_env
return result
|
[
"def",
"DviPdfPsFunction",
"(",
"XXXDviAction",
",",
"target",
"=",
"None",
",",
"source",
"=",
"None",
",",
"env",
"=",
"None",
")",
":",
"try",
":",
"abspath",
"=",
"source",
"[",
"0",
"]",
".",
"attributes",
".",
"path",
"except",
"AttributeError",
":",
"abspath",
"=",
"''",
"saved_env",
"=",
"SCons",
".",
"Scanner",
".",
"LaTeX",
".",
"modify_env_var",
"(",
"env",
",",
"'TEXPICTS'",
",",
"abspath",
")",
"result",
"=",
"XXXDviAction",
"(",
"target",
",",
"source",
",",
"env",
")",
"if",
"saved_env",
"is",
"_null",
":",
"try",
":",
"del",
"env",
"[",
"'ENV'",
"]",
"[",
"'TEXPICTS'",
"]",
"except",
"KeyError",
":",
"pass",
"# was never set",
"else",
":",
"env",
"[",
"'ENV'",
"]",
"[",
"'TEXPICTS'",
"]",
"=",
"saved_env",
"return",
"result"
] |
https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/Tool/dvipdf.py#L43-L64
|
|
PaddlePaddle/Paddle
|
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
|
python/paddle/tensor/einsum.py
|
python
|
build_view
|
(in_labels, out_labels)
|
return inv_map
|
Build an inverse map of dimension indices. Three conditions must hold for
the result to be meaningful.
First, no duplicate letter labels in each label string.
Second, the number of dots in dimout_labels >= that in in_labels.
Third, dots are contiguous in each label string.
Parameters
----------
in_labels:
The dimension labels to map to
out_labels:
The dimension labels to map from
Returns
-------
The inverse map from out_labels to in_labels. The length of the inverse map equals that of
out_labels. -1 is filled if there's no matching intput dimension for a specific label.
Examples
--------
in_labels = 'ij..', out_labels = '..ji'
inv_map = [2, 3, 1, 0]
in_labels = 'ij..', out_labels = '..kji'
inv_map = [2, 3, -1, 1, 0]
|
Build an inverse map of dimension indices. Three conditions must hold for
the result to be meaningful.
First, no duplicate letter labels in each label string.
Second, the number of dots in dimout_labels >= that in in_labels.
Third, dots are contiguous in each label string.
|
[
"Build",
"an",
"inverse",
"map",
"of",
"dimension",
"indices",
".",
"Three",
"conditions",
"must",
"hold",
"for",
"the",
"result",
"to",
"be",
"meaningful",
".",
"First",
"no",
"duplicate",
"letter",
"labels",
"in",
"each",
"label",
"string",
".",
"Second",
"the",
"number",
"of",
"dots",
"in",
"dimout_labels",
">",
"=",
"that",
"in",
"in_labels",
".",
"Third",
"dots",
"are",
"contiguous",
"in",
"each",
"label",
"string",
"."
] |
def build_view(in_labels, out_labels):
'''
Build an inverse map of dimension indices. Three conditions must hold for
the result to be meaningful.
First, no duplicate letter labels in each label string.
Second, the number of dots in dimout_labels >= that in in_labels.
Third, dots are contiguous in each label string.
Parameters
----------
in_labels:
The dimension labels to map to
out_labels:
The dimension labels to map from
Returns
-------
The inverse map from out_labels to in_labels. The length of the inverse map equals that of
out_labels. -1 is filled if there's no matching intput dimension for a specific label.
Examples
--------
in_labels = 'ij..', out_labels = '..ji'
inv_map = [2, 3, 1, 0]
in_labels = 'ij..', out_labels = '..kji'
inv_map = [2, 3, -1, 1, 0]
'''
inv_map = [-1] * len(out_labels)
# First build the broadcast dimension mapping
# Find the broadcast index range in out_labels
r = re.search(r'\.+', out_labels)
if r:
start, end = r.start(), r.end()
s = re.search(r'\.+', in_labels)
# fill the broadcast dimension indices from right to left.
if s:
for ax, dim in zip(
range(start, end)[::-1], range(s.start(), s.end())[::-1]):
inv_map[ax] = dim
# Now work on non-broadcast dimensions
if r:
it = itertools.chain(range(start), range(end, len(out_labels)))
else:
it = iter(range(len(out_labels)))
for i in it:
inv_map[i] = in_labels.find(out_labels[i])
return inv_map
|
[
"def",
"build_view",
"(",
"in_labels",
",",
"out_labels",
")",
":",
"inv_map",
"=",
"[",
"-",
"1",
"]",
"*",
"len",
"(",
"out_labels",
")",
"# First build the broadcast dimension mapping",
"# Find the broadcast index range in out_labels",
"r",
"=",
"re",
".",
"search",
"(",
"r'\\.+'",
",",
"out_labels",
")",
"if",
"r",
":",
"start",
",",
"end",
"=",
"r",
".",
"start",
"(",
")",
",",
"r",
".",
"end",
"(",
")",
"s",
"=",
"re",
".",
"search",
"(",
"r'\\.+'",
",",
"in_labels",
")",
"# fill the broadcast dimension indices from right to left.",
"if",
"s",
":",
"for",
"ax",
",",
"dim",
"in",
"zip",
"(",
"range",
"(",
"start",
",",
"end",
")",
"[",
":",
":",
"-",
"1",
"]",
",",
"range",
"(",
"s",
".",
"start",
"(",
")",
",",
"s",
".",
"end",
"(",
")",
")",
"[",
":",
":",
"-",
"1",
"]",
")",
":",
"inv_map",
"[",
"ax",
"]",
"=",
"dim",
"# Now work on non-broadcast dimensions ",
"if",
"r",
":",
"it",
"=",
"itertools",
".",
"chain",
"(",
"range",
"(",
"start",
")",
",",
"range",
"(",
"end",
",",
"len",
"(",
"out_labels",
")",
")",
")",
"else",
":",
"it",
"=",
"iter",
"(",
"range",
"(",
"len",
"(",
"out_labels",
")",
")",
")",
"for",
"i",
"in",
"it",
":",
"inv_map",
"[",
"i",
"]",
"=",
"in_labels",
".",
"find",
"(",
"out_labels",
"[",
"i",
"]",
")",
"return",
"inv_map"
] |
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/tensor/einsum.py#L145-L196
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_controls.py
|
python
|
ComboBox.Create
|
(*args, **kwargs)
|
return _controls_.ComboBox_Create(*args, **kwargs)
|
Create(Window parent, int id=-1, String value=EmptyString,
Point pos=DefaultPosition, Size size=DefaultSize,
List choices=EmptyList, long style=0, Validator validator=DefaultValidator,
String name=ChoiceNameStr) -> bool
Actually create the GUI wxComboBox control for 2-phase creation
|
Create(Window parent, int id=-1, String value=EmptyString,
Point pos=DefaultPosition, Size size=DefaultSize,
List choices=EmptyList, long style=0, Validator validator=DefaultValidator,
String name=ChoiceNameStr) -> bool
|
[
"Create",
"(",
"Window",
"parent",
"int",
"id",
"=",
"-",
"1",
"String",
"value",
"=",
"EmptyString",
"Point",
"pos",
"=",
"DefaultPosition",
"Size",
"size",
"=",
"DefaultSize",
"List",
"choices",
"=",
"EmptyList",
"long",
"style",
"=",
"0",
"Validator",
"validator",
"=",
"DefaultValidator",
"String",
"name",
"=",
"ChoiceNameStr",
")",
"-",
">",
"bool"
] |
def Create(*args, **kwargs):
"""
Create(Window parent, int id=-1, String value=EmptyString,
Point pos=DefaultPosition, Size size=DefaultSize,
List choices=EmptyList, long style=0, Validator validator=DefaultValidator,
String name=ChoiceNameStr) -> bool
Actually create the GUI wxComboBox control for 2-phase creation
"""
return _controls_.ComboBox_Create(*args, **kwargs)
|
[
"def",
"Create",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_controls_",
".",
"ComboBox_Create",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_controls.py#L600-L609
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/lib/scimath.py
|
python
|
logn
|
(n, x)
|
return nx.log(x)/nx.log(n)
|
Take log base n of x.
If `x` contains negative inputs, the answer is computed and returned in the
complex domain.
Parameters
----------
n : array_like
The integer base(s) in which the log is taken.
x : array_like
The value(s) whose log base `n` is (are) required.
Returns
-------
out : ndarray or scalar
The log base `n` of the `x` value(s). If `x` was a scalar, so is
`out`, otherwise an array is returned.
Examples
--------
>>> np.set_printoptions(precision=4)
>>> np.lib.scimath.logn(2, [4, 8])
array([2., 3.])
>>> np.lib.scimath.logn(2, [-4, -8, 8])
array([2.+4.5324j, 3.+4.5324j, 3.+0.j ])
|
Take log base n of x.
|
[
"Take",
"log",
"base",
"n",
"of",
"x",
"."
] |
def logn(n, x):
"""
Take log base n of x.
If `x` contains negative inputs, the answer is computed and returned in the
complex domain.
Parameters
----------
n : array_like
The integer base(s) in which the log is taken.
x : array_like
The value(s) whose log base `n` is (are) required.
Returns
-------
out : ndarray or scalar
The log base `n` of the `x` value(s). If `x` was a scalar, so is
`out`, otherwise an array is returned.
Examples
--------
>>> np.set_printoptions(precision=4)
>>> np.lib.scimath.logn(2, [4, 8])
array([2., 3.])
>>> np.lib.scimath.logn(2, [-4, -8, 8])
array([2.+4.5324j, 3.+4.5324j, 3.+0.j ])
"""
x = _fix_real_lt_zero(x)
n = _fix_real_lt_zero(n)
return nx.log(x)/nx.log(n)
|
[
"def",
"logn",
"(",
"n",
",",
"x",
")",
":",
"x",
"=",
"_fix_real_lt_zero",
"(",
"x",
")",
"n",
"=",
"_fix_real_lt_zero",
"(",
"n",
")",
"return",
"nx",
".",
"log",
"(",
"x",
")",
"/",
"nx",
".",
"log",
"(",
"n",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/lib/scimath.py#L332-L364
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/dataflow.py
|
python
|
BlockInfo.make_incoming
|
(self)
|
return ret
|
Create an incoming variable (due to not enough values being
available on our stack) and request its assignment from our
incoming blocks' own stacks.
|
Create an incoming variable (due to not enough values being
available on our stack) and request its assignment from our
incoming blocks' own stacks.
|
[
"Create",
"an",
"incoming",
"variable",
"(",
"due",
"to",
"not",
"enough",
"values",
"being",
"available",
"on",
"our",
"stack",
")",
"and",
"request",
"its",
"assignment",
"from",
"our",
"incoming",
"blocks",
"own",
"stacks",
"."
] |
def make_incoming(self):
"""
Create an incoming variable (due to not enough values being
available on our stack) and request its assignment from our
incoming blocks' own stacks.
"""
assert self.incoming_blocks
ret = self.make_temp('phi')
for ib in self.incoming_blocks:
stack_index = self.stack_offset + self.stack_effect
ib.request_outgoing(self, ret, stack_index)
return ret
|
[
"def",
"make_incoming",
"(",
"self",
")",
":",
"assert",
"self",
".",
"incoming_blocks",
"ret",
"=",
"self",
".",
"make_temp",
"(",
"'phi'",
")",
"for",
"ib",
"in",
"self",
".",
"incoming_blocks",
":",
"stack_index",
"=",
"self",
".",
"stack_offset",
"+",
"self",
".",
"stack_effect",
"ib",
".",
"request_outgoing",
"(",
"self",
",",
"ret",
",",
"stack_index",
")",
"return",
"ret"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/dataflow.py#L873-L884
|
|
savoirfairelinux/jami-daemon
|
7634487e9f568ae727f2d4cffbb735d23fa0324c
|
tools/jamictrl/controller.py
|
python
|
DRingCtrl.setActiveCodecList
|
(self, account=None, codec_list='')
|
Activate given codecs on an account. If no account is provided, active account is used
|
Activate given codecs on an account. If no account is provided, active account is used
|
[
"Activate",
"given",
"codecs",
"on",
"an",
"account",
".",
"If",
"no",
"account",
"is",
"provided",
"active",
"account",
"is",
"used"
] |
def setActiveCodecList(self, account=None, codec_list=''):
"""Activate given codecs on an account. If no account is provided, active account is used"""
account = self._valid_account(account)
if self.isAccountExists(account):
codec_list = [dbus.UInt32(x) for x in codec_list.split(',')]
self.configurationmanager.setActiveCodecList(account, codec_list)
|
[
"def",
"setActiveCodecList",
"(",
"self",
",",
"account",
"=",
"None",
",",
"codec_list",
"=",
"''",
")",
":",
"account",
"=",
"self",
".",
"_valid_account",
"(",
"account",
")",
"if",
"self",
".",
"isAccountExists",
"(",
"account",
")",
":",
"codec_list",
"=",
"[",
"dbus",
".",
"UInt32",
"(",
"x",
")",
"for",
"x",
"in",
"codec_list",
".",
"split",
"(",
"','",
")",
"]",
"self",
".",
"configurationmanager",
".",
"setActiveCodecList",
"(",
"account",
",",
"codec_list",
")"
] |
https://github.com/savoirfairelinux/jami-daemon/blob/7634487e9f568ae727f2d4cffbb735d23fa0324c/tools/jamictrl/controller.py#L386-L392
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/pkg_resources/__init__.py
|
python
|
ResourceManager._warn_unsafe_extraction_path
|
(path)
|
If the default extraction path is overridden and set to an insecure
location, such as /tmp, it opens up an opportunity for an attacker to
replace an extracted file with an unauthorized payload. Warn the user
if a known insecure location is used.
See Distribute #375 for more details.
|
If the default extraction path is overridden and set to an insecure
location, such as /tmp, it opens up an opportunity for an attacker to
replace an extracted file with an unauthorized payload. Warn the user
if a known insecure location is used.
|
[
"If",
"the",
"default",
"extraction",
"path",
"is",
"overridden",
"and",
"set",
"to",
"an",
"insecure",
"location",
"such",
"as",
"/",
"tmp",
"it",
"opens",
"up",
"an",
"opportunity",
"for",
"an",
"attacker",
"to",
"replace",
"an",
"extracted",
"file",
"with",
"an",
"unauthorized",
"payload",
".",
"Warn",
"the",
"user",
"if",
"a",
"known",
"insecure",
"location",
"is",
"used",
"."
] |
def _warn_unsafe_extraction_path(path):
"""
If the default extraction path is overridden and set to an insecure
location, such as /tmp, it opens up an opportunity for an attacker to
replace an extracted file with an unauthorized payload. Warn the user
if a known insecure location is used.
See Distribute #375 for more details.
"""
if os.name == 'nt' and not path.startswith(os.environ['windir']):
# On Windows, permissions are generally restrictive by default
# and temp directories are not writable by other users, so
# bypass the warning.
return
mode = os.stat(path).st_mode
if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
msg = (
"%s is writable by group/others and vulnerable to attack "
"when "
"used with get_resource_filename. Consider a more secure "
"location (set with .set_extraction_path or the "
"PYTHON_EGG_CACHE environment variable)." % path
)
warnings.warn(msg, UserWarning)
|
[
"def",
"_warn_unsafe_extraction_path",
"(",
"path",
")",
":",
"if",
"os",
".",
"name",
"==",
"'nt'",
"and",
"not",
"path",
".",
"startswith",
"(",
"os",
".",
"environ",
"[",
"'windir'",
"]",
")",
":",
"# On Windows, permissions are generally restrictive by default",
"# and temp directories are not writable by other users, so",
"# bypass the warning.",
"return",
"mode",
"=",
"os",
".",
"stat",
"(",
"path",
")",
".",
"st_mode",
"if",
"mode",
"&",
"stat",
".",
"S_IWOTH",
"or",
"mode",
"&",
"stat",
".",
"S_IWGRP",
":",
"msg",
"=",
"(",
"\"%s is writable by group/others and vulnerable to attack \"",
"\"when \"",
"\"used with get_resource_filename. Consider a more secure \"",
"\"location (set with .set_extraction_path or the \"",
"\"PYTHON_EGG_CACHE environment variable).\"",
"%",
"path",
")",
"warnings",
".",
"warn",
"(",
"msg",
",",
"UserWarning",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/pkg_resources/__init__.py#L1301-L1324
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/handlers.py
|
python
|
validate_ascii_metadata
|
(params, **kwargs)
|
Verify S3 Metadata only contains ascii characters.
From: http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html
"Amazon S3 stores user-defined metadata in lowercase. Each name, value pair
must conform to US-ASCII when using REST and UTF-8 when using SOAP or
browser-based uploads via POST."
|
Verify S3 Metadata only contains ascii characters.
|
[
"Verify",
"S3",
"Metadata",
"only",
"contains",
"ascii",
"characters",
"."
] |
def validate_ascii_metadata(params, **kwargs):
"""Verify S3 Metadata only contains ascii characters.
From: http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingMetadata.html
"Amazon S3 stores user-defined metadata in lowercase. Each name, value pair
must conform to US-ASCII when using REST and UTF-8 when using SOAP or
browser-based uploads via POST."
"""
metadata = params.get('Metadata')
if not metadata or not isinstance(metadata, dict):
# We have to at least type check the metadata as a dict type
# because this handler is called before param validation.
# We'll go ahead and return because the param validator will
# give a descriptive error message for us.
# We might need a post-param validation event.
return
for key, value in metadata.items():
try:
key.encode('ascii')
value.encode('ascii')
except UnicodeEncodeError as e:
error_msg = (
'Non ascii characters found in S3 metadata '
'for key "%s", value: "%s". \nS3 metadata can only '
'contain ASCII characters. ' % (key, value)
)
raise ParamValidationError(
report=error_msg)
|
[
"def",
"validate_ascii_metadata",
"(",
"params",
",",
"*",
"*",
"kwargs",
")",
":",
"metadata",
"=",
"params",
".",
"get",
"(",
"'Metadata'",
")",
"if",
"not",
"metadata",
"or",
"not",
"isinstance",
"(",
"metadata",
",",
"dict",
")",
":",
"# We have to at least type check the metadata as a dict type",
"# because this handler is called before param validation.",
"# We'll go ahead and return because the param validator will",
"# give a descriptive error message for us.",
"# We might need a post-param validation event.",
"return",
"for",
"key",
",",
"value",
"in",
"metadata",
".",
"items",
"(",
")",
":",
"try",
":",
"key",
".",
"encode",
"(",
"'ascii'",
")",
"value",
".",
"encode",
"(",
"'ascii'",
")",
"except",
"UnicodeEncodeError",
"as",
"e",
":",
"error_msg",
"=",
"(",
"'Non ascii characters found in S3 metadata '",
"'for key \"%s\", value: \"%s\". \\nS3 metadata can only '",
"'contain ASCII characters. '",
"%",
"(",
"key",
",",
"value",
")",
")",
"raise",
"ParamValidationError",
"(",
"report",
"=",
"error_msg",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/botocore/handlers.py#L517-L546
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/idlelib/config.py
|
python
|
IdleConf.GetCurrentKeySet
|
(self)
|
return result
|
Return CurrentKeys with 'darwin' modifications.
|
Return CurrentKeys with 'darwin' modifications.
|
[
"Return",
"CurrentKeys",
"with",
"darwin",
"modifications",
"."
] |
def GetCurrentKeySet(self):
"Return CurrentKeys with 'darwin' modifications."
result = self.GetKeySet(self.CurrentKeys())
if sys.platform == "darwin":
# macOS (OS X) Tk variants do not support the "Alt"
# keyboard modifier. Replace it with "Option".
# TODO (Ned?): the "Option" modifier does not work properly
# for Cocoa Tk and XQuartz Tk so we should not use it
# in the default 'OSX' keyset.
for k, v in result.items():
v2 = [ x.replace('<Alt-', '<Option-') for x in v ]
if v != v2:
result[k] = v2
return result
|
[
"def",
"GetCurrentKeySet",
"(",
"self",
")",
":",
"result",
"=",
"self",
".",
"GetKeySet",
"(",
"self",
".",
"CurrentKeys",
"(",
")",
")",
"if",
"sys",
".",
"platform",
"==",
"\"darwin\"",
":",
"# macOS (OS X) Tk variants do not support the \"Alt\"",
"# keyboard modifier. Replace it with \"Option\".",
"# TODO (Ned?): the \"Option\" modifier does not work properly",
"# for Cocoa Tk and XQuartz Tk so we should not use it",
"# in the default 'OSX' keyset.",
"for",
"k",
",",
"v",
"in",
"result",
".",
"items",
"(",
")",
":",
"v2",
"=",
"[",
"x",
".",
"replace",
"(",
"'<Alt-'",
",",
"'<Option-'",
")",
"for",
"x",
"in",
"v",
"]",
"if",
"v",
"!=",
"v2",
":",
"result",
"[",
"k",
"]",
"=",
"v2",
"return",
"result"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/idlelib/config.py#L538-L553
|
|
smartdevicelink/sdl_core
|
68f082169e0a40fccd9eb0db3c83911c28870f07
|
tools/InterfaceGenerator/generator/generators/SmartFactoryBase.py
|
python
|
CodeGenerator._gen_schema_item_fill
|
(self, member, since, until, deprecated, removed)
|
Generate schema item fill code.
Generates source code that fills new schema with item.
Keyword arguments:
member -- struct member/function parameter to process.
Returns:
String with schema item fill code.
|
Generate schema item fill code.
|
[
"Generate",
"schema",
"item",
"fill",
"code",
"."
] |
def _gen_schema_item_fill(self, member, since, until, deprecated, removed):
"""Generate schema item fill code.
Generates source code that fills new schema with item.
Keyword arguments:
member -- struct member/function parameter to process.
Returns:
String with schema item fill code.
"""
self._check_member_history(member)
if (since is not None or
member.since is not None):
if member.history is not None:
return self._impl_code_item_fill_template_with_version_and_history_vector.substitute(
name=member.name,
var_name=self._gen_schema_item_var_name(member),
is_mandatory=u"true" if member.is_mandatory is True else u"false",
since=member.since if member.since is not None else since if since is not None else "",
until=member.until if member.until is not None else until if until is not None else "",
deprecated=member.deprecated if member.deprecated is not None else deprecated if deprecated is not None else u"false",
removed=member.removed if member.removed is not None else removed if removed is not None else u"false",
vector_name=member.name)
else:
return self._impl_code_item_fill_template_with_version.substitute(
name=member.name,
var_name=self._gen_schema_item_var_name(member),
is_mandatory=u"true" if member.is_mandatory is True else u"false",
since=member.since if member.since is not None else since if since is not None else "",
until=member.until if member.until is not None else until if until is not None else "",
deprecated=member.deprecated if member.deprecated is not None else deprecated if deprecated is not None else u"false",
removed=member.removed if member.removed is not None else removed if removed is not None else u"false")
else:
return self._impl_code_item_fill_template.substitute(
name=member.name,
var_name=self._gen_schema_item_var_name(member),
is_mandatory=u"true" if member.is_mandatory is True else u"false")
|
[
"def",
"_gen_schema_item_fill",
"(",
"self",
",",
"member",
",",
"since",
",",
"until",
",",
"deprecated",
",",
"removed",
")",
":",
"self",
".",
"_check_member_history",
"(",
"member",
")",
"if",
"(",
"since",
"is",
"not",
"None",
"or",
"member",
".",
"since",
"is",
"not",
"None",
")",
":",
"if",
"member",
".",
"history",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_impl_code_item_fill_template_with_version_and_history_vector",
".",
"substitute",
"(",
"name",
"=",
"member",
".",
"name",
",",
"var_name",
"=",
"self",
".",
"_gen_schema_item_var_name",
"(",
"member",
")",
",",
"is_mandatory",
"=",
"u\"true\"",
"if",
"member",
".",
"is_mandatory",
"is",
"True",
"else",
"u\"false\"",
",",
"since",
"=",
"member",
".",
"since",
"if",
"member",
".",
"since",
"is",
"not",
"None",
"else",
"since",
"if",
"since",
"is",
"not",
"None",
"else",
"\"\"",
",",
"until",
"=",
"member",
".",
"until",
"if",
"member",
".",
"until",
"is",
"not",
"None",
"else",
"until",
"if",
"until",
"is",
"not",
"None",
"else",
"\"\"",
",",
"deprecated",
"=",
"member",
".",
"deprecated",
"if",
"member",
".",
"deprecated",
"is",
"not",
"None",
"else",
"deprecated",
"if",
"deprecated",
"is",
"not",
"None",
"else",
"u\"false\"",
",",
"removed",
"=",
"member",
".",
"removed",
"if",
"member",
".",
"removed",
"is",
"not",
"None",
"else",
"removed",
"if",
"removed",
"is",
"not",
"None",
"else",
"u\"false\"",
",",
"vector_name",
"=",
"member",
".",
"name",
")",
"else",
":",
"return",
"self",
".",
"_impl_code_item_fill_template_with_version",
".",
"substitute",
"(",
"name",
"=",
"member",
".",
"name",
",",
"var_name",
"=",
"self",
".",
"_gen_schema_item_var_name",
"(",
"member",
")",
",",
"is_mandatory",
"=",
"u\"true\"",
"if",
"member",
".",
"is_mandatory",
"is",
"True",
"else",
"u\"false\"",
",",
"since",
"=",
"member",
".",
"since",
"if",
"member",
".",
"since",
"is",
"not",
"None",
"else",
"since",
"if",
"since",
"is",
"not",
"None",
"else",
"\"\"",
",",
"until",
"=",
"member",
".",
"until",
"if",
"member",
".",
"until",
"is",
"not",
"None",
"else",
"until",
"if",
"until",
"is",
"not",
"None",
"else",
"\"\"",
",",
"deprecated",
"=",
"member",
".",
"deprecated",
"if",
"member",
".",
"deprecated",
"is",
"not",
"None",
"else",
"deprecated",
"if",
"deprecated",
"is",
"not",
"None",
"else",
"u\"false\"",
",",
"removed",
"=",
"member",
".",
"removed",
"if",
"member",
".",
"removed",
"is",
"not",
"None",
"else",
"removed",
"if",
"removed",
"is",
"not",
"None",
"else",
"u\"false\"",
")",
"else",
":",
"return",
"self",
".",
"_impl_code_item_fill_template",
".",
"substitute",
"(",
"name",
"=",
"member",
".",
"name",
",",
"var_name",
"=",
"self",
".",
"_gen_schema_item_var_name",
"(",
"member",
")",
",",
"is_mandatory",
"=",
"u\"true\"",
"if",
"member",
".",
"is_mandatory",
"is",
"True",
"else",
"u\"false\"",
")"
] |
https://github.com/smartdevicelink/sdl_core/blob/68f082169e0a40fccd9eb0db3c83911c28870f07/tools/InterfaceGenerator/generator/generators/SmartFactoryBase.py#L1072-L1111
|
||
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/logging/handlers.py
|
python
|
SMTPHandler.emit
|
(self, record)
|
Emit a record.
Format the record and send it to the specified addressees.
|
Emit a record.
|
[
"Emit",
"a",
"record",
"."
] |
def emit(self, record):
"""
Emit a record.
Format the record and send it to the specified addressees.
"""
try:
import smtplib
from email.utils import formatdate
port = self.mailport
if not port:
port = smtplib.SMTP_PORT
smtp = smtplib.SMTP(self.mailhost, port, timeout=self._timeout)
msg = self.format(record)
msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % (
self.fromaddr,
",".join(self.toaddrs),
self.getSubject(record),
formatdate(), msg)
if self.username:
if self.secure is not None:
smtp.ehlo()
smtp.starttls(*self.secure)
smtp.ehlo()
smtp.login(self.username, self.password)
smtp.sendmail(self.fromaddr, self.toaddrs, msg)
smtp.quit()
except (KeyboardInterrupt, SystemExit):
raise
except:
self.handleError(record)
|
[
"def",
"emit",
"(",
"self",
",",
"record",
")",
":",
"try",
":",
"import",
"smtplib",
"from",
"email",
".",
"utils",
"import",
"formatdate",
"port",
"=",
"self",
".",
"mailport",
"if",
"not",
"port",
":",
"port",
"=",
"smtplib",
".",
"SMTP_PORT",
"smtp",
"=",
"smtplib",
".",
"SMTP",
"(",
"self",
".",
"mailhost",
",",
"port",
",",
"timeout",
"=",
"self",
".",
"_timeout",
")",
"msg",
"=",
"self",
".",
"format",
"(",
"record",
")",
"msg",
"=",
"\"From: %s\\r\\nTo: %s\\r\\nSubject: %s\\r\\nDate: %s\\r\\n\\r\\n%s\"",
"%",
"(",
"self",
".",
"fromaddr",
",",
"\",\"",
".",
"join",
"(",
"self",
".",
"toaddrs",
")",
",",
"self",
".",
"getSubject",
"(",
"record",
")",
",",
"formatdate",
"(",
")",
",",
"msg",
")",
"if",
"self",
".",
"username",
":",
"if",
"self",
".",
"secure",
"is",
"not",
"None",
":",
"smtp",
".",
"ehlo",
"(",
")",
"smtp",
".",
"starttls",
"(",
"*",
"self",
".",
"secure",
")",
"smtp",
".",
"ehlo",
"(",
")",
"smtp",
".",
"login",
"(",
"self",
".",
"username",
",",
"self",
".",
"password",
")",
"smtp",
".",
"sendmail",
"(",
"self",
".",
"fromaddr",
",",
"self",
".",
"toaddrs",
",",
"msg",
")",
"smtp",
".",
"quit",
"(",
")",
"except",
"(",
"KeyboardInterrupt",
",",
"SystemExit",
")",
":",
"raise",
"except",
":",
"self",
".",
"handleError",
"(",
"record",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/logging/handlers.py#L916-L946
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/_misc.py
|
python
|
DateTime_IsLeapYear
|
(*args, **kwargs)
|
return _misc_.DateTime_IsLeapYear(*args, **kwargs)
|
DateTime_IsLeapYear(int year=Inv_Year, int cal=Gregorian) -> bool
|
DateTime_IsLeapYear(int year=Inv_Year, int cal=Gregorian) -> bool
|
[
"DateTime_IsLeapYear",
"(",
"int",
"year",
"=",
"Inv_Year",
"int",
"cal",
"=",
"Gregorian",
")",
"-",
">",
"bool"
] |
def DateTime_IsLeapYear(*args, **kwargs):
"""DateTime_IsLeapYear(int year=Inv_Year, int cal=Gregorian) -> bool"""
return _misc_.DateTime_IsLeapYear(*args, **kwargs)
|
[
"def",
"DateTime_IsLeapYear",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"DateTime_IsLeapYear",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_misc.py#L4249-L4251
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
samples/pySketch/pySketch.py
|
python
|
DrawingFrame.deselectAll
|
(self)
|
Deselect every DrawingObject in our document.
|
Deselect every DrawingObject in our document.
|
[
"Deselect",
"every",
"DrawingObject",
"in",
"our",
"document",
"."
] |
def deselectAll(self):
""" Deselect every DrawingObject in our document.
"""
self.selection = []
self.requestRedraw()
self._adjustMenus()
|
[
"def",
"deselectAll",
"(",
"self",
")",
":",
"self",
".",
"selection",
"=",
"[",
"]",
"self",
".",
"requestRedraw",
"(",
")",
"self",
".",
"_adjustMenus",
"(",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/samples/pySketch/pySketch.py#L1150-L1155
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/prompt-toolkit/py2/prompt_toolkit/document.py
|
python
|
Document.get_cursor_right_position
|
(self, count=1)
|
return min(count, len(self.current_line_after_cursor))
|
Relative position for cursor_right.
|
Relative position for cursor_right.
|
[
"Relative",
"position",
"for",
"cursor_right",
"."
] |
def get_cursor_right_position(self, count=1):
"""
Relative position for cursor_right.
"""
if count < 0:
return self.get_cursor_left_position(-count)
return min(count, len(self.current_line_after_cursor))
|
[
"def",
"get_cursor_right_position",
"(",
"self",
",",
"count",
"=",
"1",
")",
":",
"if",
"count",
"<",
"0",
":",
"return",
"self",
".",
"get_cursor_left_position",
"(",
"-",
"count",
")",
"return",
"min",
"(",
"count",
",",
"len",
"(",
"self",
".",
"current_line_after_cursor",
")",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/prompt-toolkit/py2/prompt_toolkit/document.py#L614-L621
|
|
apple/turicreate
|
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
|
deps/src/libxml2-2.9.1/python/libxml2class.py
|
python
|
xmlNode.freeNode
|
(self)
|
Free a node, this is a recursive behaviour, all the
children are freed too. This doesn't unlink the child from
the list, use xmlUnlinkNode() first.
|
Free a node, this is a recursive behaviour, all the
children are freed too. This doesn't unlink the child from
the list, use xmlUnlinkNode() first.
|
[
"Free",
"a",
"node",
"this",
"is",
"a",
"recursive",
"behaviour",
"all",
"the",
"children",
"are",
"freed",
"too",
".",
"This",
"doesn",
"t",
"unlink",
"the",
"child",
"from",
"the",
"list",
"use",
"xmlUnlinkNode",
"()",
"first",
"."
] |
def freeNode(self):
"""Free a node, this is a recursive behaviour, all the
children are freed too. This doesn't unlink the child from
the list, use xmlUnlinkNode() first. """
libxml2mod.xmlFreeNode(self._o)
|
[
"def",
"freeNode",
"(",
"self",
")",
":",
"libxml2mod",
".",
"xmlFreeNode",
"(",
"self",
".",
"_o",
")"
] |
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L2439-L2443
|
||
kamyu104/LeetCode-Solutions
|
77605708a927ea3b85aee5a479db733938c7c211
|
Python/find-all-duplicates-in-an-array.py
|
python
|
Solution.findDuplicates
|
(self, nums)
|
return result
|
:type nums: List[int]
:rtype: List[int]
|
:type nums: List[int]
:rtype: List[int]
|
[
":",
"type",
"nums",
":",
"List",
"[",
"int",
"]",
":",
"rtype",
":",
"List",
"[",
"int",
"]"
] |
def findDuplicates(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
"""
result = []
for i in nums:
if nums[abs(i)-1] < 0:
result.append(abs(i))
else:
nums[abs(i)-1] *= -1
return result
|
[
"def",
"findDuplicates",
"(",
"self",
",",
"nums",
")",
":",
"result",
"=",
"[",
"]",
"for",
"i",
"in",
"nums",
":",
"if",
"nums",
"[",
"abs",
"(",
"i",
")",
"-",
"1",
"]",
"<",
"0",
":",
"result",
".",
"append",
"(",
"abs",
"(",
"i",
")",
")",
"else",
":",
"nums",
"[",
"abs",
"(",
"i",
")",
"-",
"1",
"]",
"*=",
"-",
"1",
"return",
"result"
] |
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/find-all-duplicates-in-an-array.py#L5-L16
|
|
htcondor/htcondor
|
4829724575176d1d6c936e4693dfd78a728569b0
|
bindings/python/htcondor/htchirp/htchirp.py
|
python
|
HTChirp._write
|
(
self, fd, data, length, offset=None, stride_length=None, stride_skip=None
)
|
return wb
|
Write to a file on the Chirp server
:param fd: File descriptor
:param data: Data to write
:param length: Number of bytes to write
:param offset: Skip this many bytes when writing
:param stride_length: Write this many bytes every stride_skip bytes
:param stride_skip: Skip this many bytes between writes
:returns: Number of bytes written
|
Write to a file on the Chirp server
|
[
"Write",
"to",
"a",
"file",
"on",
"the",
"Chirp",
"server"
] |
def _write(
self, fd, data, length, offset=None, stride_length=None, stride_skip=None
):
"""Write to a file on the Chirp server
:param fd: File descriptor
:param data: Data to write
:param length: Number of bytes to write
:param offset: Skip this many bytes when writing
:param stride_length: Write this many bytes every stride_skip bytes
:param stride_skip: Skip this many bytes between writes
:returns: Number of bytes written
"""
# check that client is connected
self._check_connection()
if offset is None and (stride_length, stride_skip) != (None, None):
offset = 0 # assume offset is 0 if stride given but not offset
if (offset, stride_length, stride_skip) == (None, None, None):
# write
self._simple_command(
"write {0} {1}\n".format(int(fd), int(length)), get_response=False
)
elif (offset != None) and (stride_length, stride_skip) == (None, None):
# pwrite
self._simple_command(
"pwrite {0} {1} {2}\n".format(int(fd), int(length), int(offset)),
get_response=False,
)
elif (stride_length, stride_skip) != (None, None):
# swrite
wb = self._simple_command(
"swrite {0} {1} {2} {3} {4}\n".format(
int(fd),
int(length),
int(offset),
int(stride_length),
int(stride_skip),
),
get_response=False,
)
else:
raise self.InvalidRequest(
"Both stride_length and stride_skip must be specified"
)
wfd = self.socket.makefile("wb") # open socket as a file object
wfd.write(data) # write data
wfd.close() # close socket file object
wb = int(self._simple_response()) # get bytes written
return wb
|
[
"def",
"_write",
"(",
"self",
",",
"fd",
",",
"data",
",",
"length",
",",
"offset",
"=",
"None",
",",
"stride_length",
"=",
"None",
",",
"stride_skip",
"=",
"None",
")",
":",
"# check that client is connected",
"self",
".",
"_check_connection",
"(",
")",
"if",
"offset",
"is",
"None",
"and",
"(",
"stride_length",
",",
"stride_skip",
")",
"!=",
"(",
"None",
",",
"None",
")",
":",
"offset",
"=",
"0",
"# assume offset is 0 if stride given but not offset",
"if",
"(",
"offset",
",",
"stride_length",
",",
"stride_skip",
")",
"==",
"(",
"None",
",",
"None",
",",
"None",
")",
":",
"# write",
"self",
".",
"_simple_command",
"(",
"\"write {0} {1}\\n\"",
".",
"format",
"(",
"int",
"(",
"fd",
")",
",",
"int",
"(",
"length",
")",
")",
",",
"get_response",
"=",
"False",
")",
"elif",
"(",
"offset",
"!=",
"None",
")",
"and",
"(",
"stride_length",
",",
"stride_skip",
")",
"==",
"(",
"None",
",",
"None",
")",
":",
"# pwrite",
"self",
".",
"_simple_command",
"(",
"\"pwrite {0} {1} {2}\\n\"",
".",
"format",
"(",
"int",
"(",
"fd",
")",
",",
"int",
"(",
"length",
")",
",",
"int",
"(",
"offset",
")",
")",
",",
"get_response",
"=",
"False",
",",
")",
"elif",
"(",
"stride_length",
",",
"stride_skip",
")",
"!=",
"(",
"None",
",",
"None",
")",
":",
"# swrite",
"wb",
"=",
"self",
".",
"_simple_command",
"(",
"\"swrite {0} {1} {2} {3} {4}\\n\"",
".",
"format",
"(",
"int",
"(",
"fd",
")",
",",
"int",
"(",
"length",
")",
",",
"int",
"(",
"offset",
")",
",",
"int",
"(",
"stride_length",
")",
",",
"int",
"(",
"stride_skip",
")",
",",
")",
",",
"get_response",
"=",
"False",
",",
")",
"else",
":",
"raise",
"self",
".",
"InvalidRequest",
"(",
"\"Both stride_length and stride_skip must be specified\"",
")",
"wfd",
"=",
"self",
".",
"socket",
".",
"makefile",
"(",
"\"wb\"",
")",
"# open socket as a file object",
"wfd",
".",
"write",
"(",
"data",
")",
"# write data",
"wfd",
".",
"close",
"(",
")",
"# close socket file object",
"wb",
"=",
"int",
"(",
"self",
".",
"_simple_response",
"(",
")",
")",
"# get bytes written",
"return",
"wb"
] |
https://github.com/htcondor/htcondor/blob/4829724575176d1d6c936e4693dfd78a728569b0/bindings/python/htcondor/htchirp/htchirp.py#L460-L517
|
|
LiquidPlayer/LiquidCore
|
9405979363f2353ac9a71ad8ab59685dd7f919c9
|
deps/node-10.15.3/deps/v8/third_party/jinja2/idtracking.py
|
python
|
FrameSymbolVisitor.visit_Block
|
(self, node, **kwargs)
|
Stop visiting at blocks.
|
Stop visiting at blocks.
|
[
"Stop",
"visiting",
"at",
"blocks",
"."
] |
def visit_Block(self, node, **kwargs):
"""Stop visiting at blocks."""
|
[
"def",
"visit_Block",
"(",
"self",
",",
"node",
",",
"*",
"*",
"kwargs",
")",
":"
] |
https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/deps/v8/third_party/jinja2/idtracking.py#L282-L283
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/scipy/scipy/optimize/linesearch.py
|
python
|
scalar_search_wolfe1
|
(phi, derphi, phi0=None, old_phi0=None, derphi0=None,
c1=1e-4, c2=0.9,
amax=50, amin=1e-8, xtol=1e-14)
|
return stp, phi1, phi0
|
Scalar function search for alpha that satisfies strong Wolfe conditions
alpha > 0 is assumed to be a descent direction.
Parameters
----------
phi : callable phi(alpha)
Function at point `alpha`
derphi : callable dphi(alpha)
Derivative `d phi(alpha)/ds`. Returns a scalar.
phi0 : float, optional
Value of `f` at 0
old_phi0 : float, optional
Value of `f` at the previous point
derphi0 : float, optional
Value `derphi` at 0
c1, c2 : float, optional
Wolfe parameters
amax, amin : float, optional
Maximum and minimum step size
xtol : float, optional
Relative tolerance for an acceptable step.
Returns
-------
alpha : float
Step size, or None if no suitable step was found
phi : float
Value of `phi` at the new point `alpha`
phi0 : float
Value of `phi` at `alpha=0`
Notes
-----
Uses routine DCSRCH from MINPACK.
|
Scalar function search for alpha that satisfies strong Wolfe conditions
|
[
"Scalar",
"function",
"search",
"for",
"alpha",
"that",
"satisfies",
"strong",
"Wolfe",
"conditions"
] |
def scalar_search_wolfe1(phi, derphi, phi0=None, old_phi0=None, derphi0=None,
c1=1e-4, c2=0.9,
amax=50, amin=1e-8, xtol=1e-14):
"""
Scalar function search for alpha that satisfies strong Wolfe conditions
alpha > 0 is assumed to be a descent direction.
Parameters
----------
phi : callable phi(alpha)
Function at point `alpha`
derphi : callable dphi(alpha)
Derivative `d phi(alpha)/ds`. Returns a scalar.
phi0 : float, optional
Value of `f` at 0
old_phi0 : float, optional
Value of `f` at the previous point
derphi0 : float, optional
Value `derphi` at 0
c1, c2 : float, optional
Wolfe parameters
amax, amin : float, optional
Maximum and minimum step size
xtol : float, optional
Relative tolerance for an acceptable step.
Returns
-------
alpha : float
Step size, or None if no suitable step was found
phi : float
Value of `phi` at the new point `alpha`
phi0 : float
Value of `phi` at `alpha=0`
Notes
-----
Uses routine DCSRCH from MINPACK.
"""
if phi0 is None:
phi0 = phi(0.)
if derphi0 is None:
derphi0 = derphi(0.)
if old_phi0 is not None and derphi0 != 0:
alpha1 = min(1.0, 1.01*2*(phi0 - old_phi0)/derphi0)
if alpha1 < 0:
alpha1 = 1.0
else:
alpha1 = 1.0
phi1 = phi0
derphi1 = derphi0
isave = np.zeros((2,), np.intc)
dsave = np.zeros((13,), float)
task = b'START'
maxiter = 100
for i in xrange(maxiter):
stp, phi1, derphi1, task = minpack2.dcsrch(alpha1, phi1, derphi1,
c1, c2, xtol, task,
amin, amax, isave, dsave)
if task[:2] == b'FG':
alpha1 = stp
phi1 = phi(stp)
derphi1 = derphi(stp)
else:
break
else:
# maxiter reached, the line search did not converge
stp = None
if task[:5] == b'ERROR' or task[:4] == b'WARN':
stp = None # failed
return stp, phi1, phi0
|
[
"def",
"scalar_search_wolfe1",
"(",
"phi",
",",
"derphi",
",",
"phi0",
"=",
"None",
",",
"old_phi0",
"=",
"None",
",",
"derphi0",
"=",
"None",
",",
"c1",
"=",
"1e-4",
",",
"c2",
"=",
"0.9",
",",
"amax",
"=",
"50",
",",
"amin",
"=",
"1e-8",
",",
"xtol",
"=",
"1e-14",
")",
":",
"if",
"phi0",
"is",
"None",
":",
"phi0",
"=",
"phi",
"(",
"0.",
")",
"if",
"derphi0",
"is",
"None",
":",
"derphi0",
"=",
"derphi",
"(",
"0.",
")",
"if",
"old_phi0",
"is",
"not",
"None",
"and",
"derphi0",
"!=",
"0",
":",
"alpha1",
"=",
"min",
"(",
"1.0",
",",
"1.01",
"*",
"2",
"*",
"(",
"phi0",
"-",
"old_phi0",
")",
"/",
"derphi0",
")",
"if",
"alpha1",
"<",
"0",
":",
"alpha1",
"=",
"1.0",
"else",
":",
"alpha1",
"=",
"1.0",
"phi1",
"=",
"phi0",
"derphi1",
"=",
"derphi0",
"isave",
"=",
"np",
".",
"zeros",
"(",
"(",
"2",
",",
")",
",",
"np",
".",
"intc",
")",
"dsave",
"=",
"np",
".",
"zeros",
"(",
"(",
"13",
",",
")",
",",
"float",
")",
"task",
"=",
"b'START'",
"maxiter",
"=",
"100",
"for",
"i",
"in",
"xrange",
"(",
"maxiter",
")",
":",
"stp",
",",
"phi1",
",",
"derphi1",
",",
"task",
"=",
"minpack2",
".",
"dcsrch",
"(",
"alpha1",
",",
"phi1",
",",
"derphi1",
",",
"c1",
",",
"c2",
",",
"xtol",
",",
"task",
",",
"amin",
",",
"amax",
",",
"isave",
",",
"dsave",
")",
"if",
"task",
"[",
":",
"2",
"]",
"==",
"b'FG'",
":",
"alpha1",
"=",
"stp",
"phi1",
"=",
"phi",
"(",
"stp",
")",
"derphi1",
"=",
"derphi",
"(",
"stp",
")",
"else",
":",
"break",
"else",
":",
"# maxiter reached, the line search did not converge",
"stp",
"=",
"None",
"if",
"task",
"[",
":",
"5",
"]",
"==",
"b'ERROR'",
"or",
"task",
"[",
":",
"4",
"]",
"==",
"b'WARN'",
":",
"stp",
"=",
"None",
"# failed",
"return",
"stp",
",",
"phi1",
",",
"phi0"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/optimize/linesearch.py#L106-L185
|
|
miyosuda/TensorFlowAndroidMNIST
|
7b5a4603d2780a8a2834575706e9001977524007
|
jni-build/jni/include/tensorflow/contrib/learn/python/learn/models.py
|
python
|
linear_regression
|
(x, y, init_mean=None, init_stddev=1.0)
|
Creates linear regression TensorFlow subgraph.
Args:
x: tensor or placeholder for input features.
y: tensor or placeholder for target.
init_mean: the mean value to use for initialization.
init_stddev: the standard devation to use for initialization.
Returns:
Predictions and loss tensors.
Side effects:
The variables linear_regression.weights and linear_regression.bias are
initialized as follows. If init_mean is not None, then initialization
will be done using a random normal initializer with the given init_mean
and init_stddv. (These may be set to 0.0 each if a zero initialization
is desirable for convex use cases.) If init_mean is None, then the
uniform_unit_scaling_initialzer will be used.
|
Creates linear regression TensorFlow subgraph.
|
[
"Creates",
"linear",
"regression",
"TensorFlow",
"subgraph",
"."
] |
def linear_regression(x, y, init_mean=None, init_stddev=1.0):
"""Creates linear regression TensorFlow subgraph.
Args:
x: tensor or placeholder for input features.
y: tensor or placeholder for target.
init_mean: the mean value to use for initialization.
init_stddev: the standard devation to use for initialization.
Returns:
Predictions and loss tensors.
Side effects:
The variables linear_regression.weights and linear_regression.bias are
initialized as follows. If init_mean is not None, then initialization
will be done using a random normal initializer with the given init_mean
and init_stddv. (These may be set to 0.0 each if a zero initialization
is desirable for convex use cases.) If init_mean is None, then the
uniform_unit_scaling_initialzer will be used.
"""
with vs.variable_scope('linear_regression'):
logging_ops.histogram_summary('linear_regression.x', x)
logging_ops.histogram_summary('linear_regression.y', y)
dtype = x.dtype.base_dtype
y_shape = y.get_shape()
if len(y_shape) == 1:
output_shape = 1
else:
output_shape = y_shape[1]
# Set up the requested initialization.
if init_mean is None:
weights = vs.get_variable(
'weights', [x.get_shape()[1], output_shape], dtype=dtype)
bias = vs.get_variable('bias', [output_shape], dtype=dtype)
else:
weights = vs.get_variable('weights', [x.get_shape()[1], output_shape],
initializer=init_ops.random_normal_initializer(
init_mean, init_stddev, dtype=dtype),
dtype=dtype)
bias = vs.get_variable('bias', [output_shape],
initializer=init_ops.random_normal_initializer(
init_mean, init_stddev, dtype=dtype),
dtype=dtype)
logging_ops.histogram_summary('linear_regression.weights', weights)
logging_ops.histogram_summary('linear_regression.bias', bias)
return losses_ops.mean_squared_error_regressor(x, y, weights, bias)
|
[
"def",
"linear_regression",
"(",
"x",
",",
"y",
",",
"init_mean",
"=",
"None",
",",
"init_stddev",
"=",
"1.0",
")",
":",
"with",
"vs",
".",
"variable_scope",
"(",
"'linear_regression'",
")",
":",
"logging_ops",
".",
"histogram_summary",
"(",
"'linear_regression.x'",
",",
"x",
")",
"logging_ops",
".",
"histogram_summary",
"(",
"'linear_regression.y'",
",",
"y",
")",
"dtype",
"=",
"x",
".",
"dtype",
".",
"base_dtype",
"y_shape",
"=",
"y",
".",
"get_shape",
"(",
")",
"if",
"len",
"(",
"y_shape",
")",
"==",
"1",
":",
"output_shape",
"=",
"1",
"else",
":",
"output_shape",
"=",
"y_shape",
"[",
"1",
"]",
"# Set up the requested initialization.",
"if",
"init_mean",
"is",
"None",
":",
"weights",
"=",
"vs",
".",
"get_variable",
"(",
"'weights'",
",",
"[",
"x",
".",
"get_shape",
"(",
")",
"[",
"1",
"]",
",",
"output_shape",
"]",
",",
"dtype",
"=",
"dtype",
")",
"bias",
"=",
"vs",
".",
"get_variable",
"(",
"'bias'",
",",
"[",
"output_shape",
"]",
",",
"dtype",
"=",
"dtype",
")",
"else",
":",
"weights",
"=",
"vs",
".",
"get_variable",
"(",
"'weights'",
",",
"[",
"x",
".",
"get_shape",
"(",
")",
"[",
"1",
"]",
",",
"output_shape",
"]",
",",
"initializer",
"=",
"init_ops",
".",
"random_normal_initializer",
"(",
"init_mean",
",",
"init_stddev",
",",
"dtype",
"=",
"dtype",
")",
",",
"dtype",
"=",
"dtype",
")",
"bias",
"=",
"vs",
".",
"get_variable",
"(",
"'bias'",
",",
"[",
"output_shape",
"]",
",",
"initializer",
"=",
"init_ops",
".",
"random_normal_initializer",
"(",
"init_mean",
",",
"init_stddev",
",",
"dtype",
"=",
"dtype",
")",
",",
"dtype",
"=",
"dtype",
")",
"logging_ops",
".",
"histogram_summary",
"(",
"'linear_regression.weights'",
",",
"weights",
")",
"logging_ops",
".",
"histogram_summary",
"(",
"'linear_regression.bias'",
",",
"bias",
")",
"return",
"losses_ops",
".",
"mean_squared_error_regressor",
"(",
"x",
",",
"y",
",",
"weights",
",",
"bias",
")"
] |
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/learn/python/learn/models.py#L61-L106
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/_pyio.py
|
python
|
IOBase.__enter__
|
(self)
|
return self
|
Context management protocol. Returns self (an instance of IOBase).
|
Context management protocol. Returns self (an instance of IOBase).
|
[
"Context",
"management",
"protocol",
".",
"Returns",
"self",
"(",
"an",
"instance",
"of",
"IOBase",
")",
"."
] |
def __enter__(self): # That's a forward reference
"""Context management protocol. Returns self (an instance of IOBase)."""
self._checkClosed()
return self
|
[
"def",
"__enter__",
"(",
"self",
")",
":",
"# That's a forward reference",
"self",
".",
"_checkClosed",
"(",
")",
"return",
"self"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/_pyio.py#L448-L451
|
|
aimerykong/Low-Rank-Bilinear-Pooling
|
487eb2c857fd9c95357a5166b0c15ad0fe135b28
|
caffe-20160312/scripts/cpp_lint.py
|
python
|
FileInfo.Extension
|
(self)
|
return self.Split()[2]
|
File extension - text following the final period.
|
File extension - text following the final period.
|
[
"File",
"extension",
"-",
"text",
"following",
"the",
"final",
"period",
"."
] |
def Extension(self):
"""File extension - text following the final period."""
return self.Split()[2]
|
[
"def",
"Extension",
"(",
"self",
")",
":",
"return",
"self",
".",
"Split",
"(",
")",
"[",
"2",
"]"
] |
https://github.com/aimerykong/Low-Rank-Bilinear-Pooling/blob/487eb2c857fd9c95357a5166b0c15ad0fe135b28/caffe-20160312/scripts/cpp_lint.py#L948-L950
|
|
hpi-xnor/BMXNet-v2
|
af2b1859eafc5c721b1397cef02f946aaf2ce20d
|
example/gluon/house_prices/kaggle_k_fold_cross_validation.py
|
python
|
k_fold_cross_valid
|
(k, epochs, verbose_epoch, X_train, y_train,
learning_rate, weight_decay, batch_size)
|
return train_loss_sum / k, test_loss_sum / k
|
Conducts k-fold cross validation for the model.
|
Conducts k-fold cross validation for the model.
|
[
"Conducts",
"k",
"-",
"fold",
"cross",
"validation",
"for",
"the",
"model",
"."
] |
def k_fold_cross_valid(k, epochs, verbose_epoch, X_train, y_train,
learning_rate, weight_decay, batch_size):
"""Conducts k-fold cross validation for the model."""
assert k > 1
fold_size = X_train.shape[0] // k
train_loss_sum = 0.0
test_loss_sum = 0.0
for test_idx in range(k):
X_val_test = X_train[test_idx * fold_size: (test_idx + 1) *
fold_size, :]
y_val_test = y_train[test_idx * fold_size: (test_idx + 1) * fold_size]
val_train_defined = False
for i in range(k):
if i != test_idx:
X_cur_fold = X_train[i * fold_size: (i + 1) * fold_size, :]
y_cur_fold = y_train[i * fold_size: (i + 1) * fold_size]
if not val_train_defined:
X_val_train = X_cur_fold
y_val_train = y_cur_fold
val_train_defined = True
else:
X_val_train = nd.concat(X_val_train, X_cur_fold, dim=0)
y_val_train = nd.concat(y_val_train, y_cur_fold, dim=0)
net = get_net()
train_loss = train(net, X_val_train, y_val_train, epochs, verbose_epoch,
learning_rate, weight_decay, batch_size)
train_loss_sum += train_loss
test_loss = get_rmse_log(net, X_val_test, y_val_test)
print("Test loss: %f" % test_loss)
test_loss_sum += test_loss
return train_loss_sum / k, test_loss_sum / k
|
[
"def",
"k_fold_cross_valid",
"(",
"k",
",",
"epochs",
",",
"verbose_epoch",
",",
"X_train",
",",
"y_train",
",",
"learning_rate",
",",
"weight_decay",
",",
"batch_size",
")",
":",
"assert",
"k",
">",
"1",
"fold_size",
"=",
"X_train",
".",
"shape",
"[",
"0",
"]",
"//",
"k",
"train_loss_sum",
"=",
"0.0",
"test_loss_sum",
"=",
"0.0",
"for",
"test_idx",
"in",
"range",
"(",
"k",
")",
":",
"X_val_test",
"=",
"X_train",
"[",
"test_idx",
"*",
"fold_size",
":",
"(",
"test_idx",
"+",
"1",
")",
"*",
"fold_size",
",",
":",
"]",
"y_val_test",
"=",
"y_train",
"[",
"test_idx",
"*",
"fold_size",
":",
"(",
"test_idx",
"+",
"1",
")",
"*",
"fold_size",
"]",
"val_train_defined",
"=",
"False",
"for",
"i",
"in",
"range",
"(",
"k",
")",
":",
"if",
"i",
"!=",
"test_idx",
":",
"X_cur_fold",
"=",
"X_train",
"[",
"i",
"*",
"fold_size",
":",
"(",
"i",
"+",
"1",
")",
"*",
"fold_size",
",",
":",
"]",
"y_cur_fold",
"=",
"y_train",
"[",
"i",
"*",
"fold_size",
":",
"(",
"i",
"+",
"1",
")",
"*",
"fold_size",
"]",
"if",
"not",
"val_train_defined",
":",
"X_val_train",
"=",
"X_cur_fold",
"y_val_train",
"=",
"y_cur_fold",
"val_train_defined",
"=",
"True",
"else",
":",
"X_val_train",
"=",
"nd",
".",
"concat",
"(",
"X_val_train",
",",
"X_cur_fold",
",",
"dim",
"=",
"0",
")",
"y_val_train",
"=",
"nd",
".",
"concat",
"(",
"y_val_train",
",",
"y_cur_fold",
",",
"dim",
"=",
"0",
")",
"net",
"=",
"get_net",
"(",
")",
"train_loss",
"=",
"train",
"(",
"net",
",",
"X_val_train",
",",
"y_val_train",
",",
"epochs",
",",
"verbose_epoch",
",",
"learning_rate",
",",
"weight_decay",
",",
"batch_size",
")",
"train_loss_sum",
"+=",
"train_loss",
"test_loss",
"=",
"get_rmse_log",
"(",
"net",
",",
"X_val_test",
",",
"y_val_test",
")",
"print",
"(",
"\"Test loss: %f\"",
"%",
"test_loss",
")",
"test_loss_sum",
"+=",
"test_loss",
"return",
"train_loss_sum",
"/",
"k",
",",
"test_loss_sum",
"/",
"k"
] |
https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/example/gluon/house_prices/kaggle_k_fold_cross_validation.py#L104-L135
|
|
MhLiao/TextBoxes_plusplus
|
39d4898de1504c53a2ed3d67966a57b3595836d0
|
python/caffe/io.py
|
python
|
Transformer.set_mean
|
(self, in_, mean)
|
Set the mean to subtract for centering the data.
Parameters
----------
in_ : which input to assign this mean.
mean : mean ndarray (input dimensional or broadcastable)
|
Set the mean to subtract for centering the data.
|
[
"Set",
"the",
"mean",
"to",
"subtract",
"for",
"centering",
"the",
"data",
"."
] |
def set_mean(self, in_, mean):
"""
Set the mean to subtract for centering the data.
Parameters
----------
in_ : which input to assign this mean.
mean : mean ndarray (input dimensional or broadcastable)
"""
self.__check_input(in_)
ms = mean.shape
if mean.ndim == 1:
# broadcast channels
if ms[0] != self.inputs[in_][1]:
raise ValueError('Mean channels incompatible with input.')
mean = mean[:, np.newaxis, np.newaxis]
else:
# elementwise mean
if len(ms) == 2:
ms = (1,) + ms
if len(ms) != 3:
raise ValueError('Mean shape invalid')
if ms != self.inputs[in_][1:]:
raise ValueError('Mean shape incompatible with input shape.')
self.mean[in_] = mean
|
[
"def",
"set_mean",
"(",
"self",
",",
"in_",
",",
"mean",
")",
":",
"self",
".",
"__check_input",
"(",
"in_",
")",
"ms",
"=",
"mean",
".",
"shape",
"if",
"mean",
".",
"ndim",
"==",
"1",
":",
"# broadcast channels",
"if",
"ms",
"[",
"0",
"]",
"!=",
"self",
".",
"inputs",
"[",
"in_",
"]",
"[",
"1",
"]",
":",
"raise",
"ValueError",
"(",
"'Mean channels incompatible with input.'",
")",
"mean",
"=",
"mean",
"[",
":",
",",
"np",
".",
"newaxis",
",",
"np",
".",
"newaxis",
"]",
"else",
":",
"# elementwise mean",
"if",
"len",
"(",
"ms",
")",
"==",
"2",
":",
"ms",
"=",
"(",
"1",
",",
")",
"+",
"ms",
"if",
"len",
"(",
"ms",
")",
"!=",
"3",
":",
"raise",
"ValueError",
"(",
"'Mean shape invalid'",
")",
"if",
"ms",
"!=",
"self",
".",
"inputs",
"[",
"in_",
"]",
"[",
"1",
":",
"]",
":",
"raise",
"ValueError",
"(",
"'Mean shape incompatible with input shape.'",
")",
"self",
".",
"mean",
"[",
"in_",
"]",
"=",
"mean"
] |
https://github.com/MhLiao/TextBoxes_plusplus/blob/39d4898de1504c53a2ed3d67966a57b3595836d0/python/caffe/io.py#L236-L260
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/targets/linalg.py
|
python
|
_solve_compute_return
|
(b, bcpy)
|
Extract 'x' (the solution) from the 'bcpy' scratch space.
Note 'b' is only used to check the system input dimension...
|
Extract 'x' (the solution) from the 'bcpy' scratch space.
Note 'b' is only used to check the system input dimension...
|
[
"Extract",
"x",
"(",
"the",
"solution",
")",
"from",
"the",
"bcpy",
"scratch",
"space",
".",
"Note",
"b",
"is",
"only",
"used",
"to",
"check",
"the",
"system",
"input",
"dimension",
"..."
] |
def _solve_compute_return(b, bcpy):
"""
Extract 'x' (the solution) from the 'bcpy' scratch space.
Note 'b' is only used to check the system input dimension...
"""
raise NotImplementedError
|
[
"def",
"_solve_compute_return",
"(",
"b",
",",
"bcpy",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/targets/linalg.py#L1670-L1675
|
||
miyosuda/TensorFlowAndroidDemo
|
35903e0221aa5f109ea2dbef27f20b52e317f42d
|
jni-build/jni/include/tensorflow/contrib/lookup/lookup_ops.py
|
python
|
MutableHashTable.insert
|
(self, keys, values, name=None)
|
return op
|
Associates `keys` with `values`.
Args:
keys: Keys to insert. Can be a tensor of any shape. Must match the
table's key type.
values: Values to be associated with keys. Must be a tensor of the same
shape as `keys` and match the table's value type.
name: A name for the operation (optional).
Returns:
The created Operation.
Raises:
TypeError: when `keys` or `values` doesn't match the table data
types.
|
Associates `keys` with `values`.
|
[
"Associates",
"keys",
"with",
"values",
"."
] |
def insert(self, keys, values, name=None):
"""Associates `keys` with `values`.
Args:
keys: Keys to insert. Can be a tensor of any shape. Must match the
table's key type.
values: Values to be associated with keys. Must be a tensor of the same
shape as `keys` and match the table's value type.
name: A name for the operation (optional).
Returns:
The created Operation.
Raises:
TypeError: when `keys` or `values` doesn't match the table data
types.
"""
self._check_table_dtypes(keys.dtype, values.dtype)
with ops.op_scope([self._table_ref, keys, values], name,
"%s_lookup_table_insert" % self._name) as name:
# pylint: disable=protected-access
op = gen_data_flow_ops._lookup_table_insert(
self._table_ref, keys, values, name=name)
# pylint: enable=protected-access
return op
|
[
"def",
"insert",
"(",
"self",
",",
"keys",
",",
"values",
",",
"name",
"=",
"None",
")",
":",
"self",
".",
"_check_table_dtypes",
"(",
"keys",
".",
"dtype",
",",
"values",
".",
"dtype",
")",
"with",
"ops",
".",
"op_scope",
"(",
"[",
"self",
".",
"_table_ref",
",",
"keys",
",",
"values",
"]",
",",
"name",
",",
"\"%s_lookup_table_insert\"",
"%",
"self",
".",
"_name",
")",
"as",
"name",
":",
"# pylint: disable=protected-access",
"op",
"=",
"gen_data_flow_ops",
".",
"_lookup_table_insert",
"(",
"self",
".",
"_table_ref",
",",
"keys",
",",
"values",
",",
"name",
"=",
"name",
")",
"# pylint: enable=protected-access",
"return",
"op"
] |
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/contrib/lookup/lookup_ops.py#L801-L826
|
|
ChromiumWebApps/chromium
|
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
|
tools/idl_parser/idl_ppapi_parser.py
|
python
|
IDLPPAPIParser.p_ValueListCont
|
(self, p)
|
ValueListCont : ValueList
|
|
ValueListCont : ValueList
|
|
[
"ValueListCont",
":",
"ValueList",
"|"
] |
def p_ValueListCont(self, p):
"""ValueListCont : ValueList
|"""
if len(p) > 1:
p[0] = p[1]
|
[
"def",
"p_ValueListCont",
"(",
"self",
",",
"p",
")",
":",
"if",
"len",
"(",
"p",
")",
">",
"1",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"1",
"]"
] |
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/idl_parser/idl_ppapi_parser.py#L266-L270
|
||
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/losses/python/metric_learning/metric_loss_ops.py
|
python
|
masked_minimum
|
(data, mask, dim=1)
|
return masked_minimums
|
Computes the axis wise minimum over chosen elements.
Args:
data: 2-D float `Tensor` of size [n, m].
mask: 2-D Boolean `Tensor` of size [n, m].
dim: The dimension over which to compute the minimum.
Returns:
masked_minimums: N-D `Tensor`.
The minimized dimension is of size 1 after the operation.
|
Computes the axis wise minimum over chosen elements.
|
[
"Computes",
"the",
"axis",
"wise",
"minimum",
"over",
"chosen",
"elements",
"."
] |
def masked_minimum(data, mask, dim=1):
"""Computes the axis wise minimum over chosen elements.
Args:
data: 2-D float `Tensor` of size [n, m].
mask: 2-D Boolean `Tensor` of size [n, m].
dim: The dimension over which to compute the minimum.
Returns:
masked_minimums: N-D `Tensor`.
The minimized dimension is of size 1 after the operation.
"""
axis_maximums = math_ops.reduce_max(data, dim, keepdims=True)
masked_minimums = math_ops.reduce_min(
math_ops.multiply(data - axis_maximums, mask), dim,
keepdims=True) + axis_maximums
return masked_minimums
|
[
"def",
"masked_minimum",
"(",
"data",
",",
"mask",
",",
"dim",
"=",
"1",
")",
":",
"axis_maximums",
"=",
"math_ops",
".",
"reduce_max",
"(",
"data",
",",
"dim",
",",
"keepdims",
"=",
"True",
")",
"masked_minimums",
"=",
"math_ops",
".",
"reduce_min",
"(",
"math_ops",
".",
"multiply",
"(",
"data",
"-",
"axis_maximums",
",",
"mask",
")",
",",
"dim",
",",
"keepdims",
"=",
"True",
")",
"+",
"axis_maximums",
"return",
"masked_minimums"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/losses/python/metric_learning/metric_loss_ops.py#L141-L157
|
|
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
tools/idl_parser/idl_parser.py
|
python
|
IDLParser.p_SerializationPatternList
|
(self, p)
|
SerializationPatternList : GETTER
| identifier Identifiers
|
|
SerializationPatternList : GETTER
| identifier Identifiers
|
|
[
"SerializationPatternList",
":",
"GETTER",
"|",
"identifier",
"Identifiers",
"|"
] |
def p_SerializationPatternList(self, p):
"""SerializationPatternList : GETTER
| identifier Identifiers
|"""
p[0] = self.BuildProduction('List', p, 0)
if len(p) > 1:
if p[1] == 'getter':
p[0].AddChildren(self.BuildTrue('GETTER'))
else:
attributes = ListFromConcat(p[1], p[2])
p[0].AddChildren(self.BuildAttribute('ATTRIBUTES', attributes))
|
[
"def",
"p_SerializationPatternList",
"(",
"self",
",",
"p",
")",
":",
"p",
"[",
"0",
"]",
"=",
"self",
".",
"BuildProduction",
"(",
"'List'",
",",
"p",
",",
"0",
")",
"if",
"len",
"(",
"p",
")",
">",
"1",
":",
"if",
"p",
"[",
"1",
"]",
"==",
"'getter'",
":",
"p",
"[",
"0",
"]",
".",
"AddChildren",
"(",
"self",
".",
"BuildTrue",
"(",
"'GETTER'",
")",
")",
"else",
":",
"attributes",
"=",
"ListFromConcat",
"(",
"p",
"[",
"1",
"]",
",",
"p",
"[",
"2",
"]",
")",
"p",
"[",
"0",
"]",
".",
"AddChildren",
"(",
"self",
".",
"BuildAttribute",
"(",
"'ATTRIBUTES'",
",",
"attributes",
")",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/idl_parser/idl_parser.py#L539-L549
|
||
benoitsteiner/tensorflow-opencl
|
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
|
tensorflow/python/estimator/training.py
|
python
|
EvalSpec.__new__
|
(cls,
input_fn,
steps=100,
name=None,
hooks=None,
exporters=None,
start_delay_secs=120,
throttle_secs=600)
|
return super(EvalSpec, cls).__new__(
cls,
input_fn=input_fn,
steps=steps,
name=name,
hooks=hooks,
exporters=exporters,
start_delay_secs=start_delay_secs,
throttle_secs=throttle_secs)
|
Creates a validated `EvalSpec` instance.
Args:
input_fn: Evaluation input function returning a tuple of:
features - `Tensor` or dictionary of string feature name to `Tensor`.
labels - `Tensor` or dictionary of `Tensor` with labels.
steps: Int. Positive number of steps for which to evaluate model. If
`None`, evaluates until `input_fn` raises an end-of-input exception.
See `Estimator.evaluate` for details.
name: String. Name of the evaluation if user needs to run multiple
evaluations on different data sets. Metrics for different evaluations
are saved in separate folders, and appear separately in tensorboard.
hooks: Iterable of `tf.train.SessionRunHook` objects to run
during evaluation.
exporters: Iterable of `Exporter`s, or a single one, or `None`.
`exporters` will be invoked after each evaluation.
start_delay_secs: Int. Start evaluating after waiting for this many
seconds.
throttle_secs: Int. Do not re-evaluate unless the last evaluation was
started at least this many seconds ago. Of course, evaluation does not
occur if no new checkpoints are available, hence, this is the minimum.
Returns:
A validated `EvalSpec` object.
Raises:
ValueError: If any of the input arguments is invalid.
TypeError: If any of the arguments is not of the expected type.
|
Creates a validated `EvalSpec` instance.
|
[
"Creates",
"a",
"validated",
"EvalSpec",
"instance",
"."
] |
def __new__(cls,
input_fn,
steps=100,
name=None,
hooks=None,
exporters=None,
start_delay_secs=120,
throttle_secs=600):
"""Creates a validated `EvalSpec` instance.
Args:
input_fn: Evaluation input function returning a tuple of:
features - `Tensor` or dictionary of string feature name to `Tensor`.
labels - `Tensor` or dictionary of `Tensor` with labels.
steps: Int. Positive number of steps for which to evaluate model. If
`None`, evaluates until `input_fn` raises an end-of-input exception.
See `Estimator.evaluate` for details.
name: String. Name of the evaluation if user needs to run multiple
evaluations on different data sets. Metrics for different evaluations
are saved in separate folders, and appear separately in tensorboard.
hooks: Iterable of `tf.train.SessionRunHook` objects to run
during evaluation.
exporters: Iterable of `Exporter`s, or a single one, or `None`.
`exporters` will be invoked after each evaluation.
start_delay_secs: Int. Start evaluating after waiting for this many
seconds.
throttle_secs: Int. Do not re-evaluate unless the last evaluation was
started at least this many seconds ago. Of course, evaluation does not
occur if no new checkpoints are available, hence, this is the minimum.
Returns:
A validated `EvalSpec` object.
Raises:
ValueError: If any of the input arguments is invalid.
TypeError: If any of the arguments is not of the expected type.
"""
# Validate input_fn.
_validate_input_fn(input_fn)
# Validate steps.
if steps is not None and steps <= 0:
raise ValueError('Must specify steps > 0, given: {}'.format(steps))
# Validate name.
if name is not None and not isinstance(name, six.string_types):
raise TypeError('`name` must be string, given: {}'.format(name))
# Validate hooks.
hooks = _validate_hooks(hooks)
# Validate exporters.
exporters = _validate_exporters(exporters)
# Validate start_delay_secs.
if start_delay_secs < 0:
raise ValueError('Must specify start_delay_secs >= 0, given: {}'.format(
start_delay_secs))
# Validate throttle_secs.
if throttle_secs < 0:
raise ValueError(
'Must specify throttle_secs >= 0, given: {}'.format(throttle_secs))
return super(EvalSpec, cls).__new__(
cls,
input_fn=input_fn,
steps=steps,
name=name,
hooks=hooks,
exporters=exporters,
start_delay_secs=start_delay_secs,
throttle_secs=throttle_secs)
|
[
"def",
"__new__",
"(",
"cls",
",",
"input_fn",
",",
"steps",
"=",
"100",
",",
"name",
"=",
"None",
",",
"hooks",
"=",
"None",
",",
"exporters",
"=",
"None",
",",
"start_delay_secs",
"=",
"120",
",",
"throttle_secs",
"=",
"600",
")",
":",
"# Validate input_fn.",
"_validate_input_fn",
"(",
"input_fn",
")",
"# Validate steps.",
"if",
"steps",
"is",
"not",
"None",
"and",
"steps",
"<=",
"0",
":",
"raise",
"ValueError",
"(",
"'Must specify steps > 0, given: {}'",
".",
"format",
"(",
"steps",
")",
")",
"# Validate name.",
"if",
"name",
"is",
"not",
"None",
"and",
"not",
"isinstance",
"(",
"name",
",",
"six",
".",
"string_types",
")",
":",
"raise",
"TypeError",
"(",
"'`name` must be string, given: {}'",
".",
"format",
"(",
"name",
")",
")",
"# Validate hooks.",
"hooks",
"=",
"_validate_hooks",
"(",
"hooks",
")",
"# Validate exporters.",
"exporters",
"=",
"_validate_exporters",
"(",
"exporters",
")",
"# Validate start_delay_secs.",
"if",
"start_delay_secs",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"'Must specify start_delay_secs >= 0, given: {}'",
".",
"format",
"(",
"start_delay_secs",
")",
")",
"# Validate throttle_secs.",
"if",
"throttle_secs",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"'Must specify throttle_secs >= 0, given: {}'",
".",
"format",
"(",
"throttle_secs",
")",
")",
"return",
"super",
"(",
"EvalSpec",
",",
"cls",
")",
".",
"__new__",
"(",
"cls",
",",
"input_fn",
"=",
"input_fn",
",",
"steps",
"=",
"steps",
",",
"name",
"=",
"name",
",",
"hooks",
"=",
"hooks",
",",
"exporters",
"=",
"exporters",
",",
"start_delay_secs",
"=",
"start_delay_secs",
",",
"throttle_secs",
"=",
"throttle_secs",
")"
] |
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/estimator/training.py#L181-L253
|
|
apiaryio/drafter
|
4634ebd07f6c6f257cc656598ccd535492fdfb55
|
tools/gyp/pylib/gyp/generator/cmake.py
|
python
|
SetVariableList
|
(output, variable_name, values)
|
Sets a CMake variable to a list.
|
Sets a CMake variable to a list.
|
[
"Sets",
"a",
"CMake",
"variable",
"to",
"a",
"list",
"."
] |
def SetVariableList(output, variable_name, values):
"""Sets a CMake variable to a list."""
if not values:
return SetVariable(output, variable_name, "")
if len(values) == 1:
return SetVariable(output, variable_name, values[0])
output.write('list(APPEND ')
output.write(variable_name)
output.write('\n "')
output.write('"\n "'.join([CMakeStringEscape(value) for value in values]))
output.write('")\n')
|
[
"def",
"SetVariableList",
"(",
"output",
",",
"variable_name",
",",
"values",
")",
":",
"if",
"not",
"values",
":",
"return",
"SetVariable",
"(",
"output",
",",
"variable_name",
",",
"\"\"",
")",
"if",
"len",
"(",
"values",
")",
"==",
"1",
":",
"return",
"SetVariable",
"(",
"output",
",",
"variable_name",
",",
"values",
"[",
"0",
"]",
")",
"output",
".",
"write",
"(",
"'list(APPEND '",
")",
"output",
".",
"write",
"(",
"variable_name",
")",
"output",
".",
"write",
"(",
"'\\n \"'",
")",
"output",
".",
"write",
"(",
"'\"\\n \"'",
".",
"join",
"(",
"[",
"CMakeStringEscape",
"(",
"value",
")",
"for",
"value",
"in",
"values",
"]",
")",
")",
"output",
".",
"write",
"(",
"'\")\\n'",
")"
] |
https://github.com/apiaryio/drafter/blob/4634ebd07f6c6f257cc656598ccd535492fdfb55/tools/gyp/pylib/gyp/generator/cmake.py#L189-L199
|
||
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/tools/compatibility/ast_edits.py
|
python
|
APIChangeSpec.preprocess
|
(self, root_node)
|
return [], []
|
Preprocess a parse tree. Return any produced logs and errors.
|
Preprocess a parse tree. Return any produced logs and errors.
|
[
"Preprocess",
"a",
"parse",
"tree",
".",
"Return",
"any",
"produced",
"logs",
"and",
"errors",
"."
] |
def preprocess(self, root_node): # pylint: disable=unused-argument
"""Preprocess a parse tree. Return any produced logs and errors."""
return [], []
|
[
"def",
"preprocess",
"(",
"self",
",",
"root_node",
")",
":",
"# pylint: disable=unused-argument",
"return",
"[",
"]",
",",
"[",
"]"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/tools/compatibility/ast_edits.py#L213-L215
|
|
tensorflow/tensorflow
|
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
|
tensorflow/python/eager/function.py
|
python
|
ConcreteFunction._get_gradient_function
|
(self)
|
return self._delayed_rewrite_functions._rewrite_forward_and_call_backward
|
Returns gradient function. It will be lazily created at first call.
|
Returns gradient function. It will be lazily created at first call.
|
[
"Returns",
"gradient",
"function",
".",
"It",
"will",
"be",
"lazily",
"created",
"at",
"first",
"call",
"."
] |
def _get_gradient_function(self):
"""Returns gradient function. It will be lazily created at first call."""
return self._delayed_rewrite_functions._rewrite_forward_and_call_backward
|
[
"def",
"_get_gradient_function",
"(",
"self",
")",
":",
"return",
"self",
".",
"_delayed_rewrite_functions",
".",
"_rewrite_forward_and_call_backward"
] |
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/eager/function.py#L2118-L2120
|
|
tomahawk-player/tomahawk-resolvers
|
7f827bbe410ccfdb0446f7d6a91acc2199c9cc8d
|
archive/spotify/breakpad/third_party/protobuf/protobuf/python/google/protobuf/service_reflection.py
|
python
|
_ServiceBuilder._GenerateNonImplementedMethod
|
(self, method)
|
return lambda inst, rpc_controller, request, callback: (
self._NonImplementedMethod(method.name, rpc_controller, callback))
|
Generates and returns a method that can be set for a service methods.
Args:
method: Descriptor of the service method for which a method is to be
generated.
Returns:
A method that can be added to the service class.
|
Generates and returns a method that can be set for a service methods.
|
[
"Generates",
"and",
"returns",
"a",
"method",
"that",
"can",
"be",
"set",
"for",
"a",
"service",
"methods",
"."
] |
def _GenerateNonImplementedMethod(self, method):
"""Generates and returns a method that can be set for a service methods.
Args:
method: Descriptor of the service method for which a method is to be
generated.
Returns:
A method that can be added to the service class.
"""
return lambda inst, rpc_controller, request, callback: (
self._NonImplementedMethod(method.name, rpc_controller, callback))
|
[
"def",
"_GenerateNonImplementedMethod",
"(",
"self",
",",
"method",
")",
":",
"return",
"lambda",
"inst",
",",
"rpc_controller",
",",
"request",
",",
"callback",
":",
"(",
"self",
".",
"_NonImplementedMethod",
"(",
"method",
".",
"name",
",",
"rpc_controller",
",",
"callback",
")",
")"
] |
https://github.com/tomahawk-player/tomahawk-resolvers/blob/7f827bbe410ccfdb0446f7d6a91acc2199c9cc8d/archive/spotify/breakpad/third_party/protobuf/protobuf/python/google/protobuf/service_reflection.py#L205-L216
|
|
regomne/chinesize
|
2ae555445046cd28d60a514e30ac1d6eca1c442a
|
N2System/nsbparser/nsbParser.py
|
python
|
NsbParser.p101
|
(self)
|
unk
|
unk
|
[
"unk"
] |
def p101(self):
'unk'
self.text.append('\t'*self.tabcount+'OP_101')
|
[
"def",
"p101",
"(",
"self",
")",
":",
"self",
".",
"text",
".",
"append",
"(",
"'\\t'",
"*",
"self",
".",
"tabcount",
"+",
"'OP_101'",
")"
] |
https://github.com/regomne/chinesize/blob/2ae555445046cd28d60a514e30ac1d6eca1c442a/N2System/nsbparser/nsbParser.py#L321-L323
|
||
FreeCAD/FreeCAD
|
ba42231b9c6889b89e064d6d563448ed81e376ec
|
src/Mod/Draft/DraftVecUtils.py
|
python
|
rotate2D
|
(u, angle)
|
return Vector(x_rot, y_rot, u.z)
|
Rotate the given vector around the Z axis by the specified angle.
The rotation occurs in two dimensions only by means of
a rotation matrix.
::
u_rot R u
(x_rot) = (cos(-angle) -sin(-angle)) * (x)
(y_rot) (sin(-angle) cos(-angle)) (y)
Normally the angle is positive, but in this case it is negative.
`"Such non-standard orientations are rarely used in mathematics
but are common in 2D computer graphics, which often have the origin
in the top left corner and the y-axis pointing down."`
W3C Recommendations (2003), Scalable Vector Graphics: the initial
coordinate system.
Parameters
----------
u : Base::Vector3
The vector.
angle : float
The angle of rotation given in radians.
Returns
-------
Base::Vector3
The new rotated vector.
|
Rotate the given vector around the Z axis by the specified angle.
|
[
"Rotate",
"the",
"given",
"vector",
"around",
"the",
"Z",
"axis",
"by",
"the",
"specified",
"angle",
"."
] |
def rotate2D(u, angle):
"""Rotate the given vector around the Z axis by the specified angle.
The rotation occurs in two dimensions only by means of
a rotation matrix.
::
u_rot R u
(x_rot) = (cos(-angle) -sin(-angle)) * (x)
(y_rot) (sin(-angle) cos(-angle)) (y)
Normally the angle is positive, but in this case it is negative.
`"Such non-standard orientations are rarely used in mathematics
but are common in 2D computer graphics, which often have the origin
in the top left corner and the y-axis pointing down."`
W3C Recommendations (2003), Scalable Vector Graphics: the initial
coordinate system.
Parameters
----------
u : Base::Vector3
The vector.
angle : float
The angle of rotation given in radians.
Returns
-------
Base::Vector3
The new rotated vector.
"""
x_rot = math.cos(-angle) * u.x - math.sin(-angle) * u.y
y_rot = math.sin(-angle) * u.x + math.cos(-angle) * u.y
return Vector(x_rot, y_rot, u.z)
|
[
"def",
"rotate2D",
"(",
"u",
",",
"angle",
")",
":",
"x_rot",
"=",
"math",
".",
"cos",
"(",
"-",
"angle",
")",
"*",
"u",
".",
"x",
"-",
"math",
".",
"sin",
"(",
"-",
"angle",
")",
"*",
"u",
".",
"y",
"y_rot",
"=",
"math",
".",
"sin",
"(",
"-",
"angle",
")",
"*",
"u",
".",
"x",
"+",
"math",
".",
"cos",
"(",
"-",
"angle",
")",
"*",
"u",
".",
"y",
"return",
"Vector",
"(",
"x_rot",
",",
"y_rot",
",",
"u",
".",
"z",
")"
] |
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/DraftVecUtils.py#L403-L436
|
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib-tk/Tkinter.py
|
python
|
Misc.tk_bisque
|
(self)
|
Change the color scheme to light brown as used in Tk 3.6 and before.
|
Change the color scheme to light brown as used in Tk 3.6 and before.
|
[
"Change",
"the",
"color",
"scheme",
"to",
"light",
"brown",
"as",
"used",
"in",
"Tk",
"3",
".",
"6",
"and",
"before",
"."
] |
def tk_bisque(self):
"""Change the color scheme to light brown as used in Tk 3.6 and before."""
self.tk.call('tk_bisque')
|
[
"def",
"tk_bisque",
"(",
"self",
")",
":",
"self",
".",
"tk",
".",
"call",
"(",
"'tk_bisque'",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/lib-tk/Tkinter.py#L408-L410
|
||
baidu-research/tensorflow-allreduce
|
66d5b855e90b0949e9fa5cca5599fd729a70e874
|
tensorflow/python/ops/data_flow_ops.py
|
python
|
MapStagingArea.get
|
(self, key=None, indices=None, name=None)
|
If the key is provided, the associated (key, value)
is returned from the staging area. If the key is not
in the staging area, this method will block until
the associated (key, value) is inserted.
If no key is provided and the staging area is ordered,
the (key, value) with the smallest key will be returned.
Otherwise, a random (key, value) will be returned.
If the staging area is empty when this operation executes,
it will block until there is an element to dequeue.
Args:
key: Key associated with the required data (Optional)
indices: Partial list of tensors to retrieve (optional).
A list of integer or string indices.
String indices are only valid if the Staging Area
has names associated with it.
name: A name for the operation (optional)
Returns:
The created op
|
If the key is provided, the associated (key, value)
is returned from the staging area. If the key is not
in the staging area, this method will block until
the associated (key, value) is inserted.
|
[
"If",
"the",
"key",
"is",
"provided",
"the",
"associated",
"(",
"key",
"value",
")",
"is",
"returned",
"from",
"the",
"staging",
"area",
".",
"If",
"the",
"key",
"is",
"not",
"in",
"the",
"staging",
"area",
"this",
"method",
"will",
"block",
"until",
"the",
"associated",
"(",
"key",
"value",
")",
"is",
"inserted",
"."
] |
def get(self, key=None, indices=None, name=None):
"""
If the key is provided, the associated (key, value)
is returned from the staging area. If the key is not
in the staging area, this method will block until
the associated (key, value) is inserted.
If no key is provided and the staging area is ordered,
the (key, value) with the smallest key will be returned.
Otherwise, a random (key, value) will be returned.
If the staging area is empty when this operation executes,
it will block until there is an element to dequeue.
Args:
key: Key associated with the required data (Optional)
indices: Partial list of tensors to retrieve (optional).
A list of integer or string indices.
String indices are only valid if the Staging Area
has names associated with it.
name: A name for the operation (optional)
Returns:
The created op
"""
if key is None:
return self._popitem(indices=indices, name=name)
else:
return self._pop(key, indices=indices, name=name)
|
[
"def",
"get",
"(",
"self",
",",
"key",
"=",
"None",
",",
"indices",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"if",
"key",
"is",
"None",
":",
"return",
"self",
".",
"_popitem",
"(",
"indices",
"=",
"indices",
",",
"name",
"=",
"name",
")",
"else",
":",
"return",
"self",
".",
"_pop",
"(",
"key",
",",
"indices",
"=",
"indices",
",",
"name",
"=",
"name",
")"
] |
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/data_flow_ops.py#L1996-L2024
|
||
domino-team/openwrt-cc
|
8b181297c34d14d3ca521cc9f31430d561dbc688
|
package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/tools/gyp/pylib/gyp/mac_tool.py
|
python
|
MacTool._LoadProvisioningProfile
|
(self, profile_path)
|
Extracts the plist embedded in a provisioning profile.
Args:
profile_path: string, path to the .mobileprovision file
Returns:
Content of the plist embedded in the provisioning profile as a dictionary.
|
Extracts the plist embedded in a provisioning profile.
|
[
"Extracts",
"the",
"plist",
"embedded",
"in",
"a",
"provisioning",
"profile",
"."
] |
def _LoadProvisioningProfile(self, profile_path):
"""Extracts the plist embedded in a provisioning profile.
Args:
profile_path: string, path to the .mobileprovision file
Returns:
Content of the plist embedded in the provisioning profile as a dictionary.
"""
with tempfile.NamedTemporaryFile() as temp:
subprocess.check_call([
'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
return self._LoadPlistMaybeBinary(temp.name)
|
[
"def",
"_LoadProvisioningProfile",
"(",
"self",
",",
"profile_path",
")",
":",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
")",
"as",
"temp",
":",
"subprocess",
".",
"check_call",
"(",
"[",
"'security'",
",",
"'cms'",
",",
"'-D'",
",",
"'-i'",
",",
"profile_path",
",",
"'-o'",
",",
"temp",
".",
"name",
"]",
")",
"return",
"self",
".",
"_LoadPlistMaybeBinary",
"(",
"temp",
".",
"name",
")"
] |
https://github.com/domino-team/openwrt-cc/blob/8b181297c34d14d3ca521cc9f31430d561dbc688/package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/tools/gyp/pylib/gyp/mac_tool.py#L474-L486
|
||
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/third_party/gsutil/third_party/protorpc/protorpc/util.py
|
python
|
AcceptItem.match
|
(self, content_type)
|
return ((self.__main_type is None or self.__main_type == main_type) and
(self.__sub_type is None or self.__sub_type == sub_type))
|
Determine if the given accept header matches content type.
Args:
content_type: Unparsed content type string.
Returns:
True if accept header matches content type, else False.
|
Determine if the given accept header matches content type.
|
[
"Determine",
"if",
"the",
"given",
"accept",
"header",
"matches",
"content",
"type",
"."
] |
def match(self, content_type):
"""Determine if the given accept header matches content type.
Args:
content_type: Unparsed content type string.
Returns:
True if accept header matches content type, else False.
"""
content_type, _ = cgi.parse_header(content_type)
match = self.__CONTENT_TYPE_REGEX.match(content_type.lower())
if not match:
return False
main_type, sub_type = match.group(1), match.group(2)
if not(main_type and sub_type):
return False
return ((self.__main_type is None or self.__main_type == main_type) and
(self.__sub_type is None or self.__sub_type == sub_type))
|
[
"def",
"match",
"(",
"self",
",",
"content_type",
")",
":",
"content_type",
",",
"_",
"=",
"cgi",
".",
"parse_header",
"(",
"content_type",
")",
"match",
"=",
"self",
".",
"__CONTENT_TYPE_REGEX",
".",
"match",
"(",
"content_type",
".",
"lower",
"(",
")",
")",
"if",
"not",
"match",
":",
"return",
"False",
"main_type",
",",
"sub_type",
"=",
"match",
".",
"group",
"(",
"1",
")",
",",
"match",
".",
"group",
"(",
"2",
")",
"if",
"not",
"(",
"main_type",
"and",
"sub_type",
")",
":",
"return",
"False",
"return",
"(",
"(",
"self",
".",
"__main_type",
"is",
"None",
"or",
"self",
".",
"__main_type",
"==",
"main_type",
")",
"and",
"(",
"self",
".",
"__sub_type",
"is",
"None",
"or",
"self",
".",
"__sub_type",
"==",
"sub_type",
")",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/protorpc/protorpc/util.py#L280-L299
|
|
naver/sling
|
5671cd445a2caae0b4dd0332299e4cfede05062c
|
webkit/Tools/Scripts/webkitpy/thirdparty/irc/irclib.py
|
python
|
ServerConnection.mode
|
(self, target, command)
|
Send a MODE command.
|
Send a MODE command.
|
[
"Send",
"a",
"MODE",
"command",
"."
] |
def mode(self, target, command):
"""Send a MODE command."""
self.send_raw("MODE %s %s" % (target, command))
|
[
"def",
"mode",
"(",
"self",
",",
"target",
",",
"command",
")",
":",
"self",
".",
"send_raw",
"(",
"\"MODE %s %s\"",
"%",
"(",
"target",
",",
"command",
")",
")"
] |
https://github.com/naver/sling/blob/5671cd445a2caae0b4dd0332299e4cfede05062c/webkit/Tools/Scripts/webkitpy/thirdparty/irc/irclib.py#L719-L721
|
||
google/llvm-propeller
|
45c226984fe8377ebfb2ad7713c680d652ba678d
|
compiler-rt/lib/sanitizer_common/scripts/cpplint.py
|
python
|
ParseNolintSuppressions
|
(filename, raw_line, linenum, error)
|
Updates the global list of line error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler.
|
Updates the global list of line error-suppressions.
|
[
"Updates",
"the",
"global",
"list",
"of",
"line",
"error",
"-",
"suppressions",
"."
] |
def ParseNolintSuppressions(filename, raw_line, linenum, error):
"""Updates the global list of line error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler.
"""
matched = Search(r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?', raw_line)
if matched:
if matched.group(1):
suppressed_line = linenum + 1
else:
suppressed_line = linenum
category = matched.group(2)
if category in (None, '(*)'): # => "suppress all"
_error_suppressions.setdefault(None, set()).add(suppressed_line)
else:
if category.startswith('(') and category.endswith(')'):
category = category[1:-1]
if category in _ERROR_CATEGORIES:
_error_suppressions.setdefault(category, set()).add(suppressed_line)
elif category not in _LEGACY_ERROR_CATEGORIES:
error(filename, linenum, 'readability/nolint', 5,
'Unknown NOLINT error category: %s' % category)
|
[
"def",
"ParseNolintSuppressions",
"(",
"filename",
",",
"raw_line",
",",
"linenum",
",",
"error",
")",
":",
"matched",
"=",
"Search",
"(",
"r'\\bNOLINT(NEXTLINE)?\\b(\\([^)]+\\))?'",
",",
"raw_line",
")",
"if",
"matched",
":",
"if",
"matched",
".",
"group",
"(",
"1",
")",
":",
"suppressed_line",
"=",
"linenum",
"+",
"1",
"else",
":",
"suppressed_line",
"=",
"linenum",
"category",
"=",
"matched",
".",
"group",
"(",
"2",
")",
"if",
"category",
"in",
"(",
"None",
",",
"'(*)'",
")",
":",
"# => \"suppress all\"",
"_error_suppressions",
".",
"setdefault",
"(",
"None",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"suppressed_line",
")",
"else",
":",
"if",
"category",
".",
"startswith",
"(",
"'('",
")",
"and",
"category",
".",
"endswith",
"(",
"')'",
")",
":",
"category",
"=",
"category",
"[",
"1",
":",
"-",
"1",
"]",
"if",
"category",
"in",
"_ERROR_CATEGORIES",
":",
"_error_suppressions",
".",
"setdefault",
"(",
"category",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"suppressed_line",
")",
"elif",
"category",
"not",
"in",
"_LEGACY_ERROR_CATEGORIES",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'readability/nolint'",
",",
"5",
",",
"'Unknown NOLINT error category: %s'",
"%",
"category",
")"
] |
https://github.com/google/llvm-propeller/blob/45c226984fe8377ebfb2ad7713c680d652ba678d/compiler-rt/lib/sanitizer_common/scripts/cpplint.py#L583-L612
|
||
miyosuda/TensorFlowAndroidMNIST
|
7b5a4603d2780a8a2834575706e9001977524007
|
jni-build/jni/include/tensorflow/python/framework/common_shapes.py
|
python
|
separable_conv2d_shape
|
(op)
|
return [tensor_shape.TensorShape([batch_size, out_rows, out_cols, depth_out])]
|
Shape function for a SeparableConv2D op.
This op has three inputs:
* input, a 4D tensor with shape = [batch_size, rows, cols, depth_in]
* depthwise_filter, a 4D tensor with shape = [filter_rows,
filter_cols, depth_in, depth_multiplier]
* pointwise_filter, a 4D tensor with shape = [1, 1, depth_in *
depth_multiplier, depth_out]
The output is a 4D tensor with shape = [batch_size, out_rows,
out_cols, depth_out], where out_rows and out_cols depend on the
value of the op's "padding" and "strides" attrs.
Args:
op: A SeparableConv2D Operation.
Returns:
A list containing the Shape of the SeparableConv2D output.
Raises:
ValueError: If the shapes of the input or filter are incompatible.
|
Shape function for a SeparableConv2D op.
|
[
"Shape",
"function",
"for",
"a",
"SeparableConv2D",
"op",
"."
] |
def separable_conv2d_shape(op):
"""Shape function for a SeparableConv2D op.
This op has three inputs:
* input, a 4D tensor with shape = [batch_size, rows, cols, depth_in]
* depthwise_filter, a 4D tensor with shape = [filter_rows,
filter_cols, depth_in, depth_multiplier]
* pointwise_filter, a 4D tensor with shape = [1, 1, depth_in *
depth_multiplier, depth_out]
The output is a 4D tensor with shape = [batch_size, out_rows,
out_cols, depth_out], where out_rows and out_cols depend on the
value of the op's "padding" and "strides" attrs.
Args:
op: A SeparableConv2D Operation.
Returns:
A list containing the Shape of the SeparableConv2D output.
Raises:
ValueError: If the shapes of the input or filter are incompatible.
"""
input_shape = op.inputs[0].get_shape().with_rank(4)
depthwise_filter_shape = op.inputs[1].get_shape().merge_with(
tensor_shape.TensorShape([None, None, input_shape[3], None]))
pointwise_depth_in = depthwise_filter_shape[2] * depthwise_filter_shape[3]
pointwise_filter_shape = op.inputs[2].get_shape().merge_with(
tensor_shape.TensorShape([1, 1, pointwise_depth_in, None]))
batch_size = input_shape[0]
in_rows = input_shape[1]
in_cols = input_shape[2]
filter_rows = depthwise_filter_shape[0]
filter_cols = depthwise_filter_shape[1]
depth_out = pointwise_filter_shape[3]
stride_b, stride_r, stride_c, stride_d = op.get_attr("strides")
if stride_b != 1 or stride_d != 1:
raise ValueError("Current implementation does not yet support "
"strides in the batch and depth dimensions.")
if stride_r != stride_c:
# TODO(shlens): Add support for this.
raise ValueError("Current implementation only supports equal length "
"strides in the row and column dimensions.")
# TODO(mrry,shlens): Raise an error if the stride would cause
# information in the input to be ignored. This will require a change
# in the kernel implementation.
stride = stride_r
padding = op.get_attr("padding")
out_rows, out_cols = get2d_conv_output_size(in_rows, in_cols, filter_rows,
filter_cols, stride, stride,
padding)
return [tensor_shape.TensorShape([batch_size, out_rows, out_cols, depth_out])]
|
[
"def",
"separable_conv2d_shape",
"(",
"op",
")",
":",
"input_shape",
"=",
"op",
".",
"inputs",
"[",
"0",
"]",
".",
"get_shape",
"(",
")",
".",
"with_rank",
"(",
"4",
")",
"depthwise_filter_shape",
"=",
"op",
".",
"inputs",
"[",
"1",
"]",
".",
"get_shape",
"(",
")",
".",
"merge_with",
"(",
"tensor_shape",
".",
"TensorShape",
"(",
"[",
"None",
",",
"None",
",",
"input_shape",
"[",
"3",
"]",
",",
"None",
"]",
")",
")",
"pointwise_depth_in",
"=",
"depthwise_filter_shape",
"[",
"2",
"]",
"*",
"depthwise_filter_shape",
"[",
"3",
"]",
"pointwise_filter_shape",
"=",
"op",
".",
"inputs",
"[",
"2",
"]",
".",
"get_shape",
"(",
")",
".",
"merge_with",
"(",
"tensor_shape",
".",
"TensorShape",
"(",
"[",
"1",
",",
"1",
",",
"pointwise_depth_in",
",",
"None",
"]",
")",
")",
"batch_size",
"=",
"input_shape",
"[",
"0",
"]",
"in_rows",
"=",
"input_shape",
"[",
"1",
"]",
"in_cols",
"=",
"input_shape",
"[",
"2",
"]",
"filter_rows",
"=",
"depthwise_filter_shape",
"[",
"0",
"]",
"filter_cols",
"=",
"depthwise_filter_shape",
"[",
"1",
"]",
"depth_out",
"=",
"pointwise_filter_shape",
"[",
"3",
"]",
"stride_b",
",",
"stride_r",
",",
"stride_c",
",",
"stride_d",
"=",
"op",
".",
"get_attr",
"(",
"\"strides\"",
")",
"if",
"stride_b",
"!=",
"1",
"or",
"stride_d",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"\"Current implementation does not yet support \"",
"\"strides in the batch and depth dimensions.\"",
")",
"if",
"stride_r",
"!=",
"stride_c",
":",
"# TODO(shlens): Add support for this.",
"raise",
"ValueError",
"(",
"\"Current implementation only supports equal length \"",
"\"strides in the row and column dimensions.\"",
")",
"# TODO(mrry,shlens): Raise an error if the stride would cause",
"# information in the input to be ignored. This will require a change",
"# in the kernel implementation.",
"stride",
"=",
"stride_r",
"padding",
"=",
"op",
".",
"get_attr",
"(",
"\"padding\"",
")",
"out_rows",
",",
"out_cols",
"=",
"get2d_conv_output_size",
"(",
"in_rows",
",",
"in_cols",
",",
"filter_rows",
",",
"filter_cols",
",",
"stride",
",",
"stride",
",",
"padding",
")",
"return",
"[",
"tensor_shape",
".",
"TensorShape",
"(",
"[",
"batch_size",
",",
"out_rows",
",",
"out_cols",
",",
"depth_out",
"]",
")",
"]"
] |
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/framework/common_shapes.py#L312-L372
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/pandas/py2/pandas/core/base.py
|
python
|
StringMixin.__str__
|
(self)
|
return self.__bytes__()
|
Return a string representation for a particular Object
Invoked by str(df) in both py2/py3.
Yields Bytestring in Py2, Unicode String in py3.
|
Return a string representation for a particular Object
|
[
"Return",
"a",
"string",
"representation",
"for",
"a",
"particular",
"Object"
] |
def __str__(self):
"""
Return a string representation for a particular Object
Invoked by str(df) in both py2/py3.
Yields Bytestring in Py2, Unicode String in py3.
"""
if compat.PY3:
return self.__unicode__()
return self.__bytes__()
|
[
"def",
"__str__",
"(",
"self",
")",
":",
"if",
"compat",
".",
"PY3",
":",
"return",
"self",
".",
"__unicode__",
"(",
")",
"return",
"self",
".",
"__bytes__",
"(",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/base.py#L48-L58
|
|
yushroom/FishEngine
|
a4b9fb9b0a6dc202f7990e75f4b7d8d5163209d9
|
Script/reflect/clang/cindex.py
|
python
|
Cursor.is_anonymous
|
(self)
|
return conf.lib.clang_Cursor_isAnonymous(self)
|
Check if the record is anonymous.
|
Check if the record is anonymous.
|
[
"Check",
"if",
"the",
"record",
"is",
"anonymous",
"."
] |
def is_anonymous(self):
"""
Check if the record is anonymous.
"""
if self.kind == CursorKind.FIELD_DECL:
return self.type.get_declaration().is_anonymous()
return conf.lib.clang_Cursor_isAnonymous(self)
|
[
"def",
"is_anonymous",
"(",
"self",
")",
":",
"if",
"self",
".",
"kind",
"==",
"CursorKind",
".",
"FIELD_DECL",
":",
"return",
"self",
".",
"type",
".",
"get_declaration",
"(",
")",
".",
"is_anonymous",
"(",
")",
"return",
"conf",
".",
"lib",
".",
"clang_Cursor_isAnonymous",
"(",
"self",
")"
] |
https://github.com/yushroom/FishEngine/blob/a4b9fb9b0a6dc202f7990e75f4b7d8d5163209d9/Script/reflect/clang/cindex.py#L1714-L1720
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/tools/Editra/src/eclib/ctrlbox.py
|
python
|
SegmentBar.HitTest
|
(self, pos)
|
return where, index
|
Find where the position is in the window
@param pos: (x, y) in client cords
@return: int
|
Find where the position is in the window
@param pos: (x, y) in client cords
@return: int
|
[
"Find",
"where",
"the",
"position",
"is",
"in",
"the",
"window",
"@param",
"pos",
":",
"(",
"x",
"y",
")",
"in",
"client",
"cords",
"@return",
":",
"int"
] |
def HitTest(self, pos):
"""Find where the position is in the window
@param pos: (x, y) in client cords
@return: int
"""
index = self.GetIndexFromPosition(pos)
where = SEGMENT_HT_NOWHERE
if index != wx.NOT_FOUND:
button = self._buttons[index]
if self.SegmentHasCloseButton(index):
brect = button.XButton
trect = wx.Rect(brect.x, brect.y, brect.Width+4, brect.Height+4)
if trect.Contains(pos):
where = SEGMENT_HT_X_BTN
else:
where = SEGMENT_HT_SEG
else:
where = SEGMENT_HT_SEG
return where, index
|
[
"def",
"HitTest",
"(",
"self",
",",
"pos",
")",
":",
"index",
"=",
"self",
".",
"GetIndexFromPosition",
"(",
"pos",
")",
"where",
"=",
"SEGMENT_HT_NOWHERE",
"if",
"index",
"!=",
"wx",
".",
"NOT_FOUND",
":",
"button",
"=",
"self",
".",
"_buttons",
"[",
"index",
"]",
"if",
"self",
".",
"SegmentHasCloseButton",
"(",
"index",
")",
":",
"brect",
"=",
"button",
".",
"XButton",
"trect",
"=",
"wx",
".",
"Rect",
"(",
"brect",
".",
"x",
",",
"brect",
".",
"y",
",",
"brect",
".",
"Width",
"+",
"4",
",",
"brect",
".",
"Height",
"+",
"4",
")",
"if",
"trect",
".",
"Contains",
"(",
"pos",
")",
":",
"where",
"=",
"SEGMENT_HT_X_BTN",
"else",
":",
"where",
"=",
"SEGMENT_HT_SEG",
"else",
":",
"where",
"=",
"SEGMENT_HT_SEG",
"return",
"where",
",",
"index"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/eclib/ctrlbox.py#L932-L952
|
|
garbear/kodi-steamlink
|
3f8e5970b01607cdb3c2688fbaa78e08f2d9c561
|
tools/EventClients/lib/python/xbmcclient.py
|
python
|
Packet.get_udp_message
|
(self, packetnum=1)
|
return header + payload
|
Construct the UDP message for the specified packetnum and return
as string
Keyword arguments:
packetnum -- the packet no. for which to construct the message
(default 1)
|
Construct the UDP message for the specified packetnum and return
as string
|
[
"Construct",
"the",
"UDP",
"message",
"for",
"the",
"specified",
"packetnum",
"and",
"return",
"as",
"string"
] |
def get_udp_message(self, packetnum=1):
"""Construct the UDP message for the specified packetnum and return
as string
Keyword arguments:
packetnum -- the packet no. for which to construct the message
(default 1)
"""
if packetnum > self.num_packets() or packetnum < 1:
return b""
header = b""
if packetnum==1:
header = self.get_header(self.packettype, packetnum, self.maxseq,
self.get_payload_size(packetnum))
else:
header = self.get_header(PT_BLOB, packetnum, self.maxseq,
self.get_payload_size(packetnum))
payload = self.payload[ (packetnum-1) * MAX_PAYLOAD_SIZE :
(packetnum-1) * MAX_PAYLOAD_SIZE+
self.get_payload_size(packetnum) ]
return header + payload
|
[
"def",
"get_udp_message",
"(",
"self",
",",
"packetnum",
"=",
"1",
")",
":",
"if",
"packetnum",
">",
"self",
".",
"num_packets",
"(",
")",
"or",
"packetnum",
"<",
"1",
":",
"return",
"b\"\"",
"header",
"=",
"b\"\"",
"if",
"packetnum",
"==",
"1",
":",
"header",
"=",
"self",
".",
"get_header",
"(",
"self",
".",
"packettype",
",",
"packetnum",
",",
"self",
".",
"maxseq",
",",
"self",
".",
"get_payload_size",
"(",
"packetnum",
")",
")",
"else",
":",
"header",
"=",
"self",
".",
"get_header",
"(",
"PT_BLOB",
",",
"packetnum",
",",
"self",
".",
"maxseq",
",",
"self",
".",
"get_payload_size",
"(",
"packetnum",
")",
")",
"payload",
"=",
"self",
".",
"payload",
"[",
"(",
"packetnum",
"-",
"1",
")",
"*",
"MAX_PAYLOAD_SIZE",
":",
"(",
"packetnum",
"-",
"1",
")",
"*",
"MAX_PAYLOAD_SIZE",
"+",
"self",
".",
"get_payload_size",
"(",
"packetnum",
")",
"]",
"return",
"header",
"+",
"payload"
] |
https://github.com/garbear/kodi-steamlink/blob/3f8e5970b01607cdb3c2688fbaa78e08f2d9c561/tools/EventClients/lib/python/xbmcclient.py#L226-L247
|
|
hpi-xnor/BMXNet
|
ed0b201da6667887222b8e4b5f997c4f6b61943d
|
python/mxnet/ndarray/ndarray.py
|
python
|
NDArray.stype
|
(self)
|
return _STORAGE_TYPE_ID_TO_STR[_storage_type(self.handle)]
|
Storage-type of the array.
|
Storage-type of the array.
|
[
"Storage",
"-",
"type",
"of",
"the",
"array",
"."
] |
def stype(self):
"""Storage-type of the array.
"""
return _STORAGE_TYPE_ID_TO_STR[_storage_type(self.handle)]
|
[
"def",
"stype",
"(",
"self",
")",
":",
"return",
"_STORAGE_TYPE_ID_TO_STR",
"[",
"_storage_type",
"(",
"self",
".",
"handle",
")",
"]"
] |
https://github.com/hpi-xnor/BMXNet/blob/ed0b201da6667887222b8e4b5f997c4f6b61943d/python/mxnet/ndarray/ndarray.py#L1721-L1724
|
|
mantidproject/mantid
|
03deeb89254ec4289edb8771e0188c2090a02f32
|
Framework/PythonInterface/mantid/plots/axesfunctions.py
|
python
|
contour
|
(axes, workspace, *args, **kwargs)
|
return axes.contour(x, y, z, *args, **kwargs)
|
Essentially the same as :meth:`matplotlib.axes.Axes.contour`
but calculates the countour levels. Currently this only works with
workspaces that have a constant number of bins between spectra.
:param axes: :class:`matplotlib.axes.Axes` object that will do the plotting
:param workspace: :class:`mantid.api.MatrixWorkspace` or :class:`mantid.api.IMDHistoWorkspace`
to extract the data from
:param distribution: ``None`` (default) asks the workspace. ``False`` means
divide by bin width. ``True`` means do not divide by bin width.
Applies only when the the matrix workspace is a histogram.
:param normalization: ``None`` (default) ask the workspace. Applies to MDHisto workspaces. It can override
the value from displayNormalizationHisto. It checks only if
the normalization is mantid.api.MDNormalization.NumEventsNormalization
:param indices: Specify which slice of an MDHistoWorkspace to use when plotting. Needs to be a tuple
and will be interpreted as a list of indices. You need to use ``slice(None)`` to
select which dimensions to plot. *e.g.* to select the last two axes to plot from a
3D volume use ``indices=(5, slice(None), slice(None))`` where the 5 is the bin selected
for the first axis.
:param slicepoint: Specify which slice of an MDHistoWorkspace to use when plotting in the dimension units.
You need to use ``None`` to select which dimension to plot. *e.g.* to select the last
two axes to plot from a 3D volume use ``slicepoint=(1.0, None, None)`` where the 1.0 is
the value of the dimension selected for the first axis.
:param transpose: ``bool`` to transpose the x and y axes of the plotted dimensions of an MDHistoWorkspace
|
Essentially the same as :meth:`matplotlib.axes.Axes.contour`
but calculates the countour levels. Currently this only works with
workspaces that have a constant number of bins between spectra.
|
[
"Essentially",
"the",
"same",
"as",
":",
"meth",
":",
"matplotlib",
".",
"axes",
".",
"Axes",
".",
"contour",
"but",
"calculates",
"the",
"countour",
"levels",
".",
"Currently",
"this",
"only",
"works",
"with",
"workspaces",
"that",
"have",
"a",
"constant",
"number",
"of",
"bins",
"between",
"spectra",
"."
] |
def contour(axes, workspace, *args, **kwargs):
'''
Essentially the same as :meth:`matplotlib.axes.Axes.contour`
but calculates the countour levels. Currently this only works with
workspaces that have a constant number of bins between spectra.
:param axes: :class:`matplotlib.axes.Axes` object that will do the plotting
:param workspace: :class:`mantid.api.MatrixWorkspace` or :class:`mantid.api.IMDHistoWorkspace`
to extract the data from
:param distribution: ``None`` (default) asks the workspace. ``False`` means
divide by bin width. ``True`` means do not divide by bin width.
Applies only when the the matrix workspace is a histogram.
:param normalization: ``None`` (default) ask the workspace. Applies to MDHisto workspaces. It can override
the value from displayNormalizationHisto. It checks only if
the normalization is mantid.api.MDNormalization.NumEventsNormalization
:param indices: Specify which slice of an MDHistoWorkspace to use when plotting. Needs to be a tuple
and will be interpreted as a list of indices. You need to use ``slice(None)`` to
select which dimensions to plot. *e.g.* to select the last two axes to plot from a
3D volume use ``indices=(5, slice(None), slice(None))`` where the 5 is the bin selected
for the first axis.
:param slicepoint: Specify which slice of an MDHistoWorkspace to use when plotting in the dimension units.
You need to use ``None`` to select which dimension to plot. *e.g.* to select the last
two axes to plot from a 3D volume use ``slicepoint=(1.0, None, None)`` where the 1.0 is
the value of the dimension selected for the first axis.
:param transpose: ``bool`` to transpose the x and y axes of the plotted dimensions of an MDHistoWorkspace
'''
transpose = kwargs.pop('transpose', False)
if isinstance(workspace, mantid.dataobjects.MDHistoWorkspace):
(normalization, kwargs) = get_normalization(workspace, **kwargs)
indices, kwargs = get_indices(workspace, **kwargs)
x, y, z = get_md_data2d_bin_centers(workspace, normalization, indices, transpose)
_setLabels2D(axes, workspace, indices, transpose)
else:
(distribution, kwargs) = get_distribution(workspace, **kwargs)
(x, y, z) = get_matrix_2d_data(workspace,
distribution,
histogram2D=False,
transpose=transpose)
_setLabels2D(axes, workspace, transpose=transpose)
return axes.contour(x, y, z, *args, **kwargs)
|
[
"def",
"contour",
"(",
"axes",
",",
"workspace",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"transpose",
"=",
"kwargs",
".",
"pop",
"(",
"'transpose'",
",",
"False",
")",
"if",
"isinstance",
"(",
"workspace",
",",
"mantid",
".",
"dataobjects",
".",
"MDHistoWorkspace",
")",
":",
"(",
"normalization",
",",
"kwargs",
")",
"=",
"get_normalization",
"(",
"workspace",
",",
"*",
"*",
"kwargs",
")",
"indices",
",",
"kwargs",
"=",
"get_indices",
"(",
"workspace",
",",
"*",
"*",
"kwargs",
")",
"x",
",",
"y",
",",
"z",
"=",
"get_md_data2d_bin_centers",
"(",
"workspace",
",",
"normalization",
",",
"indices",
",",
"transpose",
")",
"_setLabels2D",
"(",
"axes",
",",
"workspace",
",",
"indices",
",",
"transpose",
")",
"else",
":",
"(",
"distribution",
",",
"kwargs",
")",
"=",
"get_distribution",
"(",
"workspace",
",",
"*",
"*",
"kwargs",
")",
"(",
"x",
",",
"y",
",",
"z",
")",
"=",
"get_matrix_2d_data",
"(",
"workspace",
",",
"distribution",
",",
"histogram2D",
"=",
"False",
",",
"transpose",
"=",
"transpose",
")",
"_setLabels2D",
"(",
"axes",
",",
"workspace",
",",
"transpose",
"=",
"transpose",
")",
"return",
"axes",
".",
"contour",
"(",
"x",
",",
"y",
",",
"z",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/mantid/plots/axesfunctions.py#L332-L371
|
|
benoitsteiner/tensorflow-opencl
|
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
|
tensorflow/python/ops/image_ops_impl.py
|
python
|
total_variation
|
(images, name=None)
|
return tot_var
|
Calculate and return the total variation for one or more images.
The total variation is the sum of the absolute differences for neighboring
pixel-values in the input images. This measures how much noise is in the
images.
This can be used as a loss-function during optimization so as to suppress
noise in images. If you have a batch of images, then you should calculate
the scalar loss-value as the sum:
`loss = tf.reduce_sum(tf.image.total_variation(images))`
This implements the anisotropic 2-D version of the formula described here:
https://en.wikipedia.org/wiki/Total_variation_denoising
Args:
images: 4-D Tensor of shape `[batch, height, width, channels]` or
3-D Tensor of shape `[height, width, channels]`.
name: A name for the operation (optional).
Raises:
ValueError: if images.shape is not a 3-D or 4-D vector.
Returns:
The total variation of `images`.
If `images` was 4-D, return a 1-D float Tensor of shape `[batch]` with the
total variation for each image in the batch.
If `images` was 3-D, return a scalar float with the total variation for
that image.
|
Calculate and return the total variation for one or more images.
|
[
"Calculate",
"and",
"return",
"the",
"total",
"variation",
"for",
"one",
"or",
"more",
"images",
"."
] |
def total_variation(images, name=None):
"""Calculate and return the total variation for one or more images.
The total variation is the sum of the absolute differences for neighboring
pixel-values in the input images. This measures how much noise is in the
images.
This can be used as a loss-function during optimization so as to suppress
noise in images. If you have a batch of images, then you should calculate
the scalar loss-value as the sum:
`loss = tf.reduce_sum(tf.image.total_variation(images))`
This implements the anisotropic 2-D version of the formula described here:
https://en.wikipedia.org/wiki/Total_variation_denoising
Args:
images: 4-D Tensor of shape `[batch, height, width, channels]` or
3-D Tensor of shape `[height, width, channels]`.
name: A name for the operation (optional).
Raises:
ValueError: if images.shape is not a 3-D or 4-D vector.
Returns:
The total variation of `images`.
If `images` was 4-D, return a 1-D float Tensor of shape `[batch]` with the
total variation for each image in the batch.
If `images` was 3-D, return a scalar float with the total variation for
that image.
"""
with ops.name_scope(name, 'total_variation'):
ndims = images.get_shape().ndims
if ndims == 3:
# The input is a single image with shape [height, width, channels].
# Calculate the difference of neighboring pixel-values.
# The images are shifted one pixel along the height and width by slicing.
pixel_dif1 = images[1:, :, :] - images[:-1, :, :]
pixel_dif2 = images[:, 1:, :] - images[:, :-1, :]
# Sum for all axis. (None is an alias for all axis.)
sum_axis = None
elif ndims == 4:
# The input is a batch of images with shape:
# [batch, height, width, channels].
# Calculate the difference of neighboring pixel-values.
# The images are shifted one pixel along the height and width by slicing.
pixel_dif1 = images[:, 1:, :, :] - images[:, :-1, :, :]
pixel_dif2 = images[:, :, 1:, :] - images[:, :, :-1, :]
# Only sum for the last 3 axis.
# This results in a 1-D tensor with the total variation for each image.
sum_axis = [1, 2, 3]
else:
raise ValueError('\'images\' must be either 3 or 4-dimensional.')
# Calculate the total variation by taking the absolute value of the
# pixel-differences and summing over the appropriate axis.
tot_var = (math_ops.reduce_sum(math_ops.abs(pixel_dif1), axis=sum_axis) +
math_ops.reduce_sum(math_ops.abs(pixel_dif2), axis=sum_axis))
return tot_var
|
[
"def",
"total_variation",
"(",
"images",
",",
"name",
"=",
"None",
")",
":",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"'total_variation'",
")",
":",
"ndims",
"=",
"images",
".",
"get_shape",
"(",
")",
".",
"ndims",
"if",
"ndims",
"==",
"3",
":",
"# The input is a single image with shape [height, width, channels].",
"# Calculate the difference of neighboring pixel-values.",
"# The images are shifted one pixel along the height and width by slicing.",
"pixel_dif1",
"=",
"images",
"[",
"1",
":",
",",
":",
",",
":",
"]",
"-",
"images",
"[",
":",
"-",
"1",
",",
":",
",",
":",
"]",
"pixel_dif2",
"=",
"images",
"[",
":",
",",
"1",
":",
",",
":",
"]",
"-",
"images",
"[",
":",
",",
":",
"-",
"1",
",",
":",
"]",
"# Sum for all axis. (None is an alias for all axis.)",
"sum_axis",
"=",
"None",
"elif",
"ndims",
"==",
"4",
":",
"# The input is a batch of images with shape:",
"# [batch, height, width, channels].",
"# Calculate the difference of neighboring pixel-values.",
"# The images are shifted one pixel along the height and width by slicing.",
"pixel_dif1",
"=",
"images",
"[",
":",
",",
"1",
":",
",",
":",
",",
":",
"]",
"-",
"images",
"[",
":",
",",
":",
"-",
"1",
",",
":",
",",
":",
"]",
"pixel_dif2",
"=",
"images",
"[",
":",
",",
":",
",",
"1",
":",
",",
":",
"]",
"-",
"images",
"[",
":",
",",
":",
",",
":",
"-",
"1",
",",
":",
"]",
"# Only sum for the last 3 axis.",
"# This results in a 1-D tensor with the total variation for each image.",
"sum_axis",
"=",
"[",
"1",
",",
"2",
",",
"3",
"]",
"else",
":",
"raise",
"ValueError",
"(",
"'\\'images\\' must be either 3 or 4-dimensional.'",
")",
"# Calculate the total variation by taking the absolute value of the",
"# pixel-differences and summing over the appropriate axis.",
"tot_var",
"=",
"(",
"math_ops",
".",
"reduce_sum",
"(",
"math_ops",
".",
"abs",
"(",
"pixel_dif1",
")",
",",
"axis",
"=",
"sum_axis",
")",
"+",
"math_ops",
".",
"reduce_sum",
"(",
"math_ops",
".",
"abs",
"(",
"pixel_dif2",
")",
",",
"axis",
"=",
"sum_axis",
")",
")",
"return",
"tot_var"
] |
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/ops/image_ops_impl.py#L1415-L1482
|
|
kamyu104/LeetCode-Solutions
|
77605708a927ea3b85aee5a479db733938c7c211
|
Python/number-of-different-subsequences-gcds.py
|
python
|
Solution.countDifferentSubsequenceGCDs
|
(self, nums)
|
return result
|
:type nums: List[int]
:rtype: int
|
:type nums: List[int]
:rtype: int
|
[
":",
"type",
"nums",
":",
"List",
"[",
"int",
"]",
":",
"rtype",
":",
"int"
] |
def countDifferentSubsequenceGCDs(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
max_num, nums_set = max(nums), set(nums)
result = 0
for i in xrange(1, max_num+1):
d = 0
for x in xrange(i, max_num+1, i):
if x not in nums_set:
continue
d = fractions.gcd(d, x) # total time: O(log(min(d, x)) = O(logd), where d keeps the same or gets smaller
if d == i:
result += 1
break
return result
|
[
"def",
"countDifferentSubsequenceGCDs",
"(",
"self",
",",
"nums",
")",
":",
"max_num",
",",
"nums_set",
"=",
"max",
"(",
"nums",
")",
",",
"set",
"(",
"nums",
")",
"result",
"=",
"0",
"for",
"i",
"in",
"xrange",
"(",
"1",
",",
"max_num",
"+",
"1",
")",
":",
"d",
"=",
"0",
"for",
"x",
"in",
"xrange",
"(",
"i",
",",
"max_num",
"+",
"1",
",",
"i",
")",
":",
"if",
"x",
"not",
"in",
"nums_set",
":",
"continue",
"d",
"=",
"fractions",
".",
"gcd",
"(",
"d",
",",
"x",
")",
"# total time: O(log(min(d, x)) = O(logd), where d keeps the same or gets smaller",
"if",
"d",
"==",
"i",
":",
"result",
"+=",
"1",
"break",
"return",
"result"
] |
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/number-of-different-subsequences-gcds.py#L8-L24
|
|
PaddlePaddle/Paddle
|
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
|
python/paddle/fluid/dygraph/jit.py
|
python
|
_dygraph_to_static_func_
|
(dygraph_func)
|
return __impl__
|
Converts imperative dygraph APIs into declarative function APIs. Decorator
@dygraph_to_static_func only converts imperative dygraph APIs into
declarative net-building APIs, which means it doesn't return immediate
digital result as imperative mode. Users should handle Program and Executor
by themselves.
Note:
This decorator is NOT our recommended way to transform imperative function
to declarative function. We will remove this decorator after we finalize
cleaning up code.
Args:
dygraph_func (callable): callable imperative function.
Returns:
Callable: converting imperative dygraph APIs into declarative
net-building APIs.
Examples:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np
from paddle.fluid.dygraph.jit import dygraph_to_static_func
@dygraph_to_static_func
def func(x):
if fluid.layers.mean(x) < 0:
x_v = x - 1
else:
x_v = x + 1
return x_v
x = fluid.layers.fill_constant(shape=[3, 3], value=0, dtype='float64')
x_v = func(x)
exe = fluid.Executor(fluid.CPUPlace())
out = exe.run(fetch_list=[x_v])
print(out[0])
# [[1. 1. 1.]
# [1. 1. 1.]
# [1. 1. 1.]]
|
Converts imperative dygraph APIs into declarative function APIs. Decorator
@dygraph_to_static_func only converts imperative dygraph APIs into
declarative net-building APIs, which means it doesn't return immediate
digital result as imperative mode. Users should handle Program and Executor
by themselves.
|
[
"Converts",
"imperative",
"dygraph",
"APIs",
"into",
"declarative",
"function",
"APIs",
".",
"Decorator",
"@dygraph_to_static_func",
"only",
"converts",
"imperative",
"dygraph",
"APIs",
"into",
"declarative",
"net",
"-",
"building",
"APIs",
"which",
"means",
"it",
"doesn",
"t",
"return",
"immediate",
"digital",
"result",
"as",
"imperative",
"mode",
".",
"Users",
"should",
"handle",
"Program",
"and",
"Executor",
"by",
"themselves",
"."
] |
def _dygraph_to_static_func_(dygraph_func):
"""
Converts imperative dygraph APIs into declarative function APIs. Decorator
@dygraph_to_static_func only converts imperative dygraph APIs into
declarative net-building APIs, which means it doesn't return immediate
digital result as imperative mode. Users should handle Program and Executor
by themselves.
Note:
This decorator is NOT our recommended way to transform imperative function
to declarative function. We will remove this decorator after we finalize
cleaning up code.
Args:
dygraph_func (callable): callable imperative function.
Returns:
Callable: converting imperative dygraph APIs into declarative
net-building APIs.
Examples:
.. code-block:: python
import paddle.fluid as fluid
import numpy as np
from paddle.fluid.dygraph.jit import dygraph_to_static_func
@dygraph_to_static_func
def func(x):
if fluid.layers.mean(x) < 0:
x_v = x - 1
else:
x_v = x + 1
return x_v
x = fluid.layers.fill_constant(shape=[3, 3], value=0, dtype='float64')
x_v = func(x)
exe = fluid.Executor(fluid.CPUPlace())
out = exe.run(fetch_list=[x_v])
print(out[0])
# [[1. 1. 1.]
# [1. 1. 1.]
# [1. 1. 1.]]
"""
# TODO: remove this decorator after we finalize training API
def __impl__(*args, **kwargs):
program_translator = ProgramTranslator()
if in_dygraph_mode() or not program_translator.enable_to_static:
logging_utils.warn(
"The decorator 'dygraph_to_static_func' doesn't work in "
"dygraph mode or set ProgramTranslator.enable to False. "
"We will just return dygraph output.")
return dygraph_func(*args, **kwargs)
static_func = program_translator.get_func(dygraph_func)
return static_func(*args, **kwargs)
return __impl__
|
[
"def",
"_dygraph_to_static_func_",
"(",
"dygraph_func",
")",
":",
"# TODO: remove this decorator after we finalize training API",
"def",
"__impl__",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"program_translator",
"=",
"ProgramTranslator",
"(",
")",
"if",
"in_dygraph_mode",
"(",
")",
"or",
"not",
"program_translator",
".",
"enable_to_static",
":",
"logging_utils",
".",
"warn",
"(",
"\"The decorator 'dygraph_to_static_func' doesn't work in \"",
"\"dygraph mode or set ProgramTranslator.enable to False. \"",
"\"We will just return dygraph output.\"",
")",
"return",
"dygraph_func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"static_func",
"=",
"program_translator",
".",
"get_func",
"(",
"dygraph_func",
")",
"return",
"static_func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"__impl__"
] |
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/dygraph/jit.py#L77-L137
|
|
vmware/concord-bft
|
ec036a384b4c81be0423d4b429bd37900b13b864
|
util/pyclient/bft_msgs.py
|
python
|
pack_batch_request
|
(client_id, num_of_messages_in_batch, msg_data, cid)
|
return data
|
Create and return a buffer with a header and message
|
Create and return a buffer with a header and message
|
[
"Create",
"and",
"return",
"a",
"buffer",
"with",
"a",
"header",
"and",
"message"
] |
def pack_batch_request(client_id, num_of_messages_in_batch, msg_data, cid):
"""Create and return a buffer with a header and message"""
header = BatchRequestHeader(len(cid), client_id, num_of_messages_in_batch, len(msg_data))
data = b''.join([pack_batch_request_header(header), cid.encode(), msg_data])
return data
|
[
"def",
"pack_batch_request",
"(",
"client_id",
",",
"num_of_messages_in_batch",
",",
"msg_data",
",",
"cid",
")",
":",
"header",
"=",
"BatchRequestHeader",
"(",
"len",
"(",
"cid",
")",
",",
"client_id",
",",
"num_of_messages_in_batch",
",",
"len",
"(",
"msg_data",
")",
")",
"data",
"=",
"b''",
".",
"join",
"(",
"[",
"pack_batch_request_header",
"(",
"header",
")",
",",
"cid",
".",
"encode",
"(",
")",
",",
"msg_data",
"]",
")",
"return",
"data"
] |
https://github.com/vmware/concord-bft/blob/ec036a384b4c81be0423d4b429bd37900b13b864/util/pyclient/bft_msgs.py#L96-L100
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/propgrid.py
|
python
|
PGVIterator.GetProperty
|
(*args, **kwargs)
|
return _propgrid.PGVIterator_GetProperty(*args, **kwargs)
|
GetProperty(self) -> PGProperty
|
GetProperty(self) -> PGProperty
|
[
"GetProperty",
"(",
"self",
")",
"-",
">",
"PGProperty"
] |
def GetProperty(*args, **kwargs):
"""GetProperty(self) -> PGProperty"""
return _propgrid.PGVIterator_GetProperty(*args, **kwargs)
|
[
"def",
"GetProperty",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_propgrid",
".",
"PGVIterator_GetProperty",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/propgrid.py#L1074-L1076
|
|
thalium/icebox
|
99d147d5b9269222225443ce171b4fd46d8985d4
|
third_party/virtualbox/src/VBox/Devices/EFI/Firmware/AppPkg/Applications/Python/PyMod-2.7.2/Lib/pydoc.py
|
python
|
isdata
|
(object)
|
return not (inspect.ismodule(object) or inspect.isclass(object) or
inspect.isroutine(object) or inspect.isframe(object) or
inspect.istraceback(object) or inspect.iscode(object))
|
Check if an object is of a type that probably means it's data.
|
Check if an object is of a type that probably means it's data.
|
[
"Check",
"if",
"an",
"object",
"is",
"of",
"a",
"type",
"that",
"probably",
"means",
"it",
"s",
"data",
"."
] |
def isdata(object):
"""Check if an object is of a type that probably means it's data."""
return not (inspect.ismodule(object) or inspect.isclass(object) or
inspect.isroutine(object) or inspect.isframe(object) or
inspect.istraceback(object) or inspect.iscode(object))
|
[
"def",
"isdata",
"(",
"object",
")",
":",
"return",
"not",
"(",
"inspect",
".",
"ismodule",
"(",
"object",
")",
"or",
"inspect",
".",
"isclass",
"(",
"object",
")",
"or",
"inspect",
".",
"isroutine",
"(",
"object",
")",
"or",
"inspect",
".",
"isframe",
"(",
"object",
")",
"or",
"inspect",
".",
"istraceback",
"(",
"object",
")",
"or",
"inspect",
".",
"iscode",
"(",
"object",
")",
")"
] |
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/VBox/Devices/EFI/Firmware/AppPkg/Applications/Python/PyMod-2.7.2/Lib/pydoc.py#L102-L106
|
|
fifengine/fifengine
|
4b62c42e85bec19893cef8e63e6855927cff2c47
|
engine/python/fife/extensions/pychan/internal.py
|
python
|
Manager.getDefaultFont
|
(self)
|
return self.fonts['default']
|
Returns the default font
|
Returns the default font
|
[
"Returns",
"the",
"default",
"font"
] |
def getDefaultFont(self):
"""
Returns the default font
"""
return self.fonts['default']
|
[
"def",
"getDefaultFont",
"(",
"self",
")",
":",
"return",
"self",
".",
"fonts",
"[",
"'default'",
"]"
] |
https://github.com/fifengine/fifengine/blob/4b62c42e85bec19893cef8e63e6855927cff2c47/engine/python/fife/extensions/pychan/internal.py#L145-L149
|
|
apache/arrow
|
af33dd1157eb8d7d9bfac25ebf61445b793b7943
|
python/pyarrow/filesystem.py
|
python
|
FileSystem.mv
|
(self, path, new_path)
|
return self.rename(path, new_path)
|
Alias for FileSystem.rename.
|
Alias for FileSystem.rename.
|
[
"Alias",
"for",
"FileSystem",
".",
"rename",
"."
] |
def mv(self, path, new_path):
"""
Alias for FileSystem.rename.
"""
return self.rename(path, new_path)
|
[
"def",
"mv",
"(",
"self",
",",
"path",
",",
"new_path",
")",
":",
"return",
"self",
".",
"rename",
"(",
"path",
",",
"new_path",
")"
] |
https://github.com/apache/arrow/blob/af33dd1157eb8d7d9bfac25ebf61445b793b7943/python/pyarrow/filesystem.py#L126-L130
|
|
apple/swift-lldb
|
d74be846ef3e62de946df343e8c234bde93a8912
|
scripts/Python/static-binding/lldb.py
|
python
|
SBListener.PeekAtNextEvent
|
(self, sb_event)
|
return _lldb.SBListener_PeekAtNextEvent(self, sb_event)
|
PeekAtNextEvent(SBListener self, SBEvent sb_event) -> bool
|
PeekAtNextEvent(SBListener self, SBEvent sb_event) -> bool
|
[
"PeekAtNextEvent",
"(",
"SBListener",
"self",
"SBEvent",
"sb_event",
")",
"-",
">",
"bool"
] |
def PeekAtNextEvent(self, sb_event):
"""PeekAtNextEvent(SBListener self, SBEvent sb_event) -> bool"""
return _lldb.SBListener_PeekAtNextEvent(self, sb_event)
|
[
"def",
"PeekAtNextEvent",
"(",
"self",
",",
"sb_event",
")",
":",
"return",
"_lldb",
".",
"SBListener_PeekAtNextEvent",
"(",
"self",
",",
"sb_event",
")"
] |
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L6870-L6872
|
|
cvxpy/cvxpy
|
5165b4fb750dfd237de8659383ef24b4b2e33aaf
|
cvxpy/expressions/expression.py
|
python
|
Expression.__gt__
|
(self, other: "Expression")
|
Unsupported.
|
Unsupported.
|
[
"Unsupported",
"."
] |
def __gt__(self, other: "Expression"):
"""Unsupported.
"""
raise NotImplementedError("Strict inequalities are not allowed.")
|
[
"def",
"__gt__",
"(",
"self",
",",
"other",
":",
"\"Expression\"",
")",
":",
"raise",
"NotImplementedError",
"(",
"\"Strict inequalities are not allowed.\"",
")"
] |
https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/expressions/expression.py#L677-L680
|
||
snap-stanford/snap-python
|
d53c51b0a26aa7e3e7400b014cdf728948fde80a
|
setup/snap.py
|
python
|
TBPGraph.GetLNIdV
|
(self, *args)
|
return _snap.TBPGraph_GetLNIdV(self, *args)
|
GetLNIdV(TBPGraph self, TIntV NIdV)
Parameters:
NIdV: TIntV &
|
GetLNIdV(TBPGraph self, TIntV NIdV)
|
[
"GetLNIdV",
"(",
"TBPGraph",
"self",
"TIntV",
"NIdV",
")"
] |
def GetLNIdV(self, *args):
"""
GetLNIdV(TBPGraph self, TIntV NIdV)
Parameters:
NIdV: TIntV &
"""
return _snap.TBPGraph_GetLNIdV(self, *args)
|
[
"def",
"GetLNIdV",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_snap",
".",
"TBPGraph_GetLNIdV",
"(",
"self",
",",
"*",
"args",
")"
] |
https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L5220-L5228
|
|
cms-sw/cmssw
|
fd9de012d503d3405420bcbeec0ec879baa57cf2
|
Validation/Tools/python/GenObject.py
|
python
|
GenObject.compareTwoTrees
|
(chain1, chain2, **kwargs)
|
return resultsDict
|
Given all of the necessary information, this routine will
go through and compare two trees making sure they are
'identical' within requested precision. If 'diffOutputName'
is passed in, a root file with a diffTree and missingTree will
be produced.
|
Given all of the necessary information, this routine will
go through and compare two trees making sure they are
'identical' within requested precision. If 'diffOutputName'
is passed in, a root file with a diffTree and missingTree will
be produced.
|
[
"Given",
"all",
"of",
"the",
"necessary",
"information",
"this",
"routine",
"will",
"go",
"through",
"and",
"compare",
"two",
"trees",
"making",
"sure",
"they",
"are",
"identical",
"within",
"requested",
"precision",
".",
"If",
"diffOutputName",
"is",
"passed",
"in",
"a",
"root",
"file",
"with",
"a",
"diffTree",
"and",
"missingTree",
"will",
"be",
"produced",
"."
] |
def compareTwoTrees (chain1, chain2, **kwargs):
"""Given all of the necessary information, this routine will
go through and compare two trees making sure they are
'identical' within requested precision. If 'diffOutputName'
is passed in, a root file with a diffTree and missingTree will
be produced."""
print("Comparing Two Trees")
diffOutputName = kwargs.get ('diffOutputName')
tupleName1 = GenObject._kitchenSinkDict[chain1]['tupleName']
numEntries1 = GenObject._kitchenSinkDict[chain1]['numEntries']
tupleName2 = GenObject._kitchenSinkDict[chain2]['tupleName']
numEntries2 = GenObject._kitchenSinkDict[chain2]['numEntries']
debug = GenObject._kitchenSinkDict.get ('debug', False)
ree1 = GenObject.getRunEventEntryDict (chain1, tupleName1, numEntries1)
ree2 = GenObject.getRunEventEntryDict (chain2, tupleName2, numEntries2)
overlap, firstOnly, secondOnly = \
GenObject.compareRunEventDicts (ree1, ree2)
if diffOutputName:
rootfile, diffTree, missingTree = \
GenObject.setupDiffOutputTree (diffOutputName,
'diffTree',
'missingTree')
if firstOnly:
vec = GenObject._rootClassDict['firstOnly']
for key in firstOnly:
runevent = GenObject._key2re (key)
vec.push_back( GenObject._rootObjectClone( runevent ) )
if secondOnly:
vec = GenObject._rootClassDict['secondOnly']
for key in secondOnly:
runevent = GenObject._key2re (key)
vec.push_back( GenObject._rootObjectClone( runevent ) )
missingTree.Fill()
resultsDict = {}
if firstOnly:
resultsDict.setdefault ('_runevent', {})['firstOnly'] = \
len (firstOnly)
if secondOnly:
resultsDict.setdefault ('_runevent', {})['secondOnly'] = \
len (secondOnly)
resultsDict['eventsCompared'] = len (overlap)
for reTuple in sorted(overlap):
# if we are filling the diff tree, then save the run and
# event information.
if diffOutputName:
GenObject._key2re (reTuple,
GenObject._rootClassDict['runevent'])
if debug: warn ('event1', blankLines = 3)
event1 = GenObject.loadEventFromTree (chain1, ree1 [reTuple])
if debug: warn ('event2', blankLines = 3)
event2 = GenObject.loadEventFromTree (chain2, ree2 [reTuple])
if GenObject._kitchenSinkDict.get('printEvent'):
print("event1:")
GenObject.printEvent (event1)
print("event2:")
GenObject.printEvent (event2)
if GenObject._kitchenSinkDict.get('blur'):
where = reTuple
GenObject.blurEvent (event1,
GenObject._kitchenSinkDict['blur'],
where)
for objName in sorted (event1.keys()):
if "runevent" == objName:
# runevent is a special case. We don't compare these
continue
if not GenObject._equivDict.get (objName):
# we don't know how to compare these objects, so
# skip them.
continue
if GenObject.isSingleton (objName):
# I'll add this in later. For now, just skip it
print("singleton")
continue
# Get ready to calculate root diff object if necessary
rootObj = 0
if diffOutputName:
rootObj = GenObject._rootObjectDict[objName]
rootObj.clear()
vec1 = event1[objName]
vec2 = event2[objName]
matchedSet, noMatch1Set, noMatch2Set = \
GenObject.pairEquivalentObjects (vec1, vec2)
if noMatch1Set or noMatch2Set:
## print "No match 1", noMatch1Set
## print "No match 2", noMatch2Set
count1 = len (noMatch1Set)
count2 = len (noMatch2Set)
key = (count1, count2)
countDict = resultsDict.\
setdefault (objName, {}).\
setdefault ('_missing', {})
if key in countDict:
countDict[key] += 1
else:
countDict[key] = 1
# should be calculating root diff objects
if diffOutputName:
# first set
for index in sorted(list(noMatch1Set)):
goObj = vec1 [index]
rootObj.firstOnly.push_back ( GenObject.\
_rootObjectClone \
(goObj) )
# second set
for index in sorted(list(noMatch2Set)):
goObj = vec2 [index]
rootObj.secondOnly.push_back ( GenObject.\
_rootObjectClone \
(goObj) )
# o.k. Now that we have them matched, let's compare
# the proper items:
for pair in sorted(list(matchedSet)):
if diffOutputName:
rootDiffObj = GenObject._rootDiffObject \
( vec1[ pair[1 - 1] ],
vec2[ pair[2 - 1] ] )
rootObj.diff.push_back ( rootDiffObj )
problems = GenObject.\
compareTwoItems (vec1[ pair[1 - 1] ],
vec2[ pair[2 - 1] ])
if problems.keys():
# pprint.pprint (problems)
for varName in problems.keys():
countDict = resultsDict.\
setdefault (objName, {}).\
setdefault ('_var', {})
if varName in countDict:
countDict[varName] += 1
else:
countDict[varName] = 1
key = 'count_%s' % objName
if key not in resultsDict:
resultsDict[key] = 0
resultsDict[key] += len (matchedSet)
# try cleaning up
del vec1
del vec2
# end for objName
if diffOutputName:
diffTree.Fill()
del event1
del event2
# end for overlap
if diffOutputName:
diffTree.Write()
missingTree.Write()
rootfile.Close()
return resultsDict
|
[
"def",
"compareTwoTrees",
"(",
"chain1",
",",
"chain2",
",",
"*",
"*",
"kwargs",
")",
":",
"print",
"(",
"\"Comparing Two Trees\"",
")",
"diffOutputName",
"=",
"kwargs",
".",
"get",
"(",
"'diffOutputName'",
")",
"tupleName1",
"=",
"GenObject",
".",
"_kitchenSinkDict",
"[",
"chain1",
"]",
"[",
"'tupleName'",
"]",
"numEntries1",
"=",
"GenObject",
".",
"_kitchenSinkDict",
"[",
"chain1",
"]",
"[",
"'numEntries'",
"]",
"tupleName2",
"=",
"GenObject",
".",
"_kitchenSinkDict",
"[",
"chain2",
"]",
"[",
"'tupleName'",
"]",
"numEntries2",
"=",
"GenObject",
".",
"_kitchenSinkDict",
"[",
"chain2",
"]",
"[",
"'numEntries'",
"]",
"debug",
"=",
"GenObject",
".",
"_kitchenSinkDict",
".",
"get",
"(",
"'debug'",
",",
"False",
")",
"ree1",
"=",
"GenObject",
".",
"getRunEventEntryDict",
"(",
"chain1",
",",
"tupleName1",
",",
"numEntries1",
")",
"ree2",
"=",
"GenObject",
".",
"getRunEventEntryDict",
"(",
"chain2",
",",
"tupleName2",
",",
"numEntries2",
")",
"overlap",
",",
"firstOnly",
",",
"secondOnly",
"=",
"GenObject",
".",
"compareRunEventDicts",
"(",
"ree1",
",",
"ree2",
")",
"if",
"diffOutputName",
":",
"rootfile",
",",
"diffTree",
",",
"missingTree",
"=",
"GenObject",
".",
"setupDiffOutputTree",
"(",
"diffOutputName",
",",
"'diffTree'",
",",
"'missingTree'",
")",
"if",
"firstOnly",
":",
"vec",
"=",
"GenObject",
".",
"_rootClassDict",
"[",
"'firstOnly'",
"]",
"for",
"key",
"in",
"firstOnly",
":",
"runevent",
"=",
"GenObject",
".",
"_key2re",
"(",
"key",
")",
"vec",
".",
"push_back",
"(",
"GenObject",
".",
"_rootObjectClone",
"(",
"runevent",
")",
")",
"if",
"secondOnly",
":",
"vec",
"=",
"GenObject",
".",
"_rootClassDict",
"[",
"'secondOnly'",
"]",
"for",
"key",
"in",
"secondOnly",
":",
"runevent",
"=",
"GenObject",
".",
"_key2re",
"(",
"key",
")",
"vec",
".",
"push_back",
"(",
"GenObject",
".",
"_rootObjectClone",
"(",
"runevent",
")",
")",
"missingTree",
".",
"Fill",
"(",
")",
"resultsDict",
"=",
"{",
"}",
"if",
"firstOnly",
":",
"resultsDict",
".",
"setdefault",
"(",
"'_runevent'",
",",
"{",
"}",
")",
"[",
"'firstOnly'",
"]",
"=",
"len",
"(",
"firstOnly",
")",
"if",
"secondOnly",
":",
"resultsDict",
".",
"setdefault",
"(",
"'_runevent'",
",",
"{",
"}",
")",
"[",
"'secondOnly'",
"]",
"=",
"len",
"(",
"secondOnly",
")",
"resultsDict",
"[",
"'eventsCompared'",
"]",
"=",
"len",
"(",
"overlap",
")",
"for",
"reTuple",
"in",
"sorted",
"(",
"overlap",
")",
":",
"# if we are filling the diff tree, then save the run and",
"# event information.",
"if",
"diffOutputName",
":",
"GenObject",
".",
"_key2re",
"(",
"reTuple",
",",
"GenObject",
".",
"_rootClassDict",
"[",
"'runevent'",
"]",
")",
"if",
"debug",
":",
"warn",
"(",
"'event1'",
",",
"blankLines",
"=",
"3",
")",
"event1",
"=",
"GenObject",
".",
"loadEventFromTree",
"(",
"chain1",
",",
"ree1",
"[",
"reTuple",
"]",
")",
"if",
"debug",
":",
"warn",
"(",
"'event2'",
",",
"blankLines",
"=",
"3",
")",
"event2",
"=",
"GenObject",
".",
"loadEventFromTree",
"(",
"chain2",
",",
"ree2",
"[",
"reTuple",
"]",
")",
"if",
"GenObject",
".",
"_kitchenSinkDict",
".",
"get",
"(",
"'printEvent'",
")",
":",
"print",
"(",
"\"event1:\"",
")",
"GenObject",
".",
"printEvent",
"(",
"event1",
")",
"print",
"(",
"\"event2:\"",
")",
"GenObject",
".",
"printEvent",
"(",
"event2",
")",
"if",
"GenObject",
".",
"_kitchenSinkDict",
".",
"get",
"(",
"'blur'",
")",
":",
"where",
"=",
"reTuple",
"GenObject",
".",
"blurEvent",
"(",
"event1",
",",
"GenObject",
".",
"_kitchenSinkDict",
"[",
"'blur'",
"]",
",",
"where",
")",
"for",
"objName",
"in",
"sorted",
"(",
"event1",
".",
"keys",
"(",
")",
")",
":",
"if",
"\"runevent\"",
"==",
"objName",
":",
"# runevent is a special case. We don't compare these",
"continue",
"if",
"not",
"GenObject",
".",
"_equivDict",
".",
"get",
"(",
"objName",
")",
":",
"# we don't know how to compare these objects, so",
"# skip them.",
"continue",
"if",
"GenObject",
".",
"isSingleton",
"(",
"objName",
")",
":",
"# I'll add this in later. For now, just skip it",
"print",
"(",
"\"singleton\"",
")",
"continue",
"# Get ready to calculate root diff object if necessary",
"rootObj",
"=",
"0",
"if",
"diffOutputName",
":",
"rootObj",
"=",
"GenObject",
".",
"_rootObjectDict",
"[",
"objName",
"]",
"rootObj",
".",
"clear",
"(",
")",
"vec1",
"=",
"event1",
"[",
"objName",
"]",
"vec2",
"=",
"event2",
"[",
"objName",
"]",
"matchedSet",
",",
"noMatch1Set",
",",
"noMatch2Set",
"=",
"GenObject",
".",
"pairEquivalentObjects",
"(",
"vec1",
",",
"vec2",
")",
"if",
"noMatch1Set",
"or",
"noMatch2Set",
":",
"## print \"No match 1\", noMatch1Set",
"## print \"No match 2\", noMatch2Set",
"count1",
"=",
"len",
"(",
"noMatch1Set",
")",
"count2",
"=",
"len",
"(",
"noMatch2Set",
")",
"key",
"=",
"(",
"count1",
",",
"count2",
")",
"countDict",
"=",
"resultsDict",
".",
"setdefault",
"(",
"objName",
",",
"{",
"}",
")",
".",
"setdefault",
"(",
"'_missing'",
",",
"{",
"}",
")",
"if",
"key",
"in",
"countDict",
":",
"countDict",
"[",
"key",
"]",
"+=",
"1",
"else",
":",
"countDict",
"[",
"key",
"]",
"=",
"1",
"# should be calculating root diff objects",
"if",
"diffOutputName",
":",
"# first set",
"for",
"index",
"in",
"sorted",
"(",
"list",
"(",
"noMatch1Set",
")",
")",
":",
"goObj",
"=",
"vec1",
"[",
"index",
"]",
"rootObj",
".",
"firstOnly",
".",
"push_back",
"(",
"GenObject",
".",
"_rootObjectClone",
"(",
"goObj",
")",
")",
"# second set",
"for",
"index",
"in",
"sorted",
"(",
"list",
"(",
"noMatch2Set",
")",
")",
":",
"goObj",
"=",
"vec2",
"[",
"index",
"]",
"rootObj",
".",
"secondOnly",
".",
"push_back",
"(",
"GenObject",
".",
"_rootObjectClone",
"(",
"goObj",
")",
")",
"# o.k. Now that we have them matched, let's compare",
"# the proper items: ",
"for",
"pair",
"in",
"sorted",
"(",
"list",
"(",
"matchedSet",
")",
")",
":",
"if",
"diffOutputName",
":",
"rootDiffObj",
"=",
"GenObject",
".",
"_rootDiffObject",
"(",
"vec1",
"[",
"pair",
"[",
"1",
"-",
"1",
"]",
"]",
",",
"vec2",
"[",
"pair",
"[",
"2",
"-",
"1",
"]",
"]",
")",
"rootObj",
".",
"diff",
".",
"push_back",
"(",
"rootDiffObj",
")",
"problems",
"=",
"GenObject",
".",
"compareTwoItems",
"(",
"vec1",
"[",
"pair",
"[",
"1",
"-",
"1",
"]",
"]",
",",
"vec2",
"[",
"pair",
"[",
"2",
"-",
"1",
"]",
"]",
")",
"if",
"problems",
".",
"keys",
"(",
")",
":",
"# pprint.pprint (problems)",
"for",
"varName",
"in",
"problems",
".",
"keys",
"(",
")",
":",
"countDict",
"=",
"resultsDict",
".",
"setdefault",
"(",
"objName",
",",
"{",
"}",
")",
".",
"setdefault",
"(",
"'_var'",
",",
"{",
"}",
")",
"if",
"varName",
"in",
"countDict",
":",
"countDict",
"[",
"varName",
"]",
"+=",
"1",
"else",
":",
"countDict",
"[",
"varName",
"]",
"=",
"1",
"key",
"=",
"'count_%s'",
"%",
"objName",
"if",
"key",
"not",
"in",
"resultsDict",
":",
"resultsDict",
"[",
"key",
"]",
"=",
"0",
"resultsDict",
"[",
"key",
"]",
"+=",
"len",
"(",
"matchedSet",
")",
"# try cleaning up",
"del",
"vec1",
"del",
"vec2",
"# end for objName ",
"if",
"diffOutputName",
":",
"diffTree",
".",
"Fill",
"(",
")",
"del",
"event1",
"del",
"event2",
"# end for overlap",
"if",
"diffOutputName",
":",
"diffTree",
".",
"Write",
"(",
")",
"missingTree",
".",
"Write",
"(",
")",
"rootfile",
".",
"Close",
"(",
")",
"return",
"resultsDict"
] |
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Validation/Tools/python/GenObject.py#L1310-L1457
|
|
wywu/LAB
|
4b6debd302ae109fd104d4dd04dccc3418ae7471
|
python/caffe/io.py
|
python
|
array_to_blobproto
|
(arr, diff=None)
|
return blob
|
Converts a N-dimensional array to blob proto. If diff is given, also
convert the diff. You need to make sure that arr and diff have the same
shape, and this function does not do sanity check.
|
Converts a N-dimensional array to blob proto. If diff is given, also
convert the diff. You need to make sure that arr and diff have the same
shape, and this function does not do sanity check.
|
[
"Converts",
"a",
"N",
"-",
"dimensional",
"array",
"to",
"blob",
"proto",
".",
"If",
"diff",
"is",
"given",
"also",
"convert",
"the",
"diff",
".",
"You",
"need",
"to",
"make",
"sure",
"that",
"arr",
"and",
"diff",
"have",
"the",
"same",
"shape",
"and",
"this",
"function",
"does",
"not",
"do",
"sanity",
"check",
"."
] |
def array_to_blobproto(arr, diff=None):
"""Converts a N-dimensional array to blob proto. If diff is given, also
convert the diff. You need to make sure that arr and diff have the same
shape, and this function does not do sanity check.
"""
blob = caffe_pb2.BlobProto()
blob.shape.dim.extend(arr.shape)
blob.data.extend(arr.astype(float).flat)
if diff is not None:
blob.diff.extend(diff.astype(float).flat)
return blob
|
[
"def",
"array_to_blobproto",
"(",
"arr",
",",
"diff",
"=",
"None",
")",
":",
"blob",
"=",
"caffe_pb2",
".",
"BlobProto",
"(",
")",
"blob",
".",
"shape",
".",
"dim",
".",
"extend",
"(",
"arr",
".",
"shape",
")",
"blob",
".",
"data",
".",
"extend",
"(",
"arr",
".",
"astype",
"(",
"float",
")",
".",
"flat",
")",
"if",
"diff",
"is",
"not",
"None",
":",
"blob",
".",
"diff",
".",
"extend",
"(",
"diff",
".",
"astype",
"(",
"float",
")",
".",
"flat",
")",
"return",
"blob"
] |
https://github.com/wywu/LAB/blob/4b6debd302ae109fd104d4dd04dccc3418ae7471/python/caffe/io.py#L36-L46
|
|
echronos/echronos
|
c996f1d2c8af6c6536205eb319c1bf1d4d84569c
|
external_tools/ply_info/example/BASIC/basparse.py
|
python
|
p_command_gosub
|
(p)
|
command : GOSUB INTEGER
|
command : GOSUB INTEGER
|
[
"command",
":",
"GOSUB",
"INTEGER"
] |
def p_command_gosub(p):
'''command : GOSUB INTEGER'''
p[0] = ('GOSUB',int(p[2]))
|
[
"def",
"p_command_gosub",
"(",
"p",
")",
":",
"p",
"[",
"0",
"]",
"=",
"(",
"'GOSUB'",
",",
"int",
"(",
"p",
"[",
"2",
"]",
")",
")"
] |
https://github.com/echronos/echronos/blob/c996f1d2c8af6c6536205eb319c1bf1d4d84569c/external_tools/ply_info/example/BASIC/basparse.py#L235-L237
|
||
appleseedhq/appleseed
|
1ba62025b5db722e179a2219d8d366c34bfaa342
|
sandbox/lib/python/site-packages/Qt.py
|
python
|
_pyside2
|
()
|
Initialise PySide2
These functions serve to test the existence of a binding
along with set it up in such a way that it aligns with
the final step; adding members from the original binding
to Qt.py
|
Initialise PySide2
|
[
"Initialise",
"PySide2"
] |
def _pyside2():
"""Initialise PySide2
These functions serve to test the existence of a binding
along with set it up in such a way that it aligns with
the final step; adding members from the original binding
to Qt.py
"""
import PySide2 as module
_setup(module, ["QtUiTools"])
Qt.__binding_version__ = module.__version__
try:
try:
# Before merge of PySide and shiboken
import shiboken2
except ImportError:
# After merge of PySide and shiboken, May 2017
from PySide2 import shiboken2
Qt.QtCompat.wrapInstance = (
lambda ptr, base=None: _wrapinstance(
shiboken2.wrapInstance, ptr, base)
)
Qt.QtCompat.getCppPointer = lambda object: \
shiboken2.getCppPointer(object)[0]
except ImportError:
pass # Optional
if hasattr(Qt, "_QtUiTools"):
Qt.QtCompat.loadUi = _loadUi
if hasattr(Qt, "_QtCore"):
Qt.__qt_version__ = Qt._QtCore.qVersion()
Qt.QtCompat.qInstallMessageHandler = _qInstallMessageHandler
Qt.QtCompat.translate = Qt._QtCore.QCoreApplication.translate
if hasattr(Qt, "_QtWidgets"):
Qt.QtCompat.setSectionResizeMode = \
Qt._QtWidgets.QHeaderView.setSectionResizeMode
_reassign_misplaced_members("PySide2")
_build_compatibility_members("PySide2")
|
[
"def",
"_pyside2",
"(",
")",
":",
"import",
"PySide2",
"as",
"module",
"_setup",
"(",
"module",
",",
"[",
"\"QtUiTools\"",
"]",
")",
"Qt",
".",
"__binding_version__",
"=",
"module",
".",
"__version__",
"try",
":",
"try",
":",
"# Before merge of PySide and shiboken",
"import",
"shiboken2",
"except",
"ImportError",
":",
"# After merge of PySide and shiboken, May 2017",
"from",
"PySide2",
"import",
"shiboken2",
"Qt",
".",
"QtCompat",
".",
"wrapInstance",
"=",
"(",
"lambda",
"ptr",
",",
"base",
"=",
"None",
":",
"_wrapinstance",
"(",
"shiboken2",
".",
"wrapInstance",
",",
"ptr",
",",
"base",
")",
")",
"Qt",
".",
"QtCompat",
".",
"getCppPointer",
"=",
"lambda",
"object",
":",
"shiboken2",
".",
"getCppPointer",
"(",
"object",
")",
"[",
"0",
"]",
"except",
"ImportError",
":",
"pass",
"# Optional",
"if",
"hasattr",
"(",
"Qt",
",",
"\"_QtUiTools\"",
")",
":",
"Qt",
".",
"QtCompat",
".",
"loadUi",
"=",
"_loadUi",
"if",
"hasattr",
"(",
"Qt",
",",
"\"_QtCore\"",
")",
":",
"Qt",
".",
"__qt_version__",
"=",
"Qt",
".",
"_QtCore",
".",
"qVersion",
"(",
")",
"Qt",
".",
"QtCompat",
".",
"qInstallMessageHandler",
"=",
"_qInstallMessageHandler",
"Qt",
".",
"QtCompat",
".",
"translate",
"=",
"Qt",
".",
"_QtCore",
".",
"QCoreApplication",
".",
"translate",
"if",
"hasattr",
"(",
"Qt",
",",
"\"_QtWidgets\"",
")",
":",
"Qt",
".",
"QtCompat",
".",
"setSectionResizeMode",
"=",
"Qt",
".",
"_QtWidgets",
".",
"QHeaderView",
".",
"setSectionResizeMode",
"_reassign_misplaced_members",
"(",
"\"PySide2\"",
")",
"_build_compatibility_members",
"(",
"\"PySide2\"",
")"
] |
https://github.com/appleseedhq/appleseed/blob/1ba62025b5db722e179a2219d8d366c34bfaa342/sandbox/lib/python/site-packages/Qt.py#L1036-L1082
|
||
PixarAnimationStudios/USD
|
faed18ce62c8736b02413635b584a2f637156bad
|
pxr/usdImaging/usdviewq/selectionDataModel.py
|
python
|
SelectionDataModel.getProps
|
(self)
|
return [self._getPropFromPath(path)
for path in self.getPropPaths()]
|
Get a list of all selected properties.
|
Get a list of all selected properties.
|
[
"Get",
"a",
"list",
"of",
"all",
"selected",
"properties",
"."
] |
def getProps(self):
"""Get a list of all selected properties."""
return [self._getPropFromPath(path)
for path in self.getPropPaths()]
|
[
"def",
"getProps",
"(",
"self",
")",
":",
"return",
"[",
"self",
".",
"_getPropFromPath",
"(",
"path",
")",
"for",
"path",
"in",
"self",
".",
"getPropPaths",
"(",
")",
"]"
] |
https://github.com/PixarAnimationStudios/USD/blob/faed18ce62c8736b02413635b584a2f637156bad/pxr/usdImaging/usdviewq/selectionDataModel.py#L943-L947
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/core/fromnumeric.py
|
python
|
argsort
|
(a, axis=-1, kind=None, order=None)
|
return _wrapfunc(a, 'argsort', axis=axis, kind=kind, order=order)
|
Returns the indices that would sort an array.
Perform an indirect sort along the given axis using the algorithm specified
by the `kind` keyword. It returns an array of indices of the same shape as
`a` that index data along the given axis in sorted order.
Parameters
----------
a : array_like
Array to sort.
axis : int or None, optional
Axis along which to sort. The default is -1 (the last axis). If None,
the flattened array is used.
kind : {'quicksort', 'mergesort', 'heapsort', 'stable'}, optional
Sorting algorithm. The default is 'quicksort'. Note that both 'stable'
and 'mergesort' use timsort under the covers and, in general, the
actual implementation will vary with data type. The 'mergesort' option
is retained for backwards compatibility.
.. versionchanged:: 1.15.0.
The 'stable' option was added.
order : str or list of str, optional
When `a` is an array with fields defined, this argument specifies
which fields to compare first, second, etc. A single field can
be specified as a string, and not all fields need be specified,
but unspecified fields will still be used, in the order in which
they come up in the dtype, to break ties.
Returns
-------
index_array : ndarray, int
Array of indices that sort `a` along the specified `axis`.
If `a` is one-dimensional, ``a[index_array]`` yields a sorted `a`.
More generally, ``np.take_along_axis(a, index_array, axis=axis)``
always yields the sorted `a`, irrespective of dimensionality.
See Also
--------
sort : Describes sorting algorithms used.
lexsort : Indirect stable sort with multiple keys.
ndarray.sort : Inplace sort.
argpartition : Indirect partial sort.
take_along_axis : Apply ``index_array`` from argsort
to an array as if by calling sort.
Notes
-----
See `sort` for notes on the different sorting algorithms.
As of NumPy 1.4.0 `argsort` works with real/complex arrays containing
nan values. The enhanced sort order is documented in `sort`.
Examples
--------
One dimensional array:
>>> x = np.array([3, 1, 2])
>>> np.argsort(x)
array([1, 2, 0])
Two-dimensional array:
>>> x = np.array([[0, 3], [2, 2]])
>>> x
array([[0, 3],
[2, 2]])
>>> ind = np.argsort(x, axis=0) # sorts along first axis (down)
>>> ind
array([[0, 1],
[1, 0]])
>>> np.take_along_axis(x, ind, axis=0) # same as np.sort(x, axis=0)
array([[0, 2],
[2, 3]])
>>> ind = np.argsort(x, axis=1) # sorts along last axis (across)
>>> ind
array([[0, 1],
[0, 1]])
>>> np.take_along_axis(x, ind, axis=1) # same as np.sort(x, axis=1)
array([[0, 3],
[2, 2]])
Indices of the sorted elements of a N-dimensional array:
>>> ind = np.unravel_index(np.argsort(x, axis=None), x.shape)
>>> ind
(array([0, 1, 1, 0]), array([0, 0, 1, 1]))
>>> x[ind] # same as np.sort(x, axis=None)
array([0, 2, 2, 3])
Sorting with keys:
>>> x = np.array([(1, 0), (0, 1)], dtype=[('x', '<i4'), ('y', '<i4')])
>>> x
array([(1, 0), (0, 1)],
dtype=[('x', '<i4'), ('y', '<i4')])
>>> np.argsort(x, order=('x','y'))
array([1, 0])
>>> np.argsort(x, order=('y','x'))
array([0, 1])
|
Returns the indices that would sort an array.
|
[
"Returns",
"the",
"indices",
"that",
"would",
"sort",
"an",
"array",
"."
] |
def argsort(a, axis=-1, kind=None, order=None):
"""
Returns the indices that would sort an array.
Perform an indirect sort along the given axis using the algorithm specified
by the `kind` keyword. It returns an array of indices of the same shape as
`a` that index data along the given axis in sorted order.
Parameters
----------
a : array_like
Array to sort.
axis : int or None, optional
Axis along which to sort. The default is -1 (the last axis). If None,
the flattened array is used.
kind : {'quicksort', 'mergesort', 'heapsort', 'stable'}, optional
Sorting algorithm. The default is 'quicksort'. Note that both 'stable'
and 'mergesort' use timsort under the covers and, in general, the
actual implementation will vary with data type. The 'mergesort' option
is retained for backwards compatibility.
.. versionchanged:: 1.15.0.
The 'stable' option was added.
order : str or list of str, optional
When `a` is an array with fields defined, this argument specifies
which fields to compare first, second, etc. A single field can
be specified as a string, and not all fields need be specified,
but unspecified fields will still be used, in the order in which
they come up in the dtype, to break ties.
Returns
-------
index_array : ndarray, int
Array of indices that sort `a` along the specified `axis`.
If `a` is one-dimensional, ``a[index_array]`` yields a sorted `a`.
More generally, ``np.take_along_axis(a, index_array, axis=axis)``
always yields the sorted `a`, irrespective of dimensionality.
See Also
--------
sort : Describes sorting algorithms used.
lexsort : Indirect stable sort with multiple keys.
ndarray.sort : Inplace sort.
argpartition : Indirect partial sort.
take_along_axis : Apply ``index_array`` from argsort
to an array as if by calling sort.
Notes
-----
See `sort` for notes on the different sorting algorithms.
As of NumPy 1.4.0 `argsort` works with real/complex arrays containing
nan values. The enhanced sort order is documented in `sort`.
Examples
--------
One dimensional array:
>>> x = np.array([3, 1, 2])
>>> np.argsort(x)
array([1, 2, 0])
Two-dimensional array:
>>> x = np.array([[0, 3], [2, 2]])
>>> x
array([[0, 3],
[2, 2]])
>>> ind = np.argsort(x, axis=0) # sorts along first axis (down)
>>> ind
array([[0, 1],
[1, 0]])
>>> np.take_along_axis(x, ind, axis=0) # same as np.sort(x, axis=0)
array([[0, 2],
[2, 3]])
>>> ind = np.argsort(x, axis=1) # sorts along last axis (across)
>>> ind
array([[0, 1],
[0, 1]])
>>> np.take_along_axis(x, ind, axis=1) # same as np.sort(x, axis=1)
array([[0, 3],
[2, 2]])
Indices of the sorted elements of a N-dimensional array:
>>> ind = np.unravel_index(np.argsort(x, axis=None), x.shape)
>>> ind
(array([0, 1, 1, 0]), array([0, 0, 1, 1]))
>>> x[ind] # same as np.sort(x, axis=None)
array([0, 2, 2, 3])
Sorting with keys:
>>> x = np.array([(1, 0), (0, 1)], dtype=[('x', '<i4'), ('y', '<i4')])
>>> x
array([(1, 0), (0, 1)],
dtype=[('x', '<i4'), ('y', '<i4')])
>>> np.argsort(x, order=('x','y'))
array([1, 0])
>>> np.argsort(x, order=('y','x'))
array([0, 1])
"""
return _wrapfunc(a, 'argsort', axis=axis, kind=kind, order=order)
|
[
"def",
"argsort",
"(",
"a",
",",
"axis",
"=",
"-",
"1",
",",
"kind",
"=",
"None",
",",
"order",
"=",
"None",
")",
":",
"return",
"_wrapfunc",
"(",
"a",
",",
"'argsort'",
",",
"axis",
"=",
"axis",
",",
"kind",
"=",
"kind",
",",
"order",
"=",
"order",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/core/fromnumeric.py#L998-L1105
|
|
ValveSoftware/source-sdk-2013
|
0d8dceea4310fde5706b3ce1c70609d72a38efdf
|
mp/src/thirdparty/protobuf-2.3.0/python/mox.py
|
python
|
MockAnything._Replay
|
(self)
|
Start replaying expected method calls.
|
Start replaying expected method calls.
|
[
"Start",
"replaying",
"expected",
"method",
"calls",
"."
] |
def _Replay(self):
"""Start replaying expected method calls."""
self._replay_mode = True
|
[
"def",
"_Replay",
"(",
"self",
")",
":",
"self",
".",
"_replay_mode",
"=",
"True"
] |
https://github.com/ValveSoftware/source-sdk-2013/blob/0d8dceea4310fde5706b3ce1c70609d72a38efdf/mp/src/thirdparty/protobuf-2.3.0/python/mox.py#L326-L329
|
||
apache/singa
|
93fd9da72694e68bfe3fb29d0183a65263d238a1
|
python/singa/sonnx.py
|
python
|
SingaFrontend._create_batchnorm
|
(cls, op, op_t)
|
return nodes
|
get a onnx node from singa _BatchNorm2d operator
Args:
op: a given operator
Args:
op_t: the tensor of the operator
Returns:
the onnx node
|
get a onnx node from singa _BatchNorm2d operator
Args:
op: a given operator
Args:
op_t: the tensor of the operator
Returns:
the onnx node
|
[
"get",
"a",
"onnx",
"node",
"from",
"singa",
"_BatchNorm2d",
"operator",
"Args",
":",
"op",
":",
"a",
"given",
"operator",
"Args",
":",
"op_t",
":",
"the",
"tensor",
"of",
"the",
"operator",
"Returns",
":",
"the",
"onnx",
"node"
] |
def _create_batchnorm(cls, op, op_t):
"""
get a onnx node from singa _BatchNorm2d operator
Args:
op: a given operator
Args:
op_t: the tensor of the operator
Returns:
the onnx node
"""
# first, we init batchnorm node
epsilon = 1e-5 # the epsilon value used in singa
bn_node = cls._common_singa_tensor_to_onnx_node(op, op_t)
bn_node.attribute.extend([
helper.make_attribute('momentum', op.handle.factor),
helper.make_attribute('epsilon', epsilon),
])
# then we add nodes of scal, bias, mean, var
nodes = []
running_values = {"mean": op.running_mean, "var": op.running_var}
for tmp_name, running_value in running_values.items():
node_name = op.name + ":" + tmp_name
bn_node.input.append(node_name)
nodes.append(bn_node)
return nodes
|
[
"def",
"_create_batchnorm",
"(",
"cls",
",",
"op",
",",
"op_t",
")",
":",
"# first, we init batchnorm node",
"epsilon",
"=",
"1e-5",
"# the epsilon value used in singa",
"bn_node",
"=",
"cls",
".",
"_common_singa_tensor_to_onnx_node",
"(",
"op",
",",
"op_t",
")",
"bn_node",
".",
"attribute",
".",
"extend",
"(",
"[",
"helper",
".",
"make_attribute",
"(",
"'momentum'",
",",
"op",
".",
"handle",
".",
"factor",
")",
",",
"helper",
".",
"make_attribute",
"(",
"'epsilon'",
",",
"epsilon",
")",
",",
"]",
")",
"# then we add nodes of scal, bias, mean, var",
"nodes",
"=",
"[",
"]",
"running_values",
"=",
"{",
"\"mean\"",
":",
"op",
".",
"running_mean",
",",
"\"var\"",
":",
"op",
".",
"running_var",
"}",
"for",
"tmp_name",
",",
"running_value",
"in",
"running_values",
".",
"items",
"(",
")",
":",
"node_name",
"=",
"op",
".",
"name",
"+",
"\":\"",
"+",
"tmp_name",
"bn_node",
".",
"input",
".",
"append",
"(",
"node_name",
")",
"nodes",
".",
"append",
"(",
"bn_node",
")",
"return",
"nodes"
] |
https://github.com/apache/singa/blob/93fd9da72694e68bfe3fb29d0183a65263d238a1/python/singa/sonnx.py#L595-L620
|
|
pytorch/pytorch
|
7176c92687d3cc847cc046bf002269c6949a21c2
|
benchmarks/distributed/rpc/rl/launcher.py
|
python
|
find_graph_variable
|
(args)
|
r"""
Determines if user specified multiple entries for a single argument, in which case
benchmark is run for each of these entries. Comma separated values in a given argument indicate multiple entries.
Output is presented so that user can use plot repo to plot the results with each of the
variable argument's entries on the x-axis. Args is modified in accordance with this.
More than 1 argument with multiple entries is not permitted.
Args:
args (dict): Dictionary containing arguments passed by the user (and default arguments)
|
r"""
Determines if user specified multiple entries for a single argument, in which case
benchmark is run for each of these entries. Comma separated values in a given argument indicate multiple entries.
Output is presented so that user can use plot repo to plot the results with each of the
variable argument's entries on the x-axis. Args is modified in accordance with this.
More than 1 argument with multiple entries is not permitted.
Args:
args (dict): Dictionary containing arguments passed by the user (and default arguments)
|
[
"r",
"Determines",
"if",
"user",
"specified",
"multiple",
"entries",
"for",
"a",
"single",
"argument",
"in",
"which",
"case",
"benchmark",
"is",
"run",
"for",
"each",
"of",
"these",
"entries",
".",
"Comma",
"separated",
"values",
"in",
"a",
"given",
"argument",
"indicate",
"multiple",
"entries",
".",
"Output",
"is",
"presented",
"so",
"that",
"user",
"can",
"use",
"plot",
"repo",
"to",
"plot",
"the",
"results",
"with",
"each",
"of",
"the",
"variable",
"argument",
"s",
"entries",
"on",
"the",
"x",
"-",
"axis",
".",
"Args",
"is",
"modified",
"in",
"accordance",
"with",
"this",
".",
"More",
"than",
"1",
"argument",
"with",
"multiple",
"entries",
"is",
"not",
"permitted",
".",
"Args",
":",
"args",
"(",
"dict",
")",
":",
"Dictionary",
"containing",
"arguments",
"passed",
"by",
"the",
"user",
"(",
"and",
"default",
"arguments",
")"
] |
def find_graph_variable(args):
r"""
Determines if user specified multiple entries for a single argument, in which case
benchmark is run for each of these entries. Comma separated values in a given argument indicate multiple entries.
Output is presented so that user can use plot repo to plot the results with each of the
variable argument's entries on the x-axis. Args is modified in accordance with this.
More than 1 argument with multiple entries is not permitted.
Args:
args (dict): Dictionary containing arguments passed by the user (and default arguments)
"""
var_types = {'world_size': int,
'state_size': str,
'nlayers': int,
'out_features': int,
'batch': str2bool}
for arg in var_types.keys():
if ',' in args[arg]:
if args.get('x_axis_name'):
raise("Only 1 x axis graph variable allowed")
args[arg] = list(map(var_types[arg], args[arg].split(','))) # convert , separated str to list
args['x_axis_name'] = arg
else:
args[arg] = var_types[arg](args[arg])
|
[
"def",
"find_graph_variable",
"(",
"args",
")",
":",
"var_types",
"=",
"{",
"'world_size'",
":",
"int",
",",
"'state_size'",
":",
"str",
",",
"'nlayers'",
":",
"int",
",",
"'out_features'",
":",
"int",
",",
"'batch'",
":",
"str2bool",
"}",
"for",
"arg",
"in",
"var_types",
".",
"keys",
"(",
")",
":",
"if",
"','",
"in",
"args",
"[",
"arg",
"]",
":",
"if",
"args",
".",
"get",
"(",
"'x_axis_name'",
")",
":",
"raise",
"(",
"\"Only 1 x axis graph variable allowed\"",
")",
"args",
"[",
"arg",
"]",
"=",
"list",
"(",
"map",
"(",
"var_types",
"[",
"arg",
"]",
",",
"args",
"[",
"arg",
"]",
".",
"split",
"(",
"','",
")",
")",
")",
"# convert , separated str to list",
"args",
"[",
"'x_axis_name'",
"]",
"=",
"arg",
"else",
":",
"args",
"[",
"arg",
"]",
"=",
"var_types",
"[",
"arg",
"]",
"(",
"args",
"[",
"arg",
"]",
")"
] |
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/benchmarks/distributed/rpc/rl/launcher.py#L81-L103
|
||
eclipse/sumo
|
7132a9b8b6eea734bdec38479026b4d8c4336d03
|
tools/traci/_routeprobe.py
|
python
|
RouteProbeDomain.sampleCurrentRouteID
|
(self, probeID)
|
return self._getUniversal(tc.VAR_SAMPLE_CURRENT, probeID)
|
sampleCurrentRouteID(string) -> string
Returns a random routeID from the distribution collected by this route
proble in the current collectin interval
|
sampleCurrentRouteID(string) -> string
Returns a random routeID from the distribution collected by this route
proble in the current collectin interval
|
[
"sampleCurrentRouteID",
"(",
"string",
")",
"-",
">",
"string",
"Returns",
"a",
"random",
"routeID",
"from",
"the",
"distribution",
"collected",
"by",
"this",
"route",
"proble",
"in",
"the",
"current",
"collectin",
"interval"
] |
def sampleCurrentRouteID(self, probeID):
"""sampleCurrentRouteID(string) -> string
Returns a random routeID from the distribution collected by this route
proble in the current collectin interval
"""
return self._getUniversal(tc.VAR_SAMPLE_CURRENT, probeID)
|
[
"def",
"sampleCurrentRouteID",
"(",
"self",
",",
"probeID",
")",
":",
"return",
"self",
".",
"_getUniversal",
"(",
"tc",
".",
"VAR_SAMPLE_CURRENT",
",",
"probeID",
")"
] |
https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/traci/_routeprobe.py#L44-L49
|
|
Kitware/VTK
|
5b4df4d90a4f31194d97d3c639dd38ea8f81e8b8
|
Wrapping/Python/vtkmodules/gtk/GtkGLExtVTKRenderWindow.py
|
python
|
GtkGLExtVTKRenderWindowBase.GetStillUpdateRate
|
(self)
|
return self._StillUpdateRate
|
Mirrors the method with the same name in
vtkRenderWindowInteractor.
|
Mirrors the method with the same name in
vtkRenderWindowInteractor.
|
[
"Mirrors",
"the",
"method",
"with",
"the",
"same",
"name",
"in",
"vtkRenderWindowInteractor",
"."
] |
def GetStillUpdateRate(self):
"""Mirrors the method with the same name in
vtkRenderWindowInteractor."""
return self._StillUpdateRate
|
[
"def",
"GetStillUpdateRate",
"(",
"self",
")",
":",
"return",
"self",
".",
"_StillUpdateRate"
] |
https://github.com/Kitware/VTK/blob/5b4df4d90a4f31194d97d3c639dd38ea8f81e8b8/Wrapping/Python/vtkmodules/gtk/GtkGLExtVTKRenderWindow.py#L103-L106
|
|
luliyucoordinate/Leetcode
|
96afcdc54807d1d184e881a075d1dbf3371e31fb
|
src/0146-LRU-Cache/0146.py
|
python
|
LRUCache.__init__
|
(self, capacity)
|
:type capacity: int
|
:type capacity: int
|
[
":",
"type",
"capacity",
":",
"int"
] |
def __init__(self, capacity):
"""
:type capacity: int
"""
self.capacity = capacity
self.cache = collections.OrderedDict()
|
[
"def",
"__init__",
"(",
"self",
",",
"capacity",
")",
":",
"self",
".",
"capacity",
"=",
"capacity",
"self",
".",
"cache",
"=",
"collections",
".",
"OrderedDict",
"(",
")"
] |
https://github.com/luliyucoordinate/Leetcode/blob/96afcdc54807d1d184e881a075d1dbf3371e31fb/src/0146-LRU-Cache/0146.py#L3-L8
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/imaplib.py
|
python
|
IMAP4.search
|
(self, charset, *criteria)
|
return self._untagged_response(typ, dat, name)
|
Search mailbox for matching messages.
(typ, [data]) = <instance>.search(charset, criterion, ...)
'data' is space separated list of matching message numbers.
If UTF8 is enabled, charset MUST be None.
|
Search mailbox for matching messages.
|
[
"Search",
"mailbox",
"for",
"matching",
"messages",
"."
] |
def search(self, charset, *criteria):
"""Search mailbox for matching messages.
(typ, [data]) = <instance>.search(charset, criterion, ...)
'data' is space separated list of matching message numbers.
If UTF8 is enabled, charset MUST be None.
"""
name = 'SEARCH'
if charset:
if self.utf8_enabled:
raise IMAP4.error("Non-None charset not valid in UTF8 mode")
typ, dat = self._simple_command(name, 'CHARSET', charset, *criteria)
else:
typ, dat = self._simple_command(name, *criteria)
return self._untagged_response(typ, dat, name)
|
[
"def",
"search",
"(",
"self",
",",
"charset",
",",
"*",
"criteria",
")",
":",
"name",
"=",
"'SEARCH'",
"if",
"charset",
":",
"if",
"self",
".",
"utf8_enabled",
":",
"raise",
"IMAP4",
".",
"error",
"(",
"\"Non-None charset not valid in UTF8 mode\"",
")",
"typ",
",",
"dat",
"=",
"self",
".",
"_simple_command",
"(",
"name",
",",
"'CHARSET'",
",",
"charset",
",",
"*",
"criteria",
")",
"else",
":",
"typ",
",",
"dat",
"=",
"self",
".",
"_simple_command",
"(",
"name",
",",
"*",
"criteria",
")",
"return",
"self",
".",
"_untagged_response",
"(",
"typ",
",",
"dat",
",",
"name",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/imaplib.py#L709-L724
|
|
cvxpy/cvxpy
|
5165b4fb750dfd237de8659383ef24b4b2e33aaf
|
cvxpy/lin_ops/lin_utils.py
|
python
|
sum_expr
|
(operators)
|
return lo.LinOp(lo.SUM, operators[0].shape, operators, None)
|
Add linear operators.
Parameters
----------
operators : list
A list of linear operators.
Returns
-------
LinOp
A LinOp representing the sum of the operators.
|
Add linear operators.
|
[
"Add",
"linear",
"operators",
"."
] |
def sum_expr(operators):
"""Add linear operators.
Parameters
----------
operators : list
A list of linear operators.
Returns
-------
LinOp
A LinOp representing the sum of the operators.
"""
return lo.LinOp(lo.SUM, operators[0].shape, operators, None)
|
[
"def",
"sum_expr",
"(",
"operators",
")",
":",
"return",
"lo",
".",
"LinOp",
"(",
"lo",
".",
"SUM",
",",
"operators",
"[",
"0",
"]",
".",
"shape",
",",
"operators",
",",
"None",
")"
] |
https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/lin_ops/lin_utils.py#L162-L175
|
|
plumonito/dtslam
|
5994bb9cf7a11981b830370db206bceb654c085d
|
3rdparty/opencv-git/doc/pattern_tools/svgfig.py
|
python
|
Plot.SVG
|
(self, trans=None)
|
return Fig(Fig(*d, **{"trans": trans})).SVG(self.last_window)
|
Apply the transformation "trans" and return an SVG object.
|
Apply the transformation "trans" and return an SVG object.
|
[
"Apply",
"the",
"transformation",
"trans",
"and",
"return",
"an",
"SVG",
"object",
"."
] |
def SVG(self, trans=None):
"""Apply the transformation "trans" and return an SVG object."""
if trans is None:
trans = self.trans
if isinstance(trans, basestring):
trans = totrans(trans)
self.last_window = window(self.xmin, self.xmax, self.ymin, self.ymax,
x=self.x, y=self.y, width=self.width, height=self.height,
xlogbase=self.xlogbase, ylogbase=self.ylogbase,
minusInfinity=self.minusInfinity, flipx=self.flipx, flipy=self.flipy)
d = ([Axes(self.xmin, self.xmax, self.ymin, self.ymax, self.atx, self.aty,
self.xticks, self.xminiticks, self.xlabels, self.xlogbase,
self.yticks, self.yminiticks, self.ylabels, self.ylogbase,
self.arrows, self.text_attr, **self.axis_attr)]
+ self.d)
return Fig(Fig(*d, **{"trans": trans})).SVG(self.last_window)
|
[
"def",
"SVG",
"(",
"self",
",",
"trans",
"=",
"None",
")",
":",
"if",
"trans",
"is",
"None",
":",
"trans",
"=",
"self",
".",
"trans",
"if",
"isinstance",
"(",
"trans",
",",
"basestring",
")",
":",
"trans",
"=",
"totrans",
"(",
"trans",
")",
"self",
".",
"last_window",
"=",
"window",
"(",
"self",
".",
"xmin",
",",
"self",
".",
"xmax",
",",
"self",
".",
"ymin",
",",
"self",
".",
"ymax",
",",
"x",
"=",
"self",
".",
"x",
",",
"y",
"=",
"self",
".",
"y",
",",
"width",
"=",
"self",
".",
"width",
",",
"height",
"=",
"self",
".",
"height",
",",
"xlogbase",
"=",
"self",
".",
"xlogbase",
",",
"ylogbase",
"=",
"self",
".",
"ylogbase",
",",
"minusInfinity",
"=",
"self",
".",
"minusInfinity",
",",
"flipx",
"=",
"self",
".",
"flipx",
",",
"flipy",
"=",
"self",
".",
"flipy",
")",
"d",
"=",
"(",
"[",
"Axes",
"(",
"self",
".",
"xmin",
",",
"self",
".",
"xmax",
",",
"self",
".",
"ymin",
",",
"self",
".",
"ymax",
",",
"self",
".",
"atx",
",",
"self",
".",
"aty",
",",
"self",
".",
"xticks",
",",
"self",
".",
"xminiticks",
",",
"self",
".",
"xlabels",
",",
"self",
".",
"xlogbase",
",",
"self",
".",
"yticks",
",",
"self",
".",
"yminiticks",
",",
"self",
".",
"ylabels",
",",
"self",
".",
"ylogbase",
",",
"self",
".",
"arrows",
",",
"self",
".",
"text_attr",
",",
"*",
"*",
"self",
".",
"axis_attr",
")",
"]",
"+",
"self",
".",
"d",
")",
"return",
"Fig",
"(",
"Fig",
"(",
"*",
"d",
",",
"*",
"*",
"{",
"\"trans\"",
":",
"trans",
"}",
")",
")",
".",
"SVG",
"(",
"self",
".",
"last_window",
")"
] |
https://github.com/plumonito/dtslam/blob/5994bb9cf7a11981b830370db206bceb654c085d/3rdparty/opencv-git/doc/pattern_tools/svgfig.py#L936-L954
|
|
ApolloAuto/apollo-platform
|
86d9dc6743b496ead18d597748ebabd34a513289
|
ros/third_party/lib_aarch64/python2.7/dist-packages/diagnostic_updater/_update_functions.py
|
python
|
FrequencyStatusParam.__init__
|
(self, freq_bound, tolerance = 0.1, window_size = 5)
|
Creates a filled-out FrequencyStatusParam.
|
Creates a filled-out FrequencyStatusParam.
|
[
"Creates",
"a",
"filled",
"-",
"out",
"FrequencyStatusParam",
"."
] |
def __init__(self, freq_bound, tolerance = 0.1, window_size = 5):
"""Creates a filled-out FrequencyStatusParam."""
self.freq_bound = freq_bound
self.tolerance = tolerance
self.window_size = window_size
|
[
"def",
"__init__",
"(",
"self",
",",
"freq_bound",
",",
"tolerance",
"=",
"0.1",
",",
"window_size",
"=",
"5",
")",
":",
"self",
".",
"freq_bound",
"=",
"freq_bound",
"self",
".",
"tolerance",
"=",
"tolerance",
"self",
".",
"window_size",
"=",
"window_size"
] |
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_aarch64/python2.7/dist-packages/diagnostic_updater/_update_functions.py#L65-L69
|
||
trilinos/Trilinos
|
6168be6dd51e35e1cd681e9c4b24433e709df140
|
packages/seacas/libraries/ioss/src/visualization/catalyst/phactori/phactori.py
|
python
|
SharedTriangleSet.CreateFromLocalProcess
|
(self, inPhactoriOperation)
|
figure out the set of triangles from the target surface which are on
this process (if any): we assume we have a triangle mesh or this code
won't work
|
figure out the set of triangles from the target surface which are on
this process (if any): we assume we have a triangle mesh or this code
won't work
|
[
"figure",
"out",
"the",
"set",
"of",
"triangles",
"from",
"the",
"target",
"surface",
"which",
"are",
"on",
"this",
"process",
"(",
"if",
"any",
")",
":",
"we",
"assume",
"we",
"have",
"a",
"triangle",
"mesh",
"or",
"this",
"code",
"won",
"t",
"work"
] |
def CreateFromLocalProcess(self, inPhactoriOperation):
"""figure out the set of triangles from the target surface which are on
this process (if any): we assume we have a triangle mesh or this code
won't work"""
#obtain pointer to the local geometry data
csdata = inPhactoriOperation.mParaViewFilter.GetClientSideObject().\
GetOutputDataObject(0)
self.numPoints = csdata.GetNumberOfPoints()
if PhactoriDbg():
myDebugPrint3(str(dir(csdata)) + "\n")
myDebugPrint3("num points: " + str(self.numPoints) + "\n")
myDebugPrint3(str(dir(vtk)) + "\n")
pntData = csdata.GetPointData()
cellData = csdata.GetCellData()
numCells = csdata.GetNumberOfCells()
gNodeIdArray = pntData.GetArray('GlobalNodeId')
#gElmtIdArray = cellData.GetArray('GlobalElementId')
#pntGeometryArray = csdata.GetPoints()
self.PointXyzs.SetNumberOfValues(self.numPoints*3)
self.NodeIds = vtk.vtkIntArray()
self.NodeIds.SetNumberOfValues(self.numPoints)
#this is stupid, there is probably a much faster way to do this
ptxyz = [0.0,0.0,0.0]
for ii in range(0, self.numPoints):
ndx = ii*3
csdata.GetPoint(ii,ptxyz)
self.PointXyzs.SetValue(ndx, ptxyz[0])
self.PointXyzs.SetValue(ndx+1, ptxyz[1])
self.PointXyzs.SetValue(ndx+2, ptxyz[2])
if(gNodeIdArray == None):
self.NodeIds.SetValue(ii, ii)
else:
self.NodeIds.SetValue(ii, gNodeIdArray.GetValue(ii))
self.Triangles.SetNumberOfValues(0)
cellPointIds = vtk.vtkIdList()
for ii in range(0, numCells):
csdata.GetCellPoints(ii, cellPointIds)
#numpoints should be 3
numids = cellPointIds.GetNumberOfIds()
#we are only doing triangles
if numids != 3:
if numids < 3:
#degenerate ? try just skipping
if PhactoriDbg():
myDebugPrint3AndException(str(ii) + " degenerate 2 point\n")
continue
if True: #for now we consider this fatal error
myDebugPrint3AndException(
"PhactoriIntersectNodeNormalsWithSurface::CreateFromLocalProcess\n"
"encountered non-triangle\n")
continue
self.Triangles.InsertNextValue(cellPointIds.GetId(0))
self.Triangles.InsertNextValue(cellPointIds.GetId(1))
self.Triangles.InsertNextValue(cellPointIds.GetId(2))
self.numTriangles = self.Triangles.GetNumberOfValues() // 3
|
[
"def",
"CreateFromLocalProcess",
"(",
"self",
",",
"inPhactoriOperation",
")",
":",
"#obtain pointer to the local geometry data",
"csdata",
"=",
"inPhactoriOperation",
".",
"mParaViewFilter",
".",
"GetClientSideObject",
"(",
")",
".",
"GetOutputDataObject",
"(",
"0",
")",
"self",
".",
"numPoints",
"=",
"csdata",
".",
"GetNumberOfPoints",
"(",
")",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"str",
"(",
"dir",
"(",
"csdata",
")",
")",
"+",
"\"\\n\"",
")",
"myDebugPrint3",
"(",
"\"num points: \"",
"+",
"str",
"(",
"self",
".",
"numPoints",
")",
"+",
"\"\\n\"",
")",
"myDebugPrint3",
"(",
"str",
"(",
"dir",
"(",
"vtk",
")",
")",
"+",
"\"\\n\"",
")",
"pntData",
"=",
"csdata",
".",
"GetPointData",
"(",
")",
"cellData",
"=",
"csdata",
".",
"GetCellData",
"(",
")",
"numCells",
"=",
"csdata",
".",
"GetNumberOfCells",
"(",
")",
"gNodeIdArray",
"=",
"pntData",
".",
"GetArray",
"(",
"'GlobalNodeId'",
")",
"#gElmtIdArray = cellData.GetArray('GlobalElementId')",
"#pntGeometryArray = csdata.GetPoints()",
"self",
".",
"PointXyzs",
".",
"SetNumberOfValues",
"(",
"self",
".",
"numPoints",
"*",
"3",
")",
"self",
".",
"NodeIds",
"=",
"vtk",
".",
"vtkIntArray",
"(",
")",
"self",
".",
"NodeIds",
".",
"SetNumberOfValues",
"(",
"self",
".",
"numPoints",
")",
"#this is stupid, there is probably a much faster way to do this",
"ptxyz",
"=",
"[",
"0.0",
",",
"0.0",
",",
"0.0",
"]",
"for",
"ii",
"in",
"range",
"(",
"0",
",",
"self",
".",
"numPoints",
")",
":",
"ndx",
"=",
"ii",
"*",
"3",
"csdata",
".",
"GetPoint",
"(",
"ii",
",",
"ptxyz",
")",
"self",
".",
"PointXyzs",
".",
"SetValue",
"(",
"ndx",
",",
"ptxyz",
"[",
"0",
"]",
")",
"self",
".",
"PointXyzs",
".",
"SetValue",
"(",
"ndx",
"+",
"1",
",",
"ptxyz",
"[",
"1",
"]",
")",
"self",
".",
"PointXyzs",
".",
"SetValue",
"(",
"ndx",
"+",
"2",
",",
"ptxyz",
"[",
"2",
"]",
")",
"if",
"(",
"gNodeIdArray",
"==",
"None",
")",
":",
"self",
".",
"NodeIds",
".",
"SetValue",
"(",
"ii",
",",
"ii",
")",
"else",
":",
"self",
".",
"NodeIds",
".",
"SetValue",
"(",
"ii",
",",
"gNodeIdArray",
".",
"GetValue",
"(",
"ii",
")",
")",
"self",
".",
"Triangles",
".",
"SetNumberOfValues",
"(",
"0",
")",
"cellPointIds",
"=",
"vtk",
".",
"vtkIdList",
"(",
")",
"for",
"ii",
"in",
"range",
"(",
"0",
",",
"numCells",
")",
":",
"csdata",
".",
"GetCellPoints",
"(",
"ii",
",",
"cellPointIds",
")",
"#numpoints should be 3",
"numids",
"=",
"cellPointIds",
".",
"GetNumberOfIds",
"(",
")",
"#we are only doing triangles",
"if",
"numids",
"!=",
"3",
":",
"if",
"numids",
"<",
"3",
":",
"#degenerate ? try just skipping",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3AndException",
"(",
"str",
"(",
"ii",
")",
"+",
"\" degenerate 2 point\\n\"",
")",
"continue",
"if",
"True",
":",
"#for now we consider this fatal error",
"myDebugPrint3AndException",
"(",
"\"PhactoriIntersectNodeNormalsWithSurface::CreateFromLocalProcess\\n\"",
"\"encountered non-triangle\\n\"",
")",
"continue",
"self",
".",
"Triangles",
".",
"InsertNextValue",
"(",
"cellPointIds",
".",
"GetId",
"(",
"0",
")",
")",
"self",
".",
"Triangles",
".",
"InsertNextValue",
"(",
"cellPointIds",
".",
"GetId",
"(",
"1",
")",
")",
"self",
".",
"Triangles",
".",
"InsertNextValue",
"(",
"cellPointIds",
".",
"GetId",
"(",
"2",
")",
")",
"self",
".",
"numTriangles",
"=",
"self",
".",
"Triangles",
".",
"GetNumberOfValues",
"(",
")",
"//",
"3"
] |
https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/libraries/ioss/src/visualization/catalyst/phactori/phactori.py#L6387-L6447
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/tarfile.py
|
python
|
TarFile.list
|
(self, verbose=True, *, members=None)
|
Print a table of contents to sys.stdout. If `verbose' is False, only
the names of the members are printed. If it is True, an `ls -l'-like
output is produced. `members' is optional and must be a subset of the
list returned by getmembers().
|
Print a table of contents to sys.stdout. If `verbose' is False, only
the names of the members are printed. If it is True, an `ls -l'-like
output is produced. `members' is optional and must be a subset of the
list returned by getmembers().
|
[
"Print",
"a",
"table",
"of",
"contents",
"to",
"sys",
".",
"stdout",
".",
"If",
"verbose",
"is",
"False",
"only",
"the",
"names",
"of",
"the",
"members",
"are",
"printed",
".",
"If",
"it",
"is",
"True",
"an",
"ls",
"-",
"l",
"-",
"like",
"output",
"is",
"produced",
".",
"members",
"is",
"optional",
"and",
"must",
"be",
"a",
"subset",
"of",
"the",
"list",
"returned",
"by",
"getmembers",
"()",
"."
] |
def list(self, verbose=True, *, members=None):
"""Print a table of contents to sys.stdout. If `verbose' is False, only
the names of the members are printed. If it is True, an `ls -l'-like
output is produced. `members' is optional and must be a subset of the
list returned by getmembers().
"""
self._check()
if members is None:
members = self
for tarinfo in members:
if verbose:
_safe_print(stat.filemode(tarinfo.mode))
_safe_print("%s/%s" % (tarinfo.uname or tarinfo.uid,
tarinfo.gname or tarinfo.gid))
if tarinfo.ischr() or tarinfo.isblk():
_safe_print("%10s" %
("%d,%d" % (tarinfo.devmajor, tarinfo.devminor)))
else:
_safe_print("%10d" % tarinfo.size)
_safe_print("%d-%02d-%02d %02d:%02d:%02d" \
% time.localtime(tarinfo.mtime)[:6])
_safe_print(tarinfo.name + ("/" if tarinfo.isdir() else ""))
if verbose:
if tarinfo.issym():
_safe_print("-> " + tarinfo.linkname)
if tarinfo.islnk():
_safe_print("link to " + tarinfo.linkname)
print()
|
[
"def",
"list",
"(",
"self",
",",
"verbose",
"=",
"True",
",",
"*",
",",
"members",
"=",
"None",
")",
":",
"self",
".",
"_check",
"(",
")",
"if",
"members",
"is",
"None",
":",
"members",
"=",
"self",
"for",
"tarinfo",
"in",
"members",
":",
"if",
"verbose",
":",
"_safe_print",
"(",
"stat",
".",
"filemode",
"(",
"tarinfo",
".",
"mode",
")",
")",
"_safe_print",
"(",
"\"%s/%s\"",
"%",
"(",
"tarinfo",
".",
"uname",
"or",
"tarinfo",
".",
"uid",
",",
"tarinfo",
".",
"gname",
"or",
"tarinfo",
".",
"gid",
")",
")",
"if",
"tarinfo",
".",
"ischr",
"(",
")",
"or",
"tarinfo",
".",
"isblk",
"(",
")",
":",
"_safe_print",
"(",
"\"%10s\"",
"%",
"(",
"\"%d,%d\"",
"%",
"(",
"tarinfo",
".",
"devmajor",
",",
"tarinfo",
".",
"devminor",
")",
")",
")",
"else",
":",
"_safe_print",
"(",
"\"%10d\"",
"%",
"tarinfo",
".",
"size",
")",
"_safe_print",
"(",
"\"%d-%02d-%02d %02d:%02d:%02d\"",
"%",
"time",
".",
"localtime",
"(",
"tarinfo",
".",
"mtime",
")",
"[",
":",
"6",
"]",
")",
"_safe_print",
"(",
"tarinfo",
".",
"name",
"+",
"(",
"\"/\"",
"if",
"tarinfo",
".",
"isdir",
"(",
")",
"else",
"\"\"",
")",
")",
"if",
"verbose",
":",
"if",
"tarinfo",
".",
"issym",
"(",
")",
":",
"_safe_print",
"(",
"\"-> \"",
"+",
"tarinfo",
".",
"linkname",
")",
"if",
"tarinfo",
".",
"islnk",
"(",
")",
":",
"_safe_print",
"(",
"\"link to \"",
"+",
"tarinfo",
".",
"linkname",
")",
"print",
"(",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/tarfile.py#L1873-L1903
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/tools/XRCed/listener.py
|
python
|
_Listener.SaveRecent
|
(self, path)
|
Append path to recently used files.
|
Append path to recently used files.
|
[
"Append",
"path",
"to",
"recently",
"used",
"files",
"."
] |
def SaveRecent(self, path):
'''Append path to recently used files.'''
g.fileHistory.AddFileToHistory(path)
|
[
"def",
"SaveRecent",
"(",
"self",
",",
"path",
")",
":",
"g",
".",
"fileHistory",
".",
"AddFileToHistory",
"(",
"path",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/XRCed/listener.py#L328-L330
|
||
BlzFans/wke
|
b0fa21158312e40c5fbd84682d643022b6c34a93
|
cygwin/lib/python2.6/multiprocessing/__init__.py
|
python
|
Pool
|
(processes=None, initializer=None, initargs=())
|
return Pool(processes, initializer, initargs)
|
Returns a process pool object
|
Returns a process pool object
|
[
"Returns",
"a",
"process",
"pool",
"object"
] |
def Pool(processes=None, initializer=None, initargs=()):
'''
Returns a process pool object
'''
from multiprocessing.pool import Pool
return Pool(processes, initializer, initargs)
|
[
"def",
"Pool",
"(",
"processes",
"=",
"None",
",",
"initializer",
"=",
"None",
",",
"initargs",
"=",
"(",
")",
")",
":",
"from",
"multiprocessing",
".",
"pool",
"import",
"Pool",
"return",
"Pool",
"(",
"processes",
",",
"initializer",
",",
"initargs",
")"
] |
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/multiprocessing/__init__.py#L222-L227
|
|
SpenceKonde/megaTinyCore
|
1c4a70b18a149fe6bcb551dfa6db11ca50b8997b
|
megaavr/tools/libs/pymcuprog/nvmserialupdi.py
|
python
|
NvmAccessProviderSerial.hold_in_reset
|
(self)
|
return
|
Hold device in reset
|
Hold device in reset
|
[
"Hold",
"device",
"in",
"reset"
] |
def hold_in_reset(self):
"""
Hold device in reset
"""
# For UPDI parts it is sufficient to enter programming mode to hold the target in reset
# Since the start function is a prerequisite to all functions in this file it can be
# assumed that programming mode already has been entered
return
|
[
"def",
"hold_in_reset",
"(",
"self",
")",
":",
"# For UPDI parts it is sufficient to enter programming mode to hold the target in reset",
"# Since the start function is a prerequisite to all functions in this file it can be",
"# assumed that programming mode already has been entered",
"return"
] |
https://github.com/SpenceKonde/megaTinyCore/blob/1c4a70b18a149fe6bcb551dfa6db11ca50b8997b/megaavr/tools/libs/pymcuprog/nvmserialupdi.py#L228-L235
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.