Column schema (name: type, length range):
nwo: string (5–106), sha: string (40), path: string (4–174), language: string (1 distinct value), identifier: string (1–140), parameters: string (0–87.7k), argument_list: string (1 distinct value), return_statement: string (0–426k), docstring: string (0–64.3k), docstring_summary: string (0–26.3k), docstring_tokens: list, function: string (18–4.83M), function_tokens: list, url: string (83–304)

nwo | sha | path | language | identifier | parameters | argument_list | return_statement | docstring | docstring_summary | docstring_tokens | function | function_tokens | url
---|---|---|---|---|---|---|---|---|---|---|---|---|---
mkdocs/mkdocs
|
cff5b55d59aa37da5aa67aadd9673167035d9d1c
|
mkdocs/utils/__init__.py
|
python
|
reduce_list
|
(data_set)
|
return [item for item in data_set if
item not in seen and not seen.add(item)]
|
Reduce duplicate items in a list and preserve order
|
Reduce duplicate items in a list and preserve order
|
[
"Reduce",
"duplicate",
"items",
"in",
"a",
"list",
"and",
"preserve",
"order"
] |
def reduce_list(data_set):
""" Reduce duplicate items in a list and preserve order """
seen = set()
return [item for item in data_set if
item not in seen and not seen.add(item)]
|
[
"def",
"reduce_list",
"(",
"data_set",
")",
":",
"seen",
"=",
"set",
"(",
")",
"return",
"[",
"item",
"for",
"item",
"in",
"data_set",
"if",
"item",
"not",
"in",
"seen",
"and",
"not",
"seen",
".",
"add",
"(",
"item",
")",
"]"
] |
https://github.com/mkdocs/mkdocs/blob/cff5b55d59aa37da5aa67aadd9673167035d9d1c/mkdocs/utils/__init__.py#L119-L123
|
|
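A minimal usage sketch for the `reduce_list` row above (added for illustration, not part of the dataset): it shows the order-preserving de-duplication idiom, which relies on `set.add` returning `None` so that `not seen.add(item)` is always true and merely records the item.

```python
def reduce_list(data_set):
    """ Reduce duplicate items in a list and preserve order """
    seen = set()
    # set.add returns None, so `not seen.add(item)` is always True; the
    # membership test does the filtering while add records the item.
    return [item for item in data_set if
            item not in seen and not seen.add(item)]

print(reduce_list([3, 1, 3, 2, 1]))  # -> [3, 1, 2]
```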
matt-graham/mici
|
aa209e2cf698bb9e0c7c733d7b6a5557ab5df190
|
mici/systems.py
|
python
|
ConstrainedEuclideanMetricSystem.__init__
|
(
self,
neg_log_dens,
constr,
metric=None,
dens_wrt_hausdorff=True,
grad_neg_log_dens=None,
jacob_constr=None,
)
|
Args:
neg_log_dens (Callable[[array], float]): Function which given a
position array returns the negative logarithm of an
unnormalized probability density on the constrained position
space with respect to the Hausdorff measure on the constraint
manifold (if `dens_wrt_hausdorff == True`) or alternatively the
negative logarithm of an unnormalized probability density on
the unconstrained (ambient) position space with respect to the
Lebesgue measure. In the former case the target distribution it
is wished to draw approximate samples from is assumed to be
directly specified by the density function on the manifold. In
the latter case the density function is instead taken to
specify a prior distribution on the ambient space with the
target distribution then corresponding to the posterior
distribution when conditioning on the (zero Lebesgue measure)
event `constr(pos) == 0`. This target posterior distribution
has support on the differentiable manifold implicitly defined
by the constraint equation, with density with respect to the
Hausdorff measure on the manifold corresponding to the ratio of
the prior density (specified by `neg_log_dens`) and the
square-root of the determinant of the Gram matrix defined by
gram(q) = jacob_constr(q) @ inv(metric) @ jacob_constr(q).T
where `jacob_constr` is the Jacobian of the constraint function
`constr` and `metric` is the matrix representation of the
metric on the ambient space.
constr (Callable[[array], array]): Function which given a position
array return as a 1D array the value of the (vector-valued)
constraint function, the zero level-set of which implicitly
defines the manifold the dynamic is simulated on.
metric (None or array or PositiveDefiniteMatrix): Matrix object
corresponding to matrix representation of metric on
*unconstrained* position space and covariance of Gaussian
marginal distribution on *unconstrained* momentum vector. If
`None` is passed (the default), the identity matrix will be
used. If a 1D array is passed then this is assumed to specify a
metric with positive diagonal matrix representation and the
array the matrix diagonal. If a 2D array is passed then this is
assumed to specify a metric with a dense positive definite
matrix representation specified by the array. Otherwise if the
value is a `mici.matrices.PositiveDefiniteMatrix` subclass it
is assumed to directly specify the metric matrix
representation.
dens_wrt_hausdorff (bool): Whether the `neg_log_dens` function
specifies the (negative logarithm) of the density of the target
distribution with respect to the Hausdorff measure on the
manifold directly (True) or alternatively the negative
logarithm of a density of a prior distribution on the
unconstrained (ambient) position space with respect to the
Lebesgue measure, with the target distribution then
corresponding to the posterior distribution when conditioning
on the event `constr(pos) == 0` (False). Note that in the former
case the base Hausdorff measure on the manifold depends on the
metric defined on the ambient space, with the Hausdorff measure
being defined with respect to the metric induced on the
manifold from this ambient metric.
grad_neg_log_dens (
None or Callable[[array], array or Tuple[array, float]]):
Function which given a position array returns the derivative of
`neg_log_dens` with respect to the position array argument.
Optionally the function may instead return a 2-tuple of values
with the first being the array corresponding to the derivative
and the second being the value of the `neg_log_dens` evaluated
at the passed position array. If `None` is passed (the default)
an automatic differentiation fallback will be used to attempt
to construct a function to compute the derivative (and value)
of `neg_log_dens` automatically.
jacob_constr (
None or Callable[[array], array or Tuple[array, array]]):
Function which given a position array computes the Jacobian
(matrix / 2D array of partial derivatives) of the output of the
constraint function `c = constr(q)` with respect to the position
array argument `q`, returning the computed Jacobian as a 2D
array `jacob` with
jacob[i, j] = ∂c[i] / ∂q[j]
Optionally the function may instead return a 2-tuple of values
with the first being the array corresponding to the Jacobian and
the second being the value of `constr` evaluated at the passed
position array. If `None` is passed (the default) an automatic
differentiation fallback will be used to attempt to construct a
function to compute the Jacobian (and value) of `constr`
automatically.
|
Args:
neg_log_dens (Callable[[array], float]): Function which given a
position array returns the negative logarithm of an
unnormalized probability density on the constrained position
space with respect to the Hausdorff measure on the constraint
manifold (if `dens_wrt_hausdorff == True`) or alternatively the
negative logarithm of an unnormalized probability density on
the unconstrained (ambient) position space with respect to the
Lebesgue measure. In the former case the target distribution it
is wished to draw approximate samples from is assumed to be
directly specified by the density function on the manifold. In
the latter case the density function is instead taken to
specify a prior distribution on the ambient space with the
target distribution then corresponding to the posterior
distribution when conditioning on the (zero Lebesgue measure)
event `constr(pos) == 0`. This target posterior distribution
has support on the differentiable manifold implicitly defined
by the constraint equation, with density with respect to the
Hausdorff measure on the manifold corresponding to the ratio of
the prior density (specified by `neg_log_dens`) and the
square-root of the determinant of the Gram matrix defined by
|
[
"Args",
":",
"neg_log_dens",
"(",
"Callable",
"[[",
"array",
"]",
"float",
"]",
")",
":",
"Function",
"which",
"given",
"a",
"position",
"array",
"returns",
"the",
"negative",
"logarithm",
"of",
"an",
"unnormalized",
"probability",
"density",
"on",
"the",
"constrained",
"position",
"space",
"with",
"respect",
"to",
"the",
"Hausdorff",
"measure",
"on",
"the",
"constraint",
"manifold",
"(",
"if",
"dens_wrt_hausdorff",
"==",
"True",
")",
"or",
"alternatively",
"the",
"negative",
"logarithm",
"of",
"an",
"unnormalized",
"probability",
"density",
"on",
"the",
"unconstrained",
"(",
"ambient",
")",
"position",
"space",
"with",
"respect",
"to",
"the",
"Lebesgue",
"measure",
".",
"In",
"the",
"former",
"case",
"the",
"target",
"distribution",
"it",
"is",
"wished",
"to",
"draw",
"approximate",
"samples",
"from",
"is",
"assumed",
"to",
"be",
"directly",
"specified",
"by",
"the",
"density",
"function",
"on",
"the",
"manifold",
".",
"In",
"the",
"latter",
"case",
"the",
"density",
"function",
"is",
"instead",
"taken",
"to",
"specify",
"a",
"prior",
"distribution",
"on",
"the",
"ambient",
"space",
"with",
"the",
"target",
"distribution",
"then",
"corresponding",
"to",
"the",
"posterior",
"distribution",
"when",
"conditioning",
"on",
"the",
"(",
"zero",
"Lebesgue",
"measure",
")",
"event",
"constr",
"(",
"pos",
")",
"==",
"0",
".",
"This",
"target",
"posterior",
"distribution",
"has",
"support",
"on",
"the",
"differentiable",
"manifold",
"implicitly",
"defined",
"by",
"the",
"constraint",
"equation",
"with",
"density",
"with",
"respect",
"to",
"the",
"Hausdorff",
"measure",
"on",
"the",
"manifold",
"corresponding",
"to",
"the",
"ratio",
"of",
"the",
"prior",
"density",
"(",
"specified",
"by",
"neg_log_dens",
")",
"and",
"the",
"square",
"-",
"root",
"of",
"the",
"determinant",
"of",
"the",
"Gram",
"matrix",
"defined",
"by"
] |
def __init__(
self,
neg_log_dens,
constr,
metric=None,
dens_wrt_hausdorff=True,
grad_neg_log_dens=None,
jacob_constr=None,
):
"""
Args:
neg_log_dens (Callable[[array], float]): Function which given a
position array returns the negative logarithm of an
unnormalized probability density on the constrained position
space with respect to the Hausdorff measure on the constraint
manifold (if `dens_wrt_hausdorff == True`) or alternatively the
negative logarithm of an unnormalized probability density on
the unconstrained (ambient) position space with respect to the
Lebesgue measure. In the former case the target distribution it
is wished to draw approximate samples from is assumed to be
directly specified by the density function on the manifold. In
the latter case the density function is instead taken to
specify a prior distribution on the ambient space with the
target distribution then corresponding to the posterior
distribution when conditioning on the (zero Lebesgue measure)
event `constr(pos) == 0`. This target posterior distribution
has support on the differentiable manifold implicitly defined
by the constraint equation, with density with respect to the
Hausdorff measure on the manifold corresponding to the ratio of
the prior density (specified by `neg_log_dens`) and the
square-root of the determinant of the Gram matrix defined by
gram(q) = jacob_constr(q) @ inv(metric) @ jacob_constr(q).T
where `jacob_constr` is the Jacobian of the constraint function
`constr` and `metric` is the matrix representation of the
metric on the ambient space.
constr (Callable[[array], array]): Function which given a position
array return as a 1D array the value of the (vector-valued)
constraint function, the zero level-set of which implicitly
defines the manifold the dynamic is simulated on.
metric (None or array or PositiveDefiniteMatrix): Matrix object
corresponding to matrix representation of metric on
*unconstrained* position space and covariance of Gaussian
marginal distribution on *unconstrained* momentum vector. If
`None` is passed (the default), the identity matrix will be
used. If a 1D array is passed then this is assumed to specify a
metric with positive diagonal matrix representation and the
array the matrix diagonal. If a 2D array is passed then this is
assumed to specify a metric with a dense positive definite
matrix representation specified by the array. Otherwise if the
value is a `mici.matrices.PositiveDefiniteMatrix` subclass it
is assumed to directly specify the metric matrix
representation.
dens_wrt_hausdorff (bool): Whether the `neg_log_dens` function
specifies the (negative logarithm) of the density of the target
distribution with respect to the Hausdorff measure on the
manifold directly (True) or alternatively the negative
logarithm of a density of a prior distribution on the
unconstrained (ambient) position space with respect to the
Lebesgue measure, with the target distribution then
corresponding to the posterior distribution when conditioning
on the event `constr(pos) == 0` (False). Note that in the former
case the base Hausdorff measure on the manifold depends on the
metric defined on the ambient space, with the Hausdorff measure
being defined with respect to the metric induced on the
manifold from this ambient metric.
grad_neg_log_dens (
None or Callable[[array], array or Tuple[array, float]]):
Function which given a position array returns the derivative of
`neg_log_dens` with respect to the position array argument.
Optionally the function may instead return a 2-tuple of values
with the first being the array corresponding to the derivative
and the second being the value of the `neg_log_dens` evaluated
at the passed position array. If `None` is passed (the default)
an automatic differentiation fallback will be used to attempt
to construct a function to compute the derivative (and value)
of `neg_log_dens` automatically.
jacob_constr (
None or Callable[[array], array or Tuple[array, array]]):
Function which given a position array computes the Jacobian
(matrix / 2D array of partial derivatives) of the output of the
constraint function `c = constr(q)` with respect to the position
array argument `q`, returning the computed Jacobian as a 2D
array `jacob` with
jacob[i, j] = ∂c[i] / ∂q[j]
Optionally the function may instead return a 2-tuple of values
with the first being the array corresponding to the Jacobian and
the second being the value of `constr` evaluated at the passed
position array. If `None` is passed (the default) an automatic
differentiation fallback will be used to attempt to construct a
function to compute the Jacobian (and value) of `constr`
automatically.
"""
super().__init__(
neg_log_dens=neg_log_dens,
metric=metric,
grad_neg_log_dens=grad_neg_log_dens,
)
self._constr = constr
self.dens_wrt_hausdorff = dens_wrt_hausdorff
self._jacob_constr = autodiff_fallback(
jacob_constr, constr, "jacobian_and_value", "jacob_constr"
)
|
[
"def",
"__init__",
"(",
"self",
",",
"neg_log_dens",
",",
"constr",
",",
"metric",
"=",
"None",
",",
"dens_wrt_hausdorff",
"=",
"True",
",",
"grad_neg_log_dens",
"=",
"None",
",",
"jacob_constr",
"=",
"None",
",",
")",
":",
"super",
"(",
")",
".",
"__init__",
"(",
"neg_log_dens",
"=",
"neg_log_dens",
",",
"metric",
"=",
"metric",
",",
"grad_neg_log_dens",
"=",
"grad_neg_log_dens",
",",
")",
"self",
".",
"_constr",
"=",
"constr",
"self",
".",
"dens_wrt_hausdorff",
"=",
"dens_wrt_hausdorff",
"self",
".",
"_jacob_constr",
"=",
"autodiff_fallback",
"(",
"jacob_constr",
",",
"constr",
",",
"\"jacobian_and_value\"",
",",
"\"jacob_constr\"",
")"
] |
https://github.com/matt-graham/mici/blob/aa209e2cf698bb9e0c7c733d7b6a5557ab5df190/mici/systems.py#L459-L564
|
||
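The Gram-matrix correction described in the `neg_log_dens` docstring above can be checked numerically. The sketch below is an illustrative assumption, not mici code: it uses the hypothetical unit-circle constraint `constr(q) = q @ q - 1` with an identity ambient metric.

```python
import numpy as np

def jacob_constr(q):
    # 1 x 2 Jacobian of the hypothetical constraint constr(q) = q @ q - 1
    return 2.0 * q[None, :]

q = np.array([1.0, 0.0])   # a point on the manifold constr(q) == 0
metric = np.eye(2)         # identity ambient metric
gram = jacob_constr(q) @ np.linalg.inv(metric) @ jacob_constr(q).T

# The density wrt the Hausdorff measure divides the ambient prior density
# by sqrt(det(gram)); here det(gram) == 4, so the correction factor is 2.
print(np.sqrt(np.linalg.det(gram)))  # -> 2.0
```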
SanPen/GridCal
|
d3f4566d2d72c11c7e910c9d162538ef0e60df31
|
src/GridCal/Gui/GuiFunctions.py
|
python
|
PandasModel.columnCount
|
(self, parent=None)
|
return self.c
|
:param parent:
:return:
|
[] |
def columnCount(self, parent=None):
"""
:param parent:
:return:
"""
return self.c
|
[
"def",
"columnCount",
"(",
"self",
",",
"parent",
"=",
"None",
")",
":",
"return",
"self",
".",
"c"
] |
https://github.com/SanPen/GridCal/blob/d3f4566d2d72c11c7e910c9d162538ef0e60df31/src/GridCal/Gui/GuiFunctions.py#L321-L327
|
||
Chaffelson/nipyapi
|
d3b186fd701ce308c2812746d98af9120955e810
|
nipyapi/nifi/models/bulletin_dto.py
|
python
|
BulletinDTO.timestamp
|
(self, timestamp)
|
Sets the timestamp of this BulletinDTO.
When this bulletin was generated.
:param timestamp: The timestamp of this BulletinDTO.
:type: str
|
Sets the timestamp of this BulletinDTO.
When this bulletin was generated.
|
[
"Sets",
"the",
"timestamp",
"of",
"this",
"BulletinDTO",
".",
"When",
"this",
"bulletin",
"was",
"generated",
"."
] |
def timestamp(self, timestamp):
"""
Sets the timestamp of this BulletinDTO.
When this bulletin was generated.
:param timestamp: The timestamp of this BulletinDTO.
:type: str
"""
self._timestamp = timestamp
|
[
"def",
"timestamp",
"(",
"self",
",",
"timestamp",
")",
":",
"self",
".",
"_timestamp",
"=",
"timestamp"
] |
https://github.com/Chaffelson/nipyapi/blob/d3b186fd701ce308c2812746d98af9120955e810/nipyapi/nifi/models/bulletin_dto.py#L287-L296
|
||
krintoxi/NoobSec-Toolkit
|
38738541cbc03cedb9a3b3ed13b629f781ad64f6
|
NoobSecToolkit - MAC OSX/scripts/sshbackdoors/backdoors/shell/pupy/pupy/pupylib/PupyCmd.py
|
python
|
WindowsColoredStdout.read
|
(self, *args, **kwargs)
|
[] |
def read(self, *args, **kwargs):
sys.stdout.read(*args, **kwargs)
|
[
"def",
"read",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"sys",
".",
"stdout",
".",
"read",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/krintoxi/NoobSec-Toolkit/blob/38738541cbc03cedb9a3b3ed13b629f781ad64f6/NoobSecToolkit - MAC OSX/scripts/sshbackdoors/backdoors/shell/pupy/pupy/pupylib/PupyCmd.py#L144-L145
|
||||
Pyomo/pyomo
|
dbd4faee151084f343b893cc2b0c04cf2b76fd92
|
pyomo/core/base/units_container.py
|
python
|
_PyomoUnit.__call__
|
(self, exception=True)
|
return 1.0
|
Unit is treated as a constant value, and this method always returns 1.0
Returns
-------
: float
Returns 1.0
|
Unit is treated as a constant value, and this method always returns 1.0
|
[
"Unit",
"is",
"treated",
"as",
"a",
"constant",
"value",
"and",
"this",
"method",
"always",
"returns",
"1",
".",
"0"
] |
def __call__(self, exception=True):
"""Unit is treated as a constant value, and this method always returns 1.0
Returns
-------
: float
Returns 1.0
"""
return 1.0
|
[
"def",
"__call__",
"(",
"self",
",",
"exception",
"=",
"True",
")",
":",
"return",
"1.0"
] |
https://github.com/Pyomo/pyomo/blob/dbd4faee151084f343b893cc2b0c04cf2b76fd92/pyomo/core/base/units_container.py#L369-L377
|
|
hardbyte/python-can
|
e7a2b040ee1f0cdd7fd77fbfef0454353166b333
|
can/interfaces/ixxat/canlib_vcinpl.py
|
python
|
IXXATBus.flush_tx_buffer
|
(self)
|
Flushes the transmit buffer on the IXXAT
|
Flushes the transmit buffer on the IXXAT
|
[
"Flushes",
"the",
"transmit",
"buffer",
"on",
"the",
"IXXAT"
] |
def flush_tx_buffer(self):
"""Flushes the transmit buffer on the IXXAT"""
# TODO #64: no timeout?
_canlib.canChannelWaitTxEvent(self._channel_handle, constants.INFINITE)
|
[
"def",
"flush_tx_buffer",
"(",
"self",
")",
":",
"# TODO #64: no timeout?",
"_canlib",
".",
"canChannelWaitTxEvent",
"(",
"self",
".",
"_channel_handle",
",",
"constants",
".",
"INFINITE",
")"
] |
https://github.com/hardbyte/python-can/blob/e7a2b040ee1f0cdd7fd77fbfef0454353166b333/can/interfaces/ixxat/canlib_vcinpl.py#L635-L638
|
||
sentinel-hub/sentinelhub-py
|
d7ad283cf9d4bd4c8c1a8b169cdbe37c5bc8208a
|
sentinelhub/sentinelhub_batch.py
|
python
|
SentinelHubBatch.delete_collection
|
(self, collection)
|
return self.client.get_json(
url=self._get_collections_url(collection_id),
request_type=RequestType.DELETE,
use_session=True
)
|
Delete an existing batch collection
`Batch API reference <https://docs.sentinel-hub.com/api/latest/reference/#operation/deleteBatchCollection>`__
:param collection: Batch collection id or object
:type collection: str or BatchCollection
|
Delete an existing batch collection
|
[
"Delete",
"an",
"existing",
"batch",
"collection"
] |
def delete_collection(self, collection):
""" Delete an existing batch collection
`Batch API reference <https://docs.sentinel-hub.com/api/latest/reference/#operation/deleteBatchCollection>`__
:param collection: Batch collection id or object
:type collection: str or BatchCollection
"""
collection_id = self._parse_collection_id(collection)
return self.client.get_json(
url=self._get_collections_url(collection_id),
request_type=RequestType.DELETE,
use_session=True
)
|
[
"def",
"delete_collection",
"(",
"self",
",",
"collection",
")",
":",
"collection_id",
"=",
"self",
".",
"_parse_collection_id",
"(",
"collection",
")",
"return",
"self",
".",
"client",
".",
"get_json",
"(",
"url",
"=",
"self",
".",
"_get_collections_url",
"(",
"collection_id",
")",
",",
"request_type",
"=",
"RequestType",
".",
"DELETE",
",",
"use_session",
"=",
"True",
")"
] |
https://github.com/sentinel-hub/sentinelhub-py/blob/d7ad283cf9d4bd4c8c1a8b169cdbe37c5bc8208a/sentinelhub/sentinelhub_batch.py#L459-L472
|
|
openmc-dev/openmc
|
0cf7d9283786677e324bfbdd0984a54d1c86dacc
|
openmc/data/resonance_covariance.py
|
python
|
MultiLevelBreitWignerCovariance.from_endf
|
(cls, ev, file_obj, items, resonance)
|
return mlbw
|
Create MLBW covariance data from an ENDF evaluation.
Parameters
----------
ev : openmc.data.endf.Evaluation
ENDF evaluation
file_obj : file-like object
ENDF file positioned at the second record of a resonance range
subsection in MF=32, MT=151
items : list
Items from the CONT record at the start of the resonance range
subsection
resonance : openmc.data.ResonanceRange object
Corresponding resonance range with File 2 data.
Returns
-------
openmc.data.MultiLevelBreitWignerCovariance
Multi-level Breit-Wigner resonance covariance parameters
|
Create MLBW covariance data from an ENDF evaluation.
|
[
"Create",
"MLBW",
"covariance",
"data",
"from",
"an",
"ENDF",
"evaluation",
"."
] |
def from_endf(cls, ev, file_obj, items, resonance):
"""Create MLBW covariance data from an ENDF evaluation.
Parameters
----------
ev : openmc.data.endf.Evaluation
ENDF evaluation
file_obj : file-like object
ENDF file positioned at the second record of a resonance range
subsection in MF=32, MT=151
items : list
Items from the CONT record at the start of the resonance range
subsection
resonance : openmc.data.ResonanceRange object
Corresponding resonance range with File 2 data.
Returns
-------
openmc.data.MultiLevelBreitWignerCovariance
Multi-level Breit-Wigner resonance covariance parameters
"""
# Read energy-dependent scattering radius if present
energy_min, energy_max = items[0:2]
nro, naps = items[4:6]
if nro != 0:
params, ape = endf.get_tab1_record(file_obj)
# Other scatter radius parameters
items = endf.get_cont_record(file_obj)
target_spin = items[0]
lcomp = items[3] # Flag for compatibility 0, 1, 2 - 2 is compact form
nls = items[4] # number of l-values
# Build covariance matrix for General Resolved Resonance Formats
if lcomp == 1:
items = endf.get_cont_record(file_obj)
# Number of short range type resonance covariances
num_short_range = items[4]
# Number of long range type resonance covariances
num_long_range = items[5]
# Read resonance widths, J values, etc
records = []
for i in range(num_short_range):
items, values = endf.get_list_record(file_obj)
mpar = items[2]
num_res = items[5]
num_par_vals = num_res*6
res_values = values[:num_par_vals]
cov_values = values[num_par_vals:]
energy = res_values[0::6]
spin = res_values[1::6]
gt = res_values[2::6]
gn = res_values[3::6]
gg = res_values[4::6]
gf = res_values[5::6]
for i, E in enumerate(energy):
records.append([energy[i], spin[i], gt[i], gn[i],
gg[i], gf[i]])
# Build the upper-triangular covariance matrix
cov_dim = mpar*num_res
cov = np.zeros([cov_dim, cov_dim])
indices = np.triu_indices(cov_dim)
cov[indices] = cov_values
# Compact format - Resonances and individual uncertainties followed by
# compact correlations
elif lcomp == 2:
items, values = endf.get_list_record(file_obj)
mean = items
num_res = items[5]
energy = values[0::12]
spin = values[1::12]
gt = values[2::12]
gn = values[3::12]
gg = values[4::12]
gf = values[5::12]
par_unc = []
for i in range(num_res):
res_unc = values[i*12+6 : i*12+12]
# Delete 0 values (not provided, no fission width)
# DAJ/DGT always zero, DGF sometimes nonzero [1, 2, 5]
res_unc_nonzero = []
for j in range(6):
if j in [1, 2, 5] and res_unc[j] != 0.0:
res_unc_nonzero.append(res_unc[j])
elif j in [0, 3, 4]:
res_unc_nonzero.append(res_unc[j])
par_unc.extend(res_unc_nonzero)
records = []
for i, E in enumerate(energy):
records.append([energy[i], spin[i], gt[i], gn[i],
gg[i], gf[i]])
corr = endf.get_intg_record(file_obj)
cov = np.diag(par_unc).dot(corr).dot(np.diag(par_unc))
# Compatible resolved resonance format
elif lcomp == 0:
cov = np.zeros([4, 4])
records = []
cov_index = 0
for i in range(nls):
items, values = endf.get_list_record(file_obj)
num_res = items[5]
for j in range(num_res):
one_res = values[18*j:18*(j+1)]
res_values = one_res[:6]
cov_values = one_res[6:]
records.append(list(res_values))
# Populate the covariance matrix for this resonance
# There are no covariances between resonances in lcomp=0
cov[cov_index, cov_index] = cov_values[0]
cov[cov_index+1, cov_index+1 : cov_index+2] = cov_values[1:2]
cov[cov_index+1, cov_index+3] = cov_values[4]
cov[cov_index+2, cov_index+2] = cov_values[3]
cov[cov_index+2, cov_index+3] = cov_values[5]
cov[cov_index+3, cov_index+3] = cov_values[6]
cov_index += 4
if j < num_res-1: # Pad matrix for additional values
cov = np.pad(cov, ((0, 4), (0, 4)), 'constant',
constant_values=0)
# Create pandas DataFrame with resonance data, currently
# redundant with data.IncidentNeutron.resonance
columns = ['energy', 'J', 'totalWidth', 'neutronWidth',
'captureWidth', 'fissionWidth']
parameters = pd.DataFrame.from_records(records, columns=columns)
# Determine mpar (number of parameters for each resonance in
# covariance matrix)
nparams, params = parameters.shape
covsize = cov.shape[0]
mpar = int(covsize/nparams)
# Add parameters from File 2
parameters = _add_file2_contributions(parameters,
resonance.parameters)
# Create instance of class
mlbw = cls(energy_min, energy_max, parameters, cov, mpar, lcomp,
resonance)
return mlbw
|
[
"def",
"from_endf",
"(",
"cls",
",",
"ev",
",",
"file_obj",
",",
"items",
",",
"resonance",
")",
":",
"# Read energy-dependent scattering radius if present",
"energy_min",
",",
"energy_max",
"=",
"items",
"[",
"0",
":",
"2",
"]",
"nro",
",",
"naps",
"=",
"items",
"[",
"4",
":",
"6",
"]",
"if",
"nro",
"!=",
"0",
":",
"params",
",",
"ape",
"=",
"endf",
".",
"get_tab1_record",
"(",
"file_obj",
")",
"# Other scatter radius parameters",
"items",
"=",
"endf",
".",
"get_cont_record",
"(",
"file_obj",
")",
"target_spin",
"=",
"items",
"[",
"0",
"]",
"lcomp",
"=",
"items",
"[",
"3",
"]",
"# Flag for compatibility 0, 1, 2 - 2 is compact form",
"nls",
"=",
"items",
"[",
"4",
"]",
"# number of l-values",
"# Build covariance matrix for General Resolved Resonance Formats",
"if",
"lcomp",
"==",
"1",
":",
"items",
"=",
"endf",
".",
"get_cont_record",
"(",
"file_obj",
")",
"# Number of short range type resonance covariances",
"num_short_range",
"=",
"items",
"[",
"4",
"]",
"# Number of long range type resonance covariances",
"num_long_range",
"=",
"items",
"[",
"5",
"]",
"# Read resonance widths, J values, etc",
"records",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"num_short_range",
")",
":",
"items",
",",
"values",
"=",
"endf",
".",
"get_list_record",
"(",
"file_obj",
")",
"mpar",
"=",
"items",
"[",
"2",
"]",
"num_res",
"=",
"items",
"[",
"5",
"]",
"num_par_vals",
"=",
"num_res",
"*",
"6",
"res_values",
"=",
"values",
"[",
":",
"num_par_vals",
"]",
"cov_values",
"=",
"values",
"[",
"num_par_vals",
":",
"]",
"energy",
"=",
"res_values",
"[",
"0",
":",
":",
"6",
"]",
"spin",
"=",
"res_values",
"[",
"1",
":",
":",
"6",
"]",
"gt",
"=",
"res_values",
"[",
"2",
":",
":",
"6",
"]",
"gn",
"=",
"res_values",
"[",
"3",
":",
":",
"6",
"]",
"gg",
"=",
"res_values",
"[",
"4",
":",
":",
"6",
"]",
"gf",
"=",
"res_values",
"[",
"5",
":",
":",
"6",
"]",
"for",
"i",
",",
"E",
"in",
"enumerate",
"(",
"energy",
")",
":",
"records",
".",
"append",
"(",
"[",
"energy",
"[",
"i",
"]",
",",
"spin",
"[",
"i",
"]",
",",
"gt",
"[",
"i",
"]",
",",
"gn",
"[",
"i",
"]",
",",
"gg",
"[",
"i",
"]",
",",
"gf",
"[",
"i",
"]",
"]",
")",
"# Build the upper-triangular covariance matrix",
"cov_dim",
"=",
"mpar",
"*",
"num_res",
"cov",
"=",
"np",
".",
"zeros",
"(",
"[",
"cov_dim",
",",
"cov_dim",
"]",
")",
"indices",
"=",
"np",
".",
"triu_indices",
"(",
"cov_dim",
")",
"cov",
"[",
"indices",
"]",
"=",
"cov_values",
"# Compact format - Resonances and individual uncertainties followed by",
"# compact correlations",
"elif",
"lcomp",
"==",
"2",
":",
"items",
",",
"values",
"=",
"endf",
".",
"get_list_record",
"(",
"file_obj",
")",
"mean",
"=",
"items",
"num_res",
"=",
"items",
"[",
"5",
"]",
"energy",
"=",
"values",
"[",
"0",
":",
":",
"12",
"]",
"spin",
"=",
"values",
"[",
"1",
":",
":",
"12",
"]",
"gt",
"=",
"values",
"[",
"2",
":",
":",
"12",
"]",
"gn",
"=",
"values",
"[",
"3",
":",
":",
"12",
"]",
"gg",
"=",
"values",
"[",
"4",
":",
":",
"12",
"]",
"gf",
"=",
"values",
"[",
"5",
":",
":",
"12",
"]",
"par_unc",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"num_res",
")",
":",
"res_unc",
"=",
"values",
"[",
"i",
"*",
"12",
"+",
"6",
":",
"i",
"*",
"12",
"+",
"12",
"]",
"# Delete 0 values (not provided, no fission width)",
"# DAJ/DGT always zero, DGF sometimes nonzero [1, 2, 5]",
"res_unc_nonzero",
"=",
"[",
"]",
"for",
"j",
"in",
"range",
"(",
"6",
")",
":",
"if",
"j",
"in",
"[",
"1",
",",
"2",
",",
"5",
"]",
"and",
"res_unc",
"[",
"j",
"]",
"!=",
"0.0",
":",
"res_unc_nonzero",
".",
"append",
"(",
"res_unc",
"[",
"j",
"]",
")",
"elif",
"j",
"in",
"[",
"0",
",",
"3",
",",
"4",
"]",
":",
"res_unc_nonzero",
".",
"append",
"(",
"res_unc",
"[",
"j",
"]",
")",
"par_unc",
".",
"extend",
"(",
"res_unc_nonzero",
")",
"records",
"=",
"[",
"]",
"for",
"i",
",",
"E",
"in",
"enumerate",
"(",
"energy",
")",
":",
"records",
".",
"append",
"(",
"[",
"energy",
"[",
"i",
"]",
",",
"spin",
"[",
"i",
"]",
",",
"gt",
"[",
"i",
"]",
",",
"gn",
"[",
"i",
"]",
",",
"gg",
"[",
"i",
"]",
",",
"gf",
"[",
"i",
"]",
"]",
")",
"corr",
"=",
"endf",
".",
"get_intg_record",
"(",
"file_obj",
")",
"cov",
"=",
"np",
".",
"diag",
"(",
"par_unc",
")",
".",
"dot",
"(",
"corr",
")",
".",
"dot",
"(",
"np",
".",
"diag",
"(",
"par_unc",
")",
")",
"# Compatible resolved resonance format",
"elif",
"lcomp",
"==",
"0",
":",
"cov",
"=",
"np",
".",
"zeros",
"(",
"[",
"4",
",",
"4",
"]",
")",
"records",
"=",
"[",
"]",
"cov_index",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"nls",
")",
":",
"items",
",",
"values",
"=",
"endf",
".",
"get_list_record",
"(",
"file_obj",
")",
"num_res",
"=",
"items",
"[",
"5",
"]",
"for",
"j",
"in",
"range",
"(",
"num_res",
")",
":",
"one_res",
"=",
"values",
"[",
"18",
"*",
"j",
":",
"18",
"*",
"(",
"j",
"+",
"1",
")",
"]",
"res_values",
"=",
"one_res",
"[",
":",
"6",
"]",
"cov_values",
"=",
"one_res",
"[",
"6",
":",
"]",
"records",
".",
"append",
"(",
"list",
"(",
"res_values",
")",
")",
"# Populate the coviariance matrix for this resonance",
"# There are no covariances between resonances in lcomp=0",
"cov",
"[",
"cov_index",
",",
"cov_index",
"]",
"=",
"cov_values",
"[",
"0",
"]",
"cov",
"[",
"cov_index",
"+",
"1",
",",
"cov_index",
"+",
"1",
":",
"cov_index",
"+",
"2",
"]",
"=",
"cov_values",
"[",
"1",
":",
"2",
"]",
"cov",
"[",
"cov_index",
"+",
"1",
",",
"cov_index",
"+",
"3",
"]",
"=",
"cov_values",
"[",
"4",
"]",
"cov",
"[",
"cov_index",
"+",
"2",
",",
"cov_index",
"+",
"2",
"]",
"=",
"cov_values",
"[",
"3",
"]",
"cov",
"[",
"cov_index",
"+",
"2",
",",
"cov_index",
"+",
"3",
"]",
"=",
"cov_values",
"[",
"5",
"]",
"cov",
"[",
"cov_index",
"+",
"3",
",",
"cov_index",
"+",
"3",
"]",
"=",
"cov_values",
"[",
"6",
"]",
"cov_index",
"+=",
"4",
"if",
"j",
"<",
"num_res",
"-",
"1",
":",
"# Pad matrix for additional values",
"cov",
"=",
"np",
".",
"pad",
"(",
"cov",
",",
"(",
"(",
"0",
",",
"4",
")",
",",
"(",
"0",
",",
"4",
")",
")",
",",
"'constant'",
",",
"constant_values",
"=",
"0",
")",
"# Create pandas DataFrame with resonance data, currently",
"# redundant with data.IncidentNeutron.resonance",
"columns",
"=",
"[",
"'energy'",
",",
"'J'",
",",
"'totalWidth'",
",",
"'neutronWidth'",
",",
"'captureWidth'",
",",
"'fissionWidth'",
"]",
"parameters",
"=",
"pd",
".",
"DataFrame",
".",
"from_records",
"(",
"records",
",",
"columns",
"=",
"columns",
")",
"# Determine mpar (number of parameters for each resonance in",
"# covariance matrix)",
"nparams",
",",
"params",
"=",
"parameters",
".",
"shape",
"covsize",
"=",
"cov",
".",
"shape",
"[",
"0",
"]",
"mpar",
"=",
"int",
"(",
"covsize",
"/",
"nparams",
")",
"# Add parameters from File 2",
"parameters",
"=",
"_add_file2_contributions",
"(",
"parameters",
",",
"resonance",
".",
"parameters",
")",
"# Create instance of class",
"mlbw",
"=",
"cls",
"(",
"energy_min",
",",
"energy_max",
",",
"parameters",
",",
"cov",
",",
"mpar",
",",
"lcomp",
",",
"resonance",
")",
"return",
"mlbw"
] |
https://github.com/openmc-dev/openmc/blob/0cf7d9283786677e324bfbdd0984a54d1c86dacc/openmc/data/resonance_covariance.py#L350-L497
|
|
pyqtgraph/pyqtgraph
|
ac3887abfca4e529aac44f022f8e40556a2587b0
|
pyqtgraph/widgets/SpinBox.py
|
python
|
SpinBox.setDecimals
|
(self, decimals)
|
Set the number of decimals to be displayed when formatting numeric
values.
|
Set the number of decimals to be displayed when formatting numeric
values.
|
[
"Set",
"the",
"number",
"of",
"decimals",
"to",
"be",
"displayed",
"when",
"formatting",
"numeric",
"values",
"."
] |
def setDecimals(self, decimals):
"""Set the number of decimals to be displayed when formatting numeric
values.
"""
self.setOpts(decimals=decimals)
|
[
"def",
"setDecimals",
"(",
"self",
",",
"decimals",
")",
":",
"self",
".",
"setOpts",
"(",
"decimals",
"=",
"decimals",
")"
] |
https://github.com/pyqtgraph/pyqtgraph/blob/ac3887abfca4e529aac44f022f8e40556a2587b0/pyqtgraph/widgets/SpinBox.py#L287-L291
|
||
wistbean/fxxkpython
|
88e16d79d8dd37236ba6ecd0d0ff11d63143968c
|
vip/qyxuan/projects/Snake/venv/lib/python3.6/site-packages/pip-19.0.3-py3.6.egg/pip/_vendor/distlib/util.py
|
python
|
EventMixin.get_subscribers
|
(self, event)
|
return iter(self._subscribers.get(event, ()))
|
Return an iterator for the subscribers for an event.
:param event: The event to return subscribers for.
|
Return an iterator for the subscribers for an event.
:param event: The event to return subscribers for.
|
[
"Return",
"an",
"iterator",
"for",
"the",
"subscribers",
"for",
"an",
"event",
".",
":",
"param",
"event",
":",
"The",
"event",
"to",
"return",
"subscribers",
"for",
"."
] |
def get_subscribers(self, event):
"""
Return an iterator for the subscribers for an event.
:param event: The event to return subscribers for.
"""
return iter(self._subscribers.get(event, ()))
|
[
"def",
"get_subscribers",
"(",
"self",
",",
"event",
")",
":",
"return",
"iter",
"(",
"self",
".",
"_subscribers",
".",
"get",
"(",
"event",
",",
"(",
")",
")",
")"
] |
https://github.com/wistbean/fxxkpython/blob/88e16d79d8dd37236ba6ecd0d0ff11d63143968c/vip/qyxuan/projects/Snake/venv/lib/python3.6/site-packages/pip-19.0.3-py3.6.egg/pip/_vendor/distlib/util.py#L1020-L1025
|
|
PaloAltoNetworks/pan-os-python
|
30f6cd9e29d0e3c2549d46c722f6dcb507acd437
|
panos/userid.py
|
python
|
UserId.get_user_tags
|
(self, user=None, prefix=None)
|
return ans
|
Get the dynamic user tags.
Note: PAN-OS 9.1+
Args:
user: Get only this user's tags, not all users and all tags.
prefix: Override class tag prefix.
Returns:
dict: Dict where the user is the key and the value is a list of tags.
|
Get the dynamic user tags.
|
[
"Get",
"the",
"dynamic",
"user",
"tags",
"."
] |
def get_user_tags(self, user=None, prefix=None):
"""
Get the dynamic user tags.
Note: PAN-OS 9.1+
Args:
user: Get only this user's tags, not all users and all tags.
prefix: Override class tag prefix.
Returns:
dict: Dict where the user is the key and the value is a list of tags.
"""
if prefix is None:
prefix = self.prefix
limit = 500
start = 1
start_elm = None
msg = [
"<show><object><registered-user>",
]
if user is None:
msg.append(
"<all>"
+ "<limit>{0}</limit>".format(limit)
+ "<start-point>{0}</start-point>".format(start)
+ "</all>"
)
else:
msg.append("<user>{0}</user>".format(user))
msg.append("</registered-user></object></show>")
cmd = ET.fromstring("".join(msg))
if user is None:
start_elm = cmd.find("./object/registered-user/all/start-point")
ans = {}
while True:
resp = self.device.op(
cmd=ET.tostring(cmd, encoding="utf-8"),
vsys=self.device.vsys,
cmd_xml=False,
)
entries = resp.findall("./result/entry")
for entry in entries:
key = entry.attrib["user"]
val = []
members = entry.findall("./tag/member")
for member in members:
tag = member.text
if not prefix or tag.startswith(prefix):
val.append(tag)
ans[key] = val
if start_elm is None or limit <= 0 or len(entries) < limit:
break
start += len(entries)
start_elm.text = "{0}".format(start)
# Done.
return ans
|
[
"def",
"get_user_tags",
"(",
"self",
",",
"user",
"=",
"None",
",",
"prefix",
"=",
"None",
")",
":",
"if",
"prefix",
"is",
"None",
":",
"prefix",
"=",
"self",
".",
"prefix",
"limit",
"=",
"500",
"start",
"=",
"1",
"start_elm",
"=",
"None",
"msg",
"=",
"[",
"\"<show><object><registered-user>\"",
",",
"]",
"if",
"user",
"is",
"None",
":",
"msg",
".",
"append",
"(",
"\"<all>\"",
"+",
"\"<limit>{0}</limit>\"",
".",
"format",
"(",
"limit",
")",
"+",
"\"<start-point>{0}</start-point>\"",
".",
"format",
"(",
"start",
")",
"+",
"\"</all>\"",
")",
"else",
":",
"msg",
".",
"append",
"(",
"\"<user>{0}</user>\"",
".",
"format",
"(",
"user",
")",
")",
"msg",
".",
"append",
"(",
"\"</registered-user></object></show>\"",
")",
"cmd",
"=",
"ET",
".",
"fromstring",
"(",
"\"\"",
".",
"join",
"(",
"msg",
")",
")",
"if",
"user",
"is",
"None",
":",
"start_elm",
"=",
"cmd",
".",
"find",
"(",
"\"./object/registered-user/all/start-point\"",
")",
"ans",
"=",
"{",
"}",
"while",
"True",
":",
"resp",
"=",
"self",
".",
"device",
".",
"op",
"(",
"cmd",
"=",
"ET",
".",
"tostring",
"(",
"cmd",
",",
"encoding",
"=",
"\"utf-8\"",
")",
",",
"vsys",
"=",
"self",
".",
"device",
".",
"vsys",
",",
"cmd_xml",
"=",
"False",
",",
")",
"entries",
"=",
"resp",
".",
"findall",
"(",
"\"./result/entry\"",
")",
"for",
"entry",
"in",
"entries",
":",
"key",
"=",
"entry",
".",
"attrib",
"[",
"\"user\"",
"]",
"val",
"=",
"[",
"]",
"members",
"=",
"entry",
".",
"findall",
"(",
"\"./tag/member\"",
")",
"for",
"member",
"in",
"members",
":",
"tag",
"=",
"member",
".",
"text",
"if",
"not",
"prefix",
"or",
"tag",
".",
"startswith",
"(",
"prefix",
")",
":",
"val",
".",
"append",
"(",
"tag",
")",
"ans",
"[",
"key",
"]",
"=",
"val",
"if",
"start_elm",
"is",
"None",
"or",
"limit",
"<=",
"0",
"or",
"len",
"(",
"entries",
")",
"<",
"limit",
":",
"break",
"start",
"+=",
"len",
"(",
"entries",
")",
"start_elm",
".",
"text",
"=",
"\"{0}\"",
".",
"format",
"(",
"start",
")",
"# Done.",
"return",
"ans"
] |
https://github.com/PaloAltoNetworks/pan-os-python/blob/30f6cd9e29d0e3c2549d46c722f6dcb507acd437/panos/userid.py#L621-L684
|
|
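A hedged usage sketch for the `get_user_tags` row above; the hostname, credentials, and user name are placeholders, and it assumes the `Firewall.userid` accessor documented by pan-os-python.

```python
from panos.firewall import Firewall

# Hypothetical connection details.
fw = Firewall("firewall.example.com", "admin", "password")

# Page through all registered users (PAN-OS 9.1+); tags are filtered by the
# UserId instance's configured prefix unless one is passed explicitly.
all_tags = fw.userid.get_user_tags()

# Fetch a single user's tags, disabling the prefix filter with "".
one_user = fw.userid.get_user_tags(user="example\\jdoe", prefix="")
print(all_tags, one_user)
```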
openstack/neutron
|
fb229fb527ac8b95526412f7762d90826ac41428
|
neutron/db/l3_agentschedulers_db.py
|
python
|
L3AgentSchedulerDbMixin.get_routers_l3_agents_count
|
(self, context)
|
return [(self._make_router_dict(router_model),
agent_count if agent_count else 0)
for router_model, agent_count in l3_model_list]
|
Return a map between routers and agent counts for all routers.
|
Return a map between routers and agent counts for all routers.
|
[
"Return",
"a",
"map",
"between",
"routers",
"and",
"agent",
"counts",
"for",
"all",
"routers",
"."
] |
def get_routers_l3_agents_count(self, context):
"""Return a map between routers and agent counts for all routers."""
# TODO(sshank): This portion needs Router OVO integration when it is
# merged.
l3_model_list = l3_objs.RouterExtraAttributes.get_router_agents_count(
context)
return [(self._make_router_dict(router_model),
agent_count if agent_count else 0)
for router_model, agent_count in l3_model_list]
|
[
"def",
"get_routers_l3_agents_count",
"(",
"self",
",",
"context",
")",
":",
"# TODO(sshank): This portion needs Router OVO integration when it is",
"# merged.",
"l3_model_list",
"=",
"l3_objs",
".",
"RouterExtraAttributes",
".",
"get_router_agents_count",
"(",
"context",
")",
"return",
"[",
"(",
"self",
".",
"_make_router_dict",
"(",
"router_model",
")",
",",
"agent_count",
"if",
"agent_count",
"else",
"0",
")",
"for",
"router_model",
",",
"agent_count",
"in",
"l3_model_list",
"]"
] |
https://github.com/openstack/neutron/blob/fb229fb527ac8b95526412f7762d90826ac41428/neutron/db/l3_agentschedulers_db.py#L401-L409
|
|
ioflo/ioflo
|
177ac656d7c4ff801aebb0d8b401db365a5248ce
|
ioflo/aid/aggregating.py
|
python
|
gowa
|
(w, wm, l=1.0)
|
return math.pow(s, 1/l)
|
Generalized Ordered Weighted Averaging Operator
More info can be found here:
https://pdfs.semanticscholar.org/2810/c971af0d01d085c799fb2295dc5668d055c8.pdf
l = -1 = Ordered Weighted Harmonic Averaging Operator
l = -.000000000001 = Ordered Weighted Geometric Averaging Operator
l = 1 = Ordered Weighted Arithmetic Averaging Operator
l = 2 = Ordered Weighted Quadratic Averaging Operator
w = list of weights
wm = list of importance weighted membership values
l = lambda real number specifying type of owa to use
returns ordered weighted average
|
Generalized Ordered Weighted Averaging Operator
More info can be found here:
https://pdfs.semanticscholar.org/2810/c971af0d01d085c799fb2295dc5668d055c8.pdf
|
[
"Generalized",
"Ordered",
"Weighted",
"Averaging",
"Operator",
"More",
"info",
"can",
"be",
"found",
"here",
":",
"https",
":",
"//",
"pdfs",
".",
"semanticscholar",
".",
"org",
"/",
"2810",
"/",
"c971af0d01d085c799fb2295dc5668d055c8",
".",
"pdf"
] |
def gowa(w, wm, l=1.0):
"""
Generalized Ordered Weighted Averaging Operator
More info can be found here:
https://pdfs.semanticscholar.org/2810/c971af0d01d085c799fb2295dc5668d055c8.pdf
l = -1 = Ordered Weighted Harmonic Averaging Operator
l = -.000000000001 = Ordered Weighted Geometric Averaging Operator
l = 1 = Ordered Weighted Arithmetic Averaging Operator
l = 2 = Ordered Weighted Quadratic Averaging Operator
w = list of weights
wm = list of importance weighted membership values
l = lambda real number specifying type of owa to use
returns ordered weighted average
"""
if len(w) != len(wm):
raise ValueError("Weights and membership value lists must be of equal length.")
if l == 0:
raise ZeroDivisionError("Param l cannot be 0. Use -.000000000001 for owg.")
wm.sort(reverse=True)
s = 0
for i in range(len(w)):
s += w[i] * math.pow(wm[i], l)
return math.pow(s, 1/l)
|
[
"def",
"gowa",
"(",
"w",
",",
"wm",
",",
"l",
"=",
"1.0",
")",
":",
"if",
"len",
"(",
"w",
")",
"!=",
"len",
"(",
"wm",
")",
":",
"raise",
"ValueError",
"(",
"\"Weights and membership value lists must be of equal length.\"",
")",
"if",
"l",
"==",
"0",
":",
"raise",
"ZeroDivisionError",
"(",
"\"Param l cannot be 0. Use -.000000000001 for owg.\"",
")",
"wm",
".",
"sort",
"(",
"reverse",
"=",
"True",
")",
"s",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"w",
")",
")",
":",
"s",
"+=",
"w",
"[",
"i",
"]",
"*",
"math",
".",
"pow",
"(",
"wm",
"[",
"i",
"]",
",",
"l",
")",
"return",
"math",
".",
"pow",
"(",
"s",
",",
"1",
"/",
"l",
")"
] |
https://github.com/ioflo/ioflo/blob/177ac656d7c4ff801aebb0d8b401db365a5248ce/ioflo/aid/aggregating.py#L60-L89
|
|
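A worked numeric check of the `gowa` row above, with invented weights and membership values; at `l = 1` the operator reduces to an ordered weighted arithmetic mean.

```python
import math

def gowa(w, wm, l=1.0):
    if len(w) != len(wm):
        raise ValueError("Weights and membership value lists must be of equal length.")
    if l == 0:
        raise ZeroDivisionError("Param l cannot be 0. Use -.000000000001 for owg.")
    wm.sort(reverse=True)  # order membership values from largest to smallest
    s = sum(w[i] * math.pow(wm[i], l) for i in range(len(w)))
    return math.pow(s, 1 / l)

# l = 1: 0.5*0.9 + 0.3*0.5 + 0.2*0.2 = 0.64
print(gowa([0.5, 0.3, 0.2], [0.2, 0.9, 0.5]))  # -> 0.64
```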
Ledger-Donjon/lascar
|
7a1fc2187a9b642efcdda5d9177f86ec2345d7ba
|
lascar/tools/signal_processing.py
|
python
|
running_min
|
(x, window=32)
|
return L
|
Returns min of consecutive windows of x, each min repeated window times
|
Returns min of consecutive windows of x, each min repeated window times
|
[
"Returns",
"min",
"of",
"consecutive",
"windows",
"of",
"x",
"each",
"max",
"repeated",
"window",
"times"
] |
def running_min(x, window=32):
"""
Returns min of consecutive windows of x, each min repeated window times
"""
n = x.shape[0]
L = np.zeros(n, dtype=x.dtype)
for i in range(0, n - window, window):
L[i : i + window] = np.repeat(x[i : i + window].min(), window)
leftover = n % window
if leftover:
L[-leftover:] = np.repeat(x[-leftover:].min(), leftover)
return L
|
[
"def",
"running_min",
"(",
"x",
",",
"window",
"=",
"32",
")",
":",
"n",
"=",
"x",
".",
"shape",
"[",
"0",
"]",
"L",
"=",
"np",
".",
"zeros",
"(",
"n",
",",
"dtype",
"=",
"x",
".",
"dtype",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"n",
"-",
"window",
",",
"window",
")",
":",
"L",
"[",
"i",
":",
"i",
"+",
"window",
"]",
"=",
"np",
".",
"repeat",
"(",
"x",
"[",
"i",
":",
"i",
"+",
"window",
"]",
".",
"min",
"(",
")",
",",
"window",
")",
"leftover",
"=",
"n",
"%",
"window",
"if",
"leftover",
":",
"L",
"[",
"-",
"leftover",
":",
"]",
"=",
"np",
".",
"repeat",
"(",
"x",
"[",
"-",
"leftover",
":",
"]",
".",
"min",
"(",
")",
",",
"leftover",
")",
"return",
"L"
] |
https://github.com/Ledger-Donjon/lascar/blob/7a1fc2187a9b642efcdda5d9177f86ec2345d7ba/lascar/tools/signal_processing.py#L144-L155
|
|
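A usage sketch for the `running_min` row above with an invented trace. Note the quirk visible in the loop bounds: when `len(x)` is an exact multiple of `window`, the final window falls outside `range(0, n - window, window)` and `leftover` is zero, so that window stays zero-filled.

```python
import numpy as np

def running_min(x, window=32):
    """ Returns min of consecutive windows of x, each min repeated window times """
    n = x.shape[0]
    L = np.zeros(n, dtype=x.dtype)
    for i in range(0, n - window, window):
        L[i : i + window] = np.repeat(x[i : i + window].min(), window)
    leftover = n % window
    if leftover:
        L[-leftover:] = np.repeat(x[-leftover:].min(), leftover)
    return L

x = np.array([4, 2, 7, 1, 9, 3, 5])
print(running_min(x, window=2))  # -> [2 2 1 1 3 3 5]
```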
SpamScope/spamscope
|
ffbfc53b9a3503ef3041cee94c6726c8b899118d
|
src/modules/attachments/thug_analysis.py
|
python
|
CustomWatchdog.handler
|
(self, signum, frame)
|
Function that handles Thug timeout
|
Function that handles Thug timeout
|
[
"Function",
"that",
"handles",
"Thug",
"timeout"
] |
def handler(self, signum, frame):
"""
Function that handles Thug timeout
"""
msg = "The analysis took more than {} seconds.".format(self.time)
log.critical(msg)
if self.callback:
self.callback(signum, frame)
log.ThugLogging.log_event()
raise Exception(msg)
|
[
"def",
"handler",
"(",
"self",
",",
"signum",
",",
"frame",
")",
":",
"msg",
"=",
"\"The analysis took more than {} seconds.\"",
".",
"format",
"(",
"self",
".",
"time",
")",
"log",
".",
"critical",
"(",
"msg",
")",
"if",
"self",
".",
"callback",
":",
"self",
".",
"callback",
"(",
"signum",
",",
"frame",
")",
"log",
".",
"ThugLogging",
".",
"log_event",
"(",
")",
"raise",
"Exception",
"(",
"msg",
")"
] |
https://github.com/SpamScope/spamscope/blob/ffbfc53b9a3503ef3041cee94c6726c8b899118d/src/modules/attachments/thug_analysis.py#L70-L81
|
||
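The handler above is the callback half of a SIGALRM watchdog. A minimal, Unix-only sketch of the surrounding pattern (an assumption about how such a handler is wired up, not SpamScope code):

```python
import signal
import time

def handler(signum, frame):
    raise TimeoutError("the analysis took more than 1 second")

signal.signal(signal.SIGALRM, handler)  # install the handler
signal.alarm(1)                         # schedule SIGALRM in 1 second
try:
    time.sleep(5)                       # stand-in for a long-running analysis
except TimeoutError as exc:
    print(exc)
finally:
    signal.alarm(0)                     # always cancel any pending alarm
```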
noamraph/dreampie
|
b09ee546ec099ee6549c649692ceb129e05fb229
|
dreampielib/gui/tags.py
|
python
|
get_theme
|
(config, theme_name)
|
return theme
|
Get a theme description (a dict of tuples, see above) from a config object.
|
Get a theme description (a dict of tuples, see above) from a config object.
|
[
"Get",
"a",
"theme",
"description",
"(",
"a",
"dict",
"of",
"tuples",
"see",
"above",
")",
"from",
"a",
"config",
"object",
"."
] |
def get_theme(config, theme_name):
"""
Get a theme description (a dict of tuples, see above) from a config object.
"""
section = theme_name + THEME_POSTFIX
if not config.get_bool('is-active', section):
raise ValueError("Theme %s is not active" % theme_name)
theme = {}
for tag, _desc in tag_desc:
theme[tag, FG, COLOR] = config.get('%s-fg' % tag, section)
theme[tag, BG, COLOR] = config.get('%s-bg' % tag, section)
if tag != DEFAULT:
theme[tag, FG, ISSET] = config.get_bool('%s-fg-set' % tag, section)
theme[tag, BG, ISSET] = config.get_bool('%s-bg-set' % tag, section)
return theme
|
[
"def",
"get_theme",
"(",
"config",
",",
"theme_name",
")",
":",
"section",
"=",
"theme_name",
"+",
"THEME_POSTFIX",
"if",
"not",
"config",
".",
"get_bool",
"(",
"'is-active'",
",",
"section",
")",
":",
"raise",
"ValueError",
"(",
"\"Theme %s is not active\"",
"%",
"theme_name",
")",
"theme",
"=",
"{",
"}",
"for",
"tag",
",",
"_desc",
"in",
"tag_desc",
":",
"theme",
"[",
"tag",
",",
"FG",
",",
"COLOR",
"]",
"=",
"config",
".",
"get",
"(",
"'%s-fg'",
"%",
"tag",
",",
"section",
")",
"theme",
"[",
"tag",
",",
"BG",
",",
"COLOR",
"]",
"=",
"config",
".",
"get",
"(",
"'%s-bg'",
"%",
"tag",
",",
"section",
")",
"if",
"tag",
"!=",
"DEFAULT",
":",
"theme",
"[",
"tag",
",",
"FG",
",",
"ISSET",
"]",
"=",
"config",
".",
"get_bool",
"(",
"'%s-fg-set'",
"%",
"tag",
",",
"section",
")",
"theme",
"[",
"tag",
",",
"BG",
",",
"ISSET",
"]",
"=",
"config",
".",
"get_bool",
"(",
"'%s-bg-set'",
"%",
"tag",
",",
"section",
")",
"return",
"theme"
] |
https://github.com/noamraph/dreampie/blob/b09ee546ec099ee6549c649692ceb129e05fb229/dreampielib/gui/tags.py#L154-L168
|
|
allegro/ralph
|
1e4a9e1800d5f664abaef2624b8bf7512df279ce
|
src/ralph/admin/mixins.py
|
python
|
BulkEditChangeListMixin.get_list_display
|
(self, request)
|
return super().get_list_display(request)
|
Override django admin get list display method.
Set new values for fields list_editable and list_display.
|
Override django admin get list display method.
Set new values for fields list_editable and list_display.
|
[
"Override",
"django",
"admin",
"get",
"list",
"display",
"method",
".",
"Set",
"new",
"values",
"for",
"fields",
"list_editable",
"and",
"list_display",
"."
] |
def get_list_display(self, request):
"""
Override django admin get list display method.
Set new values for fields list_editable and list_display.
"""
self.list_editable = []
if request.GET.get(BULK_EDIT_VAR):
# separate read-only and editable fields
bulk_list_display = self.bulk_edit_list
bulk_list_edit = self.bulk_edit_list
if issubclass(self.model, PermByFieldMixin):
bulk_list_display = [
field for field in self.bulk_edit_list
if self.model.has_access_to_field(
field, request.user, action='view'
)
]
bulk_list_edit = [
field for field in bulk_list_display
if self.model.has_access_to_field(
field, request.user, action='change'
)
]
# overwrite displayed fields in bulk-edit mode
list_display = bulk_list_display.copy()
if 'id' not in list_display:
list_display.insert(0, 'id')
# list editable is subset of list display in this case
self.list_editable = bulk_list_edit
return list_display
return super().get_list_display(request)
|
[
"def",
"get_list_display",
"(",
"self",
",",
"request",
")",
":",
"self",
".",
"list_editable",
"=",
"[",
"]",
"if",
"request",
".",
"GET",
".",
"get",
"(",
"BULK_EDIT_VAR",
")",
":",
"# separate read-only and editable fields",
"bulk_list_display",
"=",
"self",
".",
"bulk_edit_list",
"bulk_list_edit",
"=",
"self",
".",
"bulk_edit_list",
"if",
"issubclass",
"(",
"self",
".",
"model",
",",
"PermByFieldMixin",
")",
":",
"bulk_list_display",
"=",
"[",
"field",
"for",
"field",
"in",
"self",
".",
"bulk_edit_list",
"if",
"self",
".",
"model",
".",
"has_access_to_field",
"(",
"field",
",",
"request",
".",
"user",
",",
"action",
"=",
"'view'",
")",
"]",
"bulk_list_edit",
"=",
"[",
"field",
"for",
"field",
"in",
"bulk_list_display",
"if",
"self",
".",
"model",
".",
"has_access_to_field",
"(",
"field",
",",
"request",
".",
"user",
",",
"action",
"=",
"'change'",
")",
"]",
"# overwrite displayed fields in bulk-edit mode",
"list_display",
"=",
"bulk_list_display",
".",
"copy",
"(",
")",
"if",
"'id'",
"not",
"in",
"list_display",
":",
"list_display",
".",
"insert",
"(",
"0",
",",
"'id'",
")",
"# list editable is subset of list display in this case",
"self",
".",
"list_editable",
"=",
"bulk_list_edit",
"return",
"list_display",
"return",
"super",
"(",
")",
".",
"get_list_display",
"(",
"request",
")"
] |
https://github.com/allegro/ralph/blob/1e4a9e1800d5f664abaef2624b8bf7512df279ce/src/ralph/admin/mixins.py#L543-L573
|
|
sagemath/sage
|
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
|
src/sage/modular/modsym/modsym.py
|
python
|
canonical_parameters
|
(group, weight, sign, base_ring)
|
return group, weight, sign, base_ring
|
Return the canonically normalized parameters associated to a choice
of group, weight, sign, and base_ring. That is, normalize each of
these to be of the correct type, perform all appropriate type
checking, etc.
EXAMPLES::
sage: p1 = sage.modular.modsym.modsym.canonical_parameters(5,int(2),1,QQ) ; p1
(Congruence Subgroup Gamma0(5), 2, 1, Rational Field)
sage: p2 = sage.modular.modsym.modsym.canonical_parameters(Gamma0(5),2,1,QQ) ; p2
(Congruence Subgroup Gamma0(5), 2, 1, Rational Field)
sage: p1 == p2
True
sage: type(p1[1])
<class 'sage.rings.integer.Integer'>
|
Return the canonically normalized parameters associated to a choice
of group, weight, sign, and base_ring. That is, normalize each of
these to be of the correct type, perform all appropriate type
checking, etc.
|
[
"Return",
"the",
"canonically",
"normalized",
"parameters",
"associated",
"to",
"a",
"choice",
"of",
"group",
"weight",
"sign",
"and",
"base_ring",
".",
"That",
"is",
"normalize",
"each",
"of",
"these",
"to",
"be",
"of",
"the",
"correct",
"type",
"perform",
"all",
"appropriate",
"type",
"checking",
"etc",
"."
] |
def canonical_parameters(group, weight, sign, base_ring):
"""
Return the canonically normalized parameters associated to a choice
of group, weight, sign, and base_ring. That is, normalize each of
these to be of the correct type, perform all appropriate type
checking, etc.
EXAMPLES::
sage: p1 = sage.modular.modsym.modsym.canonical_parameters(5,int(2),1,QQ) ; p1
(Congruence Subgroup Gamma0(5), 2, 1, Rational Field)
sage: p2 = sage.modular.modsym.modsym.canonical_parameters(Gamma0(5),2,1,QQ) ; p2
(Congruence Subgroup Gamma0(5), 2, 1, Rational Field)
sage: p1 == p2
True
sage: type(p1[1])
<class 'sage.rings.integer.Integer'>
"""
sign = rings.Integer(sign)
if not (sign in [-1,0,1]):
raise ValueError("sign must be -1, 0, or 1")
weight = rings.Integer(weight)
if weight <= 1:
raise ValueError("the weight must be at least 2")
if isinstance(group, (int, rings.Integer)):
group = arithgroup.Gamma0(group)
elif isinstance(group, dirichlet.DirichletCharacter):
if group.is_trivial():
group = arithgroup.Gamma0(group.modulus())
else:
eps = group.minimize_base_ring()
group = (eps, eps.parent())
if base_ring is None:
base_ring = eps.base_ring()
if base_ring is None:
base_ring = rational_field.RationalField()
if not isinstance(base_ring, rings.CommutativeRing):
raise TypeError("base_ring (=%s) must be a commutative ring"%base_ring)
if not base_ring.is_field():
raise TypeError("(currently) base_ring (=%s) must be a field"%base_ring)
return group, weight, sign, base_ring
|
[
"def",
"canonical_parameters",
"(",
"group",
",",
"weight",
",",
"sign",
",",
"base_ring",
")",
":",
"sign",
"=",
"rings",
".",
"Integer",
"(",
"sign",
")",
"if",
"not",
"(",
"sign",
"in",
"[",
"-",
"1",
",",
"0",
",",
"1",
"]",
")",
":",
"raise",
"ValueError",
"(",
"\"sign must be -1, 0, or 1\"",
")",
"weight",
"=",
"rings",
".",
"Integer",
"(",
"weight",
")",
"if",
"weight",
"<=",
"1",
":",
"raise",
"ValueError",
"(",
"\"the weight must be at least 2\"",
")",
"if",
"isinstance",
"(",
"group",
",",
"(",
"int",
",",
"rings",
".",
"Integer",
")",
")",
":",
"group",
"=",
"arithgroup",
".",
"Gamma0",
"(",
"group",
")",
"elif",
"isinstance",
"(",
"group",
",",
"dirichlet",
".",
"DirichletCharacter",
")",
":",
"if",
"group",
".",
"is_trivial",
"(",
")",
":",
"group",
"=",
"arithgroup",
".",
"Gamma0",
"(",
"group",
".",
"modulus",
"(",
")",
")",
"else",
":",
"eps",
"=",
"group",
".",
"minimize_base_ring",
"(",
")",
"group",
"=",
"(",
"eps",
",",
"eps",
".",
"parent",
"(",
")",
")",
"if",
"base_ring",
"is",
"None",
":",
"base_ring",
"=",
"eps",
".",
"base_ring",
"(",
")",
"if",
"base_ring",
"is",
"None",
":",
"base_ring",
"=",
"rational_field",
".",
"RationalField",
"(",
")",
"if",
"not",
"isinstance",
"(",
"base_ring",
",",
"rings",
".",
"CommutativeRing",
")",
":",
"raise",
"TypeError",
"(",
"\"base_ring (=%s) must be a commutative ring\"",
"%",
"base_ring",
")",
"if",
"not",
"base_ring",
".",
"is_field",
"(",
")",
":",
"raise",
"TypeError",
"(",
"\"(currently) base_ring (=%s) must be a field\"",
"%",
"base_ring",
")",
"return",
"group",
",",
"weight",
",",
"sign",
",",
"base_ring"
] |
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/modular/modsym/modsym.py#L99-L145
|
|
Ultimaker/Uranium
|
66da853cd9a04edd3a8a03526fac81e83c03f5aa
|
UM/SortedList.py
|
python
|
SortedList.__delitem__
|
(self, index)
|
Remove value at `index` from sorted list.
``sl.__delitem__(index)`` <==> ``del sl[index]``
Supports slicing.
Runtime complexity: `O(log(n))` -- approximate.
>>> sl = SortedList('abcde')
>>> del sl[2]
>>> sl
SortedList(['a', 'b', 'd', 'e'])
>>> del sl[:2]
>>> sl
SortedList(['d', 'e'])
:param index: integer or slice for indexing
:raises IndexError: if index out of range
|
Remove value at `index` from sorted list.
|
[
"Remove",
"value",
"at",
"index",
"from",
"sorted",
"list",
"."
] |
def __delitem__(self, index):
"""Remove value at `index` from sorted list.
``sl.__delitem__(index)`` <==> ``del sl[index]``
Supports slicing.
Runtime complexity: `O(log(n))` -- approximate.
>>> sl = SortedList('abcde')
>>> del sl[2]
>>> sl
SortedList(['a', 'b', 'd', 'e'])
>>> del sl[:2]
>>> sl
SortedList(['d', 'e'])
:param index: integer or slice for indexing
:raises IndexError: if index out of range
"""
if isinstance(index, slice):
start, stop, step = index.indices(self._len)
if step == 1 and start < stop:
if start == 0 and stop == self._len:
return self._clear()
elif self._len <= 8 * (stop - start):
values = self._getitem(slice(None, start))
if stop < self._len:
values += self._getitem(slice(stop, None))
self._clear()
return self._update(values)
indices = range(start, stop, step)
# Delete items from greatest index to least so
# that the indices remain valid throughout iteration.
if step > 0:
indices = reversed(indices)
_pos, _delete = self._pos, self._delete
for index in indices:
pos, idx = _pos(index)
_delete(pos, idx)
else:
pos, idx = self._pos(index)
self._delete(pos, idx)
|
[
"def",
"__delitem__",
"(",
"self",
",",
"index",
")",
":",
"if",
"isinstance",
"(",
"index",
",",
"slice",
")",
":",
"start",
",",
"stop",
",",
"step",
"=",
"index",
".",
"indices",
"(",
"self",
".",
"_len",
")",
"if",
"step",
"==",
"1",
"and",
"start",
"<",
"stop",
":",
"if",
"start",
"==",
"0",
"and",
"stop",
"==",
"self",
".",
"_len",
":",
"return",
"self",
".",
"_clear",
"(",
")",
"elif",
"self",
".",
"_len",
"<=",
"8",
"*",
"(",
"stop",
"-",
"start",
")",
":",
"values",
"=",
"self",
".",
"_getitem",
"(",
"slice",
"(",
"None",
",",
"start",
")",
")",
"if",
"stop",
"<",
"self",
".",
"_len",
":",
"values",
"+=",
"self",
".",
"_getitem",
"(",
"slice",
"(",
"stop",
",",
"None",
")",
")",
"self",
".",
"_clear",
"(",
")",
"return",
"self",
".",
"_update",
"(",
"values",
")",
"indices",
"=",
"range",
"(",
"start",
",",
"stop",
",",
"step",
")",
"# Delete items from greatest index to least so",
"# that the indices remain valid throughout iteration.",
"if",
"step",
">",
"0",
":",
"indices",
"=",
"reversed",
"(",
"indices",
")",
"_pos",
",",
"_delete",
"=",
"self",
".",
"_pos",
",",
"self",
".",
"_delete",
"for",
"index",
"in",
"indices",
":",
"pos",
",",
"idx",
"=",
"_pos",
"(",
"index",
")",
"_delete",
"(",
"pos",
",",
"idx",
")",
"else",
":",
"pos",
",",
"idx",
"=",
"self",
".",
"_pos",
"(",
"index",
")",
"self",
".",
"_delete",
"(",
"pos",
",",
"idx",
")"
] |
https://github.com/Ultimaker/Uranium/blob/66da853cd9a04edd3a8a03526fac81e83c03f5aa/UM/SortedList.py#L797-L846
|
||
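The slice branch of __delitem__ above chooses between two strategies: when the slice covers at least one eighth of the list it rebuilds from the survivors (_clear plus _update), otherwise it deletes index-by-index, walking a positive-step range in reverse so earlier deletions cannot shift the indices still pending. A small driver, assuming the module path from the record (any sortedcontainers-compatible SortedList behaves the same):

from UM.SortedList import SortedList  # path taken from the record above

# Wide contiguous slice: len <= 8 * (stop - start), so the rebuild path runs.
sl = SortedList(range(10))
del sl[2:8]
assert list(sl) == [0, 1, 8, 9]

# Sparse stepped slice: per-index deletion, greatest index first.
sl = SortedList(range(10))
del sl[::3]  # removes indices 0, 3, 6, 9
assert list(sl) == [1, 2, 4, 5, 7, 8]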
rootpy/rootpy
|
3926935e1f2100d8ba68070c2ab44055d4800f73
|
rootpy/extern/pyparsing.py
|
python
|
ParserElement.inlineLiteralsUsing
|
(cls)
|
Set class to be used for inclusion of string literals into a parser.
|
Set class to be used for inclusion of string literals into a parser.
|
[
"Set",
"class",
"to",
"be",
"used",
"for",
"inclusion",
"of",
"string",
"literals",
"into",
"a",
"parser",
"."
] |
def inlineLiteralsUsing(cls):
"""
Set class to be used for inclusion of string literals into a parser.
"""
ParserElement.literalStringClass = cls
|
[
"def",
"inlineLiteralsUsing",
"(",
"cls",
")",
":",
"ParserElement",
".",
"literalStringClass",
"=",
"cls"
] |
https://github.com/rootpy/rootpy/blob/3926935e1f2100d8ba68070c2ab44055d4800f73/rootpy/extern/pyparsing.py#L800-L804
|
||
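inlineLiteralsUsing above is a one-line class method, but its effect is global: every bare string inside a pyparsing expression is wrapped with the class you pass instead of the default Literal. The classic use passes Suppress so separators match but drop out of the results; the import path below follows the vendored copy in the record:

from rootpy.extern.pyparsing import ParserElement, Suppress, Word, nums

# Bare "/" literals will now be parsed with Suppress instead of Literal,
# so they never appear in the token list.
ParserElement.inlineLiteralsUsing(Suppress)
date_expr = Word(nums) + "/" + Word(nums) + "/" + Word(nums)
print(date_expr.parseString("2023/11/05").asList())  # ['2023', '11', '05']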
iterative/dvc
|
13238e97168007cb5ba21966368457776274b9ca
|
dvc/repo/experiments/utils.py
|
python
|
resolve_name
|
(
scm: "Git",
exp_names: Union[Iterable[str], str],
git_remote: Optional[str] = None,
)
|
return result
|
find the ref_info of specified names.
|
find the ref_info of specified names.
|
[
"find",
"the",
"ref_info",
"of",
"specified",
"names",
"."
] |
def resolve_name(
scm: "Git",
exp_names: Union[Iterable[str], str],
git_remote: Optional[str] = None,
) -> Dict[str, Optional[ExpRefInfo]]:
"""find the ref_info of specified names."""
if isinstance(exp_names, str):
exp_names = [exp_names]
result = {}
unresolved = set()
for exp_name in exp_names:
if exp_name.startswith("refs/"):
result[exp_name] = ExpRefInfo.from_ref(exp_name)
else:
unresolved.add(exp_name)
unresolved_result = exp_refs_by_names(scm, unresolved, git_remote)
cur_rev = scm.get_rev()
for name in unresolved:
ref_info_list = unresolved_result[name]
if not ref_info_list:
result[name] = None
elif len(ref_info_list) == 1:
result[name] = ref_info_list[0]
else:
for ref_info in ref_info_list:
if ref_info.baseline_sha == cur_rev:
result[name] = ref_info
break
else:
raise AmbiguousExpRefInfo(name, ref_info_list)
return result
|
[
"def",
"resolve_name",
"(",
"scm",
":",
"\"Git\"",
",",
"exp_names",
":",
"Union",
"[",
"Iterable",
"[",
"str",
"]",
",",
"str",
"]",
",",
"git_remote",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
")",
"->",
"Dict",
"[",
"str",
",",
"Optional",
"[",
"ExpRefInfo",
"]",
"]",
":",
"if",
"isinstance",
"(",
"exp_names",
",",
"str",
")",
":",
"exp_names",
"=",
"[",
"exp_names",
"]",
"result",
"=",
"{",
"}",
"unresolved",
"=",
"set",
"(",
")",
"for",
"exp_name",
"in",
"exp_names",
":",
"if",
"exp_name",
".",
"startswith",
"(",
"\"refs/\"",
")",
":",
"result",
"[",
"exp_name",
"]",
"=",
"ExpRefInfo",
".",
"from_ref",
"(",
"exp_name",
")",
"else",
":",
"unresolved",
".",
"add",
"(",
"exp_name",
")",
"unresolved_result",
"=",
"exp_refs_by_names",
"(",
"scm",
",",
"unresolved",
",",
"git_remote",
")",
"cur_rev",
"=",
"scm",
".",
"get_rev",
"(",
")",
"for",
"name",
"in",
"unresolved",
":",
"ref_info_list",
"=",
"unresolved_result",
"[",
"name",
"]",
"if",
"not",
"ref_info_list",
":",
"result",
"[",
"name",
"]",
"=",
"None",
"elif",
"len",
"(",
"ref_info_list",
")",
"==",
"1",
":",
"result",
"[",
"name",
"]",
"=",
"ref_info_list",
"[",
"0",
"]",
"else",
":",
"for",
"ref_info",
"in",
"ref_info_list",
":",
"if",
"ref_info",
".",
"baseline_sha",
"==",
"cur_rev",
":",
"result",
"[",
"name",
"]",
"=",
"ref_info",
"break",
"else",
":",
"raise",
"AmbiguousExpRefInfo",
"(",
"name",
",",
"ref_info_list",
")",
"return",
"result"
] |
https://github.com/iterative/dvc/blob/13238e97168007cb5ba21966368457776274b9ca/dvc/repo/experiments/utils.py#L180-L212
|
|
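A hedged usage sketch for resolve_name above: fully-qualified refs (anything starting with "refs/") are parsed directly, bare names are looked up and disambiguated against the current baseline revision, and a name matching several refs raises AmbiguousExpRefInfo. The repository path and experiment name below are illustrative:

from dvc.repo import Repo
from dvc.repo.experiments.utils import resolve_name

repo = Repo(".")  # assumes the working directory is a DVC repository
# A plain string is wrapped into a one-element list internally.
refs = resolve_name(repo.scm, "exp-1234")
for name, ref_info in refs.items():
    print(name, "->", ref_info)  # ref_info is None when nothing matched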
caiiiac/Machine-Learning-with-Python
|
1a26c4467da41ca4ebc3d5bd789ea942ef79422f
|
MachineLearning/venv/lib/python3.5/site-packages/sklearn/datasets/samples_generator.py
|
python
|
make_classification
|
(n_samples=100, n_features=20, n_informative=2,
n_redundant=2, n_repeated=0, n_classes=2,
n_clusters_per_class=2, weights=None, flip_y=0.01,
class_sep=1.0, hypercube=True, shift=0.0, scale=1.0,
shuffle=True, random_state=None)
|
return X, y
|
Generate a random n-class classification problem.
This initially creates clusters of points normally distributed (std=1)
about vertices of a `2 * class_sep`-sided hypercube, and assigns an equal
number of clusters to each class. It introduces interdependence between
these features and adds various types of further noise to the data.
Prior to shuffling, `X` stacks a number of these primary "informative"
features, "redundant" linear combinations of these, "repeated" duplicates
of sampled features, and arbitrary noise for any remaining features.
Read more in the :ref:`User Guide <sample_generators>`.
Parameters
----------
n_samples : int, optional (default=100)
The number of samples.
n_features : int, optional (default=20)
The total number of features. These comprise `n_informative`
informative features, `n_redundant` redundant features, `n_repeated`
duplicated features and `n_features-n_informative-n_redundant-
n_repeated` useless features drawn at random.
n_informative : int, optional (default=2)
The number of informative features. Each class is composed of a number
of gaussian clusters each located around the vertices of a hypercube
in a subspace of dimension `n_informative`. For each cluster,
informative features are drawn independently from N(0, 1) and then
randomly linearly combined within each cluster in order to add
covariance. The clusters are then placed on the vertices of the
hypercube.
n_redundant : int, optional (default=2)
The number of redundant features. These features are generated as
random linear combinations of the informative features.
n_repeated : int, optional (default=0)
The number of duplicated features, drawn randomly from the informative
and the redundant features.
n_classes : int, optional (default=2)
The number of classes (or labels) of the classification problem.
n_clusters_per_class : int, optional (default=2)
The number of clusters per class.
weights : list of floats or None (default=None)
The proportions of samples assigned to each class. If None, then
classes are balanced. Note that if `len(weights) == n_classes - 1`,
then the last class weight is automatically inferred.
More than `n_samples` samples may be returned if the sum of `weights`
exceeds 1.
flip_y : float, optional (default=0.01)
The fraction of samples whose class are randomly exchanged.
class_sep : float, optional (default=1.0)
The factor multiplying the hypercube dimension.
hypercube : boolean, optional (default=True)
If True, the clusters are put on the vertices of a hypercube. If
False, the clusters are put on the vertices of a random polytope.
shift : float, array of shape [n_features] or None, optional (default=0.0)
Shift features by the specified value. If None, then features
are shifted by a random value drawn in [-class_sep, class_sep].
scale : float, array of shape [n_features] or None, optional (default=1.0)
Multiply features by the specified value. If None, then features
are scaled by a random value drawn in [1, 100]. Note that scaling
happens after shifting.
shuffle : boolean, optional (default=True)
Shuffle the samples and the features.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Returns
-------
X : array of shape [n_samples, n_features]
The generated samples.
y : array of shape [n_samples]
The integer labels for class membership of each sample.
Notes
-----
The algorithm is adapted from Guyon [1] and was designed to generate
the "Madelon" dataset.
References
----------
.. [1] I. Guyon, "Design of experiments for the NIPS 2003 variable
selection benchmark", 2003.
See also
--------
make_blobs: simplified variant
make_multilabel_classification: unrelated generator for multilabel tasks
|
Generate a random n-class classification problem.
|
[
"Generate",
"a",
"random",
"n",
"-",
"class",
"classification",
"problem",
"."
] |
def make_classification(n_samples=100, n_features=20, n_informative=2,
n_redundant=2, n_repeated=0, n_classes=2,
n_clusters_per_class=2, weights=None, flip_y=0.01,
class_sep=1.0, hypercube=True, shift=0.0, scale=1.0,
shuffle=True, random_state=None):
"""Generate a random n-class classification problem.
This initially creates clusters of points normally distributed (std=1)
about vertices of a `2 * class_sep`-sided hypercube, and assigns an equal
number of clusters to each class. It introduces interdependence between
these features and adds various types of further noise to the data.
Prior to shuffling, `X` stacks a number of these primary "informative"
features, "redundant" linear combinations of these, "repeated" duplicates
of sampled features, and arbitrary noise for any remaining features.
Read more in the :ref:`User Guide <sample_generators>`.
Parameters
----------
n_samples : int, optional (default=100)
The number of samples.
n_features : int, optional (default=20)
The total number of features. These comprise `n_informative`
informative features, `n_redundant` redundant features, `n_repeated`
duplicated features and `n_features-n_informative-n_redundant-
n_repeated` useless features drawn at random.
n_informative : int, optional (default=2)
The number of informative features. Each class is composed of a number
of gaussian clusters each located around the vertices of a hypercube
in a subspace of dimension `n_informative`. For each cluster,
informative features are drawn independently from N(0, 1) and then
randomly linearly combined within each cluster in order to add
covariance. The clusters are then placed on the vertices of the
hypercube.
n_redundant : int, optional (default=2)
The number of redundant features. These features are generated as
random linear combinations of the informative features.
n_repeated : int, optional (default=0)
The number of duplicated features, drawn randomly from the informative
and the redundant features.
n_classes : int, optional (default=2)
The number of classes (or labels) of the classification problem.
n_clusters_per_class : int, optional (default=2)
The number of clusters per class.
weights : list of floats or None (default=None)
The proportions of samples assigned to each class. If None, then
classes are balanced. Note that if `len(weights) == n_classes - 1`,
then the last class weight is automatically inferred.
More than `n_samples` samples may be returned if the sum of `weights`
exceeds 1.
flip_y : float, optional (default=0.01)
The fraction of samples whose class are randomly exchanged.
class_sep : float, optional (default=1.0)
The factor multiplying the hypercube dimension.
hypercube : boolean, optional (default=True)
If True, the clusters are put on the vertices of a hypercube. If
False, the clusters are put on the vertices of a random polytope.
shift : float, array of shape [n_features] or None, optional (default=0.0)
Shift features by the specified value. If None, then features
are shifted by a random value drawn in [-class_sep, class_sep].
scale : float, array of shape [n_features] or None, optional (default=1.0)
Multiply features by the specified value. If None, then features
are scaled by a random value drawn in [1, 100]. Note that scaling
happens after shifting.
shuffle : boolean, optional (default=True)
Shuffle the samples and the features.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Returns
-------
X : array of shape [n_samples, n_features]
The generated samples.
y : array of shape [n_samples]
The integer labels for class membership of each sample.
Notes
-----
The algorithm is adapted from Guyon [1] and was designed to generate
the "Madelon" dataset.
References
----------
.. [1] I. Guyon, "Design of experiments for the NIPS 2003 variable
selection benchmark", 2003.
See also
--------
make_blobs: simplified variant
make_multilabel_classification: unrelated generator for multilabel tasks
"""
generator = check_random_state(random_state)
# Count features, clusters and samples
if n_informative + n_redundant + n_repeated > n_features:
raise ValueError("Number of informative, redundant and repeated "
"features must sum to less than the number of total"
" features")
if 2 ** n_informative < n_classes * n_clusters_per_class:
raise ValueError("n_classes * n_clusters_per_class must"
" be smaller or equal 2 ** n_informative")
if weights and len(weights) not in [n_classes, n_classes - 1]:
raise ValueError("Weights specified but incompatible with number "
"of classes.")
n_useless = n_features - n_informative - n_redundant - n_repeated
n_clusters = n_classes * n_clusters_per_class
if weights and len(weights) == (n_classes - 1):
weights.append(1.0 - sum(weights))
if weights is None:
weights = [1.0 / n_classes] * n_classes
weights[-1] = 1.0 - sum(weights[:-1])
# Distribute samples among clusters by weight
n_samples_per_cluster = []
for k in range(n_clusters):
n_samples_per_cluster.append(int(n_samples * weights[k % n_classes]
/ n_clusters_per_class))
for i in range(n_samples - sum(n_samples_per_cluster)):
n_samples_per_cluster[i % n_clusters] += 1
# Initialize X and y
X = np.zeros((n_samples, n_features))
y = np.zeros(n_samples, dtype=np.int)
# Build the polytope whose vertices become cluster centroids
centroids = _generate_hypercube(n_clusters, n_informative,
generator).astype(float)
centroids *= 2 * class_sep
centroids -= class_sep
if not hypercube:
centroids *= generator.rand(n_clusters, 1)
centroids *= generator.rand(1, n_informative)
# Initially draw informative features from the standard normal
X[:, :n_informative] = generator.randn(n_samples, n_informative)
# Create each cluster; a variant of make_blobs
stop = 0
for k, centroid in enumerate(centroids):
start, stop = stop, stop + n_samples_per_cluster[k]
y[start:stop] = k % n_classes # assign labels
X_k = X[start:stop, :n_informative] # slice a view of the cluster
A = 2 * generator.rand(n_informative, n_informative) - 1
X_k[...] = np.dot(X_k, A) # introduce random covariance
X_k += centroid # shift the cluster to a vertex
# Create redundant features
if n_redundant > 0:
B = 2 * generator.rand(n_informative, n_redundant) - 1
X[:, n_informative:n_informative + n_redundant] = \
np.dot(X[:, :n_informative], B)
# Repeat some features
if n_repeated > 0:
n = n_informative + n_redundant
indices = ((n - 1) * generator.rand(n_repeated) + 0.5).astype(np.intp)
X[:, n:n + n_repeated] = X[:, indices]
# Fill useless features
if n_useless > 0:
X[:, -n_useless:] = generator.randn(n_samples, n_useless)
# Randomly replace labels
if flip_y >= 0.0:
flip_mask = generator.rand(n_samples) < flip_y
y[flip_mask] = generator.randint(n_classes, size=flip_mask.sum())
# Randomly shift and scale
if shift is None:
shift = (2 * generator.rand(n_features) - 1) * class_sep
X += shift
if scale is None:
scale = 1 + 100 * generator.rand(n_features)
X *= scale
if shuffle:
# Randomly permute samples
X, y = util_shuffle(X, y, random_state=generator)
# Randomly permute features
indices = np.arange(n_features)
generator.shuffle(indices)
X[:, :] = X[:, indices]
return X, y
|
[
"def",
"make_classification",
"(",
"n_samples",
"=",
"100",
",",
"n_features",
"=",
"20",
",",
"n_informative",
"=",
"2",
",",
"n_redundant",
"=",
"2",
",",
"n_repeated",
"=",
"0",
",",
"n_classes",
"=",
"2",
",",
"n_clusters_per_class",
"=",
"2",
",",
"weights",
"=",
"None",
",",
"flip_y",
"=",
"0.01",
",",
"class_sep",
"=",
"1.0",
",",
"hypercube",
"=",
"True",
",",
"shift",
"=",
"0.0",
",",
"scale",
"=",
"1.0",
",",
"shuffle",
"=",
"True",
",",
"random_state",
"=",
"None",
")",
":",
"generator",
"=",
"check_random_state",
"(",
"random_state",
")",
"# Count features, clusters and samples",
"if",
"n_informative",
"+",
"n_redundant",
"+",
"n_repeated",
">",
"n_features",
":",
"raise",
"ValueError",
"(",
"\"Number of informative, redundant and repeated \"",
"\"features must sum to less than the number of total\"",
"\" features\"",
")",
"if",
"2",
"**",
"n_informative",
"<",
"n_classes",
"*",
"n_clusters_per_class",
":",
"raise",
"ValueError",
"(",
"\"n_classes * n_clusters_per_class must\"",
"\" be smaller or equal 2 ** n_informative\"",
")",
"if",
"weights",
"and",
"len",
"(",
"weights",
")",
"not",
"in",
"[",
"n_classes",
",",
"n_classes",
"-",
"1",
"]",
":",
"raise",
"ValueError",
"(",
"\"Weights specified but incompatible with number \"",
"\"of classes.\"",
")",
"n_useless",
"=",
"n_features",
"-",
"n_informative",
"-",
"n_redundant",
"-",
"n_repeated",
"n_clusters",
"=",
"n_classes",
"*",
"n_clusters_per_class",
"if",
"weights",
"and",
"len",
"(",
"weights",
")",
"==",
"(",
"n_classes",
"-",
"1",
")",
":",
"weights",
".",
"append",
"(",
"1.0",
"-",
"sum",
"(",
"weights",
")",
")",
"if",
"weights",
"is",
"None",
":",
"weights",
"=",
"[",
"1.0",
"/",
"n_classes",
"]",
"*",
"n_classes",
"weights",
"[",
"-",
"1",
"]",
"=",
"1.0",
"-",
"sum",
"(",
"weights",
"[",
":",
"-",
"1",
"]",
")",
"# Distribute samples among clusters by weight",
"n_samples_per_cluster",
"=",
"[",
"]",
"for",
"k",
"in",
"range",
"(",
"n_clusters",
")",
":",
"n_samples_per_cluster",
".",
"append",
"(",
"int",
"(",
"n_samples",
"*",
"weights",
"[",
"k",
"%",
"n_classes",
"]",
"/",
"n_clusters_per_class",
")",
")",
"for",
"i",
"in",
"range",
"(",
"n_samples",
"-",
"sum",
"(",
"n_samples_per_cluster",
")",
")",
":",
"n_samples_per_cluster",
"[",
"i",
"%",
"n_clusters",
"]",
"+=",
"1",
"# Initialize X and y",
"X",
"=",
"np",
".",
"zeros",
"(",
"(",
"n_samples",
",",
"n_features",
")",
")",
"y",
"=",
"np",
".",
"zeros",
"(",
"n_samples",
",",
"dtype",
"=",
"np",
".",
"int",
")",
"# Build the polytope whose vertices become cluster centroids",
"centroids",
"=",
"_generate_hypercube",
"(",
"n_clusters",
",",
"n_informative",
",",
"generator",
")",
".",
"astype",
"(",
"float",
")",
"centroids",
"*=",
"2",
"*",
"class_sep",
"centroids",
"-=",
"class_sep",
"if",
"not",
"hypercube",
":",
"centroids",
"*=",
"generator",
".",
"rand",
"(",
"n_clusters",
",",
"1",
")",
"centroids",
"*=",
"generator",
".",
"rand",
"(",
"1",
",",
"n_informative",
")",
"# Initially draw informative features from the standard normal",
"X",
"[",
":",
",",
":",
"n_informative",
"]",
"=",
"generator",
".",
"randn",
"(",
"n_samples",
",",
"n_informative",
")",
"# Create each cluster; a variant of make_blobs",
"stop",
"=",
"0",
"for",
"k",
",",
"centroid",
"in",
"enumerate",
"(",
"centroids",
")",
":",
"start",
",",
"stop",
"=",
"stop",
",",
"stop",
"+",
"n_samples_per_cluster",
"[",
"k",
"]",
"y",
"[",
"start",
":",
"stop",
"]",
"=",
"k",
"%",
"n_classes",
"# assign labels",
"X_k",
"=",
"X",
"[",
"start",
":",
"stop",
",",
":",
"n_informative",
"]",
"# slice a view of the cluster",
"A",
"=",
"2",
"*",
"generator",
".",
"rand",
"(",
"n_informative",
",",
"n_informative",
")",
"-",
"1",
"X_k",
"[",
"...",
"]",
"=",
"np",
".",
"dot",
"(",
"X_k",
",",
"A",
")",
"# introduce random covariance",
"X_k",
"+=",
"centroid",
"# shift the cluster to a vertex",
"# Create redundant features",
"if",
"n_redundant",
">",
"0",
":",
"B",
"=",
"2",
"*",
"generator",
".",
"rand",
"(",
"n_informative",
",",
"n_redundant",
")",
"-",
"1",
"X",
"[",
":",
",",
"n_informative",
":",
"n_informative",
"+",
"n_redundant",
"]",
"=",
"np",
".",
"dot",
"(",
"X",
"[",
":",
",",
":",
"n_informative",
"]",
",",
"B",
")",
"# Repeat some features",
"if",
"n_repeated",
">",
"0",
":",
"n",
"=",
"n_informative",
"+",
"n_redundant",
"indices",
"=",
"(",
"(",
"n",
"-",
"1",
")",
"*",
"generator",
".",
"rand",
"(",
"n_repeated",
")",
"+",
"0.5",
")",
".",
"astype",
"(",
"np",
".",
"intp",
")",
"X",
"[",
":",
",",
"n",
":",
"n",
"+",
"n_repeated",
"]",
"=",
"X",
"[",
":",
",",
"indices",
"]",
"# Fill useless features",
"if",
"n_useless",
">",
"0",
":",
"X",
"[",
":",
",",
"-",
"n_useless",
":",
"]",
"=",
"generator",
".",
"randn",
"(",
"n_samples",
",",
"n_useless",
")",
"# Randomly replace labels",
"if",
"flip_y",
">=",
"0.0",
":",
"flip_mask",
"=",
"generator",
".",
"rand",
"(",
"n_samples",
")",
"<",
"flip_y",
"y",
"[",
"flip_mask",
"]",
"=",
"generator",
".",
"randint",
"(",
"n_classes",
",",
"size",
"=",
"flip_mask",
".",
"sum",
"(",
")",
")",
"# Randomly shift and scale",
"if",
"shift",
"is",
"None",
":",
"shift",
"=",
"(",
"2",
"*",
"generator",
".",
"rand",
"(",
"n_features",
")",
"-",
"1",
")",
"*",
"class_sep",
"X",
"+=",
"shift",
"if",
"scale",
"is",
"None",
":",
"scale",
"=",
"1",
"+",
"100",
"*",
"generator",
".",
"rand",
"(",
"n_features",
")",
"X",
"*=",
"scale",
"if",
"shuffle",
":",
"# Randomly permute samples",
"X",
",",
"y",
"=",
"util_shuffle",
"(",
"X",
",",
"y",
",",
"random_state",
"=",
"generator",
")",
"# Randomly permute features",
"indices",
"=",
"np",
".",
"arange",
"(",
"n_features",
")",
"generator",
".",
"shuffle",
"(",
"indices",
")",
"X",
"[",
":",
",",
":",
"]",
"=",
"X",
"[",
":",
",",
"indices",
"]",
"return",
"X",
",",
"y"
] |
https://github.com/caiiiac/Machine-Learning-with-Python/blob/1a26c4467da41ca4ebc3d5bd789ea942ef79422f/MachineLearning/venv/lib/python3.5/site-packages/sklearn/datasets/samples_generator.py#L38-L247
|
|
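A quick usage example for make_classification above (standard scikit-learn API; the parameter values are illustrative and respect the documented constraints, e.g. 2 ** n_informative >= n_classes * n_clusters_per_class):

from sklearn.datasets import make_classification

# 200 samples, 10 features: 4 informative, 2 redundant linear
# combinations of them, and the remaining 4 pure noise.
X, y = make_classification(n_samples=200, n_features=10,
                           n_informative=4, n_redundant=2,
                           n_classes=3, n_clusters_per_class=1,
                           class_sep=1.5, random_state=0)
print(X.shape, y.shape)    # (200, 10) (200,)
print(sorted(set(y)))      # [0, 1, 2]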
django/django
|
0a17666045de6739ae1c2ac695041823d5f827f7
|
django/core/serializers/xml_serializer.py
|
python
|
Deserializer._handle_m2m_field_node
|
(self, node, field)
|
Handle a <field> node for a ManyToManyField.
|
Handle a <field> node for a ManyToManyField.
|
[
"Handle",
"a",
"<field",
">",
"node",
"for",
"a",
"ManyToManyField",
"."
] |
def _handle_m2m_field_node(self, node, field):
"""
Handle a <field> node for a ManyToManyField.
"""
model = field.remote_field.model
default_manager = model._default_manager
if hasattr(default_manager, 'get_by_natural_key'):
def m2m_convert(n):
keys = n.getElementsByTagName('natural')
if keys:
# If there are 'natural' subelements, it must be a natural key
field_value = [getInnerText(k).strip() for k in keys]
obj_pk = default_manager.db_manager(self.db).get_by_natural_key(*field_value).pk
else:
# Otherwise, treat like a normal PK value.
obj_pk = model._meta.pk.to_python(n.getAttribute('pk'))
return obj_pk
else:
def m2m_convert(n):
return model._meta.pk.to_python(n.getAttribute('pk'))
values = []
try:
for c in node.getElementsByTagName('object'):
values.append(m2m_convert(c))
except Exception as e:
if isinstance(e, ObjectDoesNotExist) and self.handle_forward_references:
return base.DEFER_FIELD
else:
raise base.M2MDeserializationError(e, c)
else:
return values
|
[
"def",
"_handle_m2m_field_node",
"(",
"self",
",",
"node",
",",
"field",
")",
":",
"model",
"=",
"field",
".",
"remote_field",
".",
"model",
"default_manager",
"=",
"model",
".",
"_default_manager",
"if",
"hasattr",
"(",
"default_manager",
",",
"'get_by_natural_key'",
")",
":",
"def",
"m2m_convert",
"(",
"n",
")",
":",
"keys",
"=",
"n",
".",
"getElementsByTagName",
"(",
"'natural'",
")",
"if",
"keys",
":",
"# If there are 'natural' subelements, it must be a natural key",
"field_value",
"=",
"[",
"getInnerText",
"(",
"k",
")",
".",
"strip",
"(",
")",
"for",
"k",
"in",
"keys",
"]",
"obj_pk",
"=",
"default_manager",
".",
"db_manager",
"(",
"self",
".",
"db",
")",
".",
"get_by_natural_key",
"(",
"*",
"field_value",
")",
".",
"pk",
"else",
":",
"# Otherwise, treat like a normal PK value.",
"obj_pk",
"=",
"model",
".",
"_meta",
".",
"pk",
".",
"to_python",
"(",
"n",
".",
"getAttribute",
"(",
"'pk'",
")",
")",
"return",
"obj_pk",
"else",
":",
"def",
"m2m_convert",
"(",
"n",
")",
":",
"return",
"model",
".",
"_meta",
".",
"pk",
".",
"to_python",
"(",
"n",
".",
"getAttribute",
"(",
"'pk'",
")",
")",
"values",
"=",
"[",
"]",
"try",
":",
"for",
"c",
"in",
"node",
".",
"getElementsByTagName",
"(",
"'object'",
")",
":",
"values",
".",
"append",
"(",
"m2m_convert",
"(",
"c",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"if",
"isinstance",
"(",
"e",
",",
"ObjectDoesNotExist",
")",
"and",
"self",
".",
"handle_forward_references",
":",
"return",
"base",
".",
"DEFER_FIELD",
"else",
":",
"raise",
"base",
".",
"M2MDeserializationError",
"(",
"e",
",",
"c",
")",
"else",
":",
"return",
"values"
] |
https://github.com/django/django/blob/0a17666045de6739ae1c2ac695041823d5f827f7/django/core/serializers/xml_serializer.py#L285-L315
|
||
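For context on what _handle_m2m_field_node above consumes, here is a sketch of the two <object> shapes inside an m2m <field> node: with a manager exposing get_by_natural_key, <natural> subelements are resolved as a natural key, otherwise the pk attribute is coerced with the primary key's to_python. The model names in the fixture are hypothetical and a configured Django project is assumed:

from django.core import serializers

fixture = """<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
  <object model="app.article" pk="1">
    <field name="title" type="CharField">Hello</field>
    <field name="tags" rel="ManyToManyRel" to="app.tag">
      <object pk="3"></object>
      <object><natural>python</natural></object>
    </field>
  </object>
</django-objects>"""

# Deserializing walks each m2m <field> through _handle_m2m_field_node;
# unresolvable references raise M2MDeserializationError unless the
# deserializer was created with handle_forward_references=True.
for obj in serializers.deserialize("xml", fixture):
    obj.save()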
plotly/plotly.py
|
cfad7862594b35965c0e000813bd7805e8494a5b
|
packages/python/plotly/plotly/graph_objs/_histogram2dcontour.py
|
python
|
Histogram2dContour.colorbar
|
(self)
|
return self["colorbar"]
|
The 'colorbar' property is an instance of ColorBar
that may be specified as:
- An instance of :class:`plotly.graph_objs.histogram2dcontour.ColorBar`
- A dict of string/value properties that will be passed
to the ColorBar constructor
Supported dict properties:
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) of the border enclosing
this color bar.
dtick
Sets the step in-between ticks on this axis.
Use with `tick0`. Must be a positive number, or
special strings available to "log" and "date"
axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick
number. For example, to set a tick mark at 1,
10, 100, 1000, ... set dtick to 1. To set tick
marks at 1, 100, 10000, ... set dtick to 2. To
set tick marks at 1, 5, 25, 125, 625, 3125, ...
set dtick to log_10(5), or 0.69897000433. "log"
has several special values; "L<f>", where `f`
is a positive number, gives ticks linearly
spaced in value (but not position). For example
`tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is
ignored for "D1" and "D2". If the axis `type`
is "date", then you must convert the time to
milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to
86400000.0. "date" also has special values
"M<n>" gives ticks spaced by a number of
months. `n` must be a positive integer. To set
ticks on the 15th of every third month, set
`tick0` to "2000-01-15" and `dtick` to "M3". To
set ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick
exponents. For example, consider the number
1,000,000,000. If "none", it appears as
1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If
"SI", 1G. If "B", 1B.
len
Sets the length of the color bar. This measure
excludes the padding of both ends. That is, the
color bar length is this length minus the
padding on both ends.
lenmode
Determines whether this color bar's length
(i.e. the measure in the color variation
direction) is set in units of plot "fraction"
or in "pixels". Use `len` to set the value.
minexponent
Hide SI prefix for 10^n if |n| is below this
number. This only has an effect when
`tickformat` is "SI" or "B".
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks
will be chosen automatically to be less than or
equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
orientation
Sets the orientation of the colorbar.
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of
the first tick is shown. If "last", only the
exponent of the last tick is shown. If "none",
no exponents appear.
showticklabels
Determines whether or not the tick labels are
drawn.
showtickprefix
If "all", all tick labels are displayed with a
prefix. If "first", only the first tick is
displayed with a prefix. If "last", only the
last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
Sets the thickness of the color bar. This
measure excludes the size of the padding, ticks
and labels.
thicknessmode
Determines whether this color bar's thickness
(i.e. the measure in the constant color
direction) is set in units of plot "fraction"
or in "pixels". Use `thickness` to set the
value.
tick0
Sets the placement of the first tick on this
axis. Use with `dtick`. If the axis `type` is
"log", then you must take the log of your
starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when
`dtick`=*L<f>* (see `dtick` for more info). If
the axis `type` is "date", it should be a date
string, like date data. If the axis `type` is
"category", it should be a number, using the
scale where each category is assigned a serial
number from zero in the order it appears.
tickangle
Sets the angle of the tick labels with respect
to the horizontal. For example, a `tickangle`
of -90 draws the tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3
formatting mini-languages which are very
similar to those in Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
And for dates see:
https://github.com/d3/d3-time-format/tree/v2.2.3#locale_format. We add two
items to d3's date formatter: "%h" for half of
the year as a decimal number as well as "%{n}f"
for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display
"09~15~23.46"
tickformatstops
A tuple of :class:`plotly.graph_objects.histogr
am2dcontour.colorbar.Tickformatstop` instances
or dicts with compatible properties
tickformatstopdefaults
When used in a template (as layout.template.dat
a.histogram2dcontour.colorbar.tickformatstopdef
aults), sets the default property values to use
for elements of
histogram2dcontour.colorbar.tickformatstops
ticklabeloverflow
Determines how we handle tick labels that would
overflow either the graph div or the domain of
the axis. The default value for inside tick
labels is *hide past domain*. In other cases
the default is *hide past div*.
ticklabelposition
Determines where tick labels are drawn relative
to the ticks. Left and right options are used
when `orientation` is "h", top and bottom when
`orientation` is "v".
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto",
the number of ticks is set via `nticks`. If
"linear", the placement of the ticks is
determined by a starting position `tick0` and a
tick step `dtick` ("linear" is the default
value if `tick0` and `dtick` are provided). If
"array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`.
("array" is the default value if `tickvals` is
provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If
"", this axis' ticks are not drawn. If
"outside" ("inside"), this axis' are drawn
outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position
via `tickvals`. Only has an effect if
`tickmode` is set to "array". Used with
`tickvals`.
ticktextsrc
Sets the source reference on Chart Studio Cloud
for `ticktext`.
tickvals
Sets the values at which ticks on this axis
appear. Only has an effect if `tickmode` is set
to "array". Used with `ticktext`.
tickvalssrc
Sets the source reference on Chart Studio Cloud
for `tickvals`.
tickwidth
Sets the tick width (in px).
title
:class:`plotly.graph_objects.histogram2dcontour
.colorbar.Title` instance or dict with
compatible properties
titlefont
Deprecated: Please use
histogram2dcontour.colorbar.title.font instead.
Sets this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
titleside
Deprecated: Please use
histogram2dcontour.colorbar.title.side instead.
Determines the location of the color bar's title
with respect to the color bar. Defaults to
"top" when `orientation` is "v" and defaults
to "right" when `orientation` is "h". Note that
the title's location used to be set by the now
deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction). Defaults to 1.02 when `orientation`
is "v" and 0.5 when `orientation` is "h".
xanchor
Sets this color bar's horizontal position
anchor. This anchor binds the `x` position to
the "left", "center" or "right" of the color
bar. Defaults to "left" when `orientation` is
"v" and "center" when `orientation` is "h".
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction). Defaults to 0.5 when `orientation`
is "v" and 1.02 when `orientation` is "h".
yanchor
Sets this color bar's vertical position anchor.
This anchor binds the `y` position to the
"top", "middle" or "bottom" of the color bar.
Defaults to "middle" when `orientation` is "v"
and "bottom" when `orientation` is "h".
ypad
Sets the amount of padding (in px) along the y
direction.
Returns
-------
plotly.graph_objs.histogram2dcontour.ColorBar
|
The 'colorbar' property is an instance of ColorBar
that may be specified as:
- An instance of :class:`plotly.graph_objs.histogram2dcontour.ColorBar`
- A dict of string/value properties that will be passed
to the ColorBar constructor
Supported dict properties:
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) of the border enclosing
this color bar.
dtick
Sets the step in-between ticks on this axis.
Use with `tick0`. Must be a positive number, or
special strings available to "log" and "date"
axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick
number. For example, to set a tick mark at 1,
10, 100, 1000, ... set dtick to 1. To set tick
marks at 1, 100, 10000, ... set dtick to 2. To
set tick marks at 1, 5, 25, 125, 625, 3125, ...
set dtick to log_10(5), or 0.69897000433. "log"
has several special values; "L<f>", where `f`
is a positive number, gives ticks linearly
spaced in value (but not position). For example
`tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is
ignored for "D1" and "D2". If the axis `type`
is "date", then you must convert the time to
milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to
86400000.0. "date" also has special values
"M<n>" gives ticks spaced by a number of
months. `n` must be a positive integer. To set
ticks on the 15th of every third month, set
`tick0` to "2000-01-15" and `dtick` to "M3". To
set ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick
exponents. For example, consider the number
1,000,000,000. If "none", it appears as
1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If
"SI", 1G. If "B", 1B.
len
Sets the length of the color bar. This measure
excludes the padding of both ends. That is, the
color bar length is this length minus the
padding on both ends.
lenmode
Determines whether this color bar's length
(i.e. the measure in the color variation
direction) is set in units of plot "fraction"
or in "pixels". Use `len` to set the value.
minexponent
Hide SI prefix for 10^n if |n| is below this
number. This only has an effect when
`tickformat` is "SI" or "B".
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks
will be chosen automatically to be less than or
equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
orientation
Sets the orientation of the colorbar.
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of
the first tick is shown. If "last", only the
exponent of the last tick is shown. If "none",
no exponents appear.
showticklabels
Determines whether or not the tick labels are
drawn.
showtickprefix
If "all", all tick labels are displayed with a
prefix. If "first", only the first tick is
displayed with a prefix. If "last", only the
last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
Sets the thickness of the color bar. This
measure excludes the size of the padding, ticks
and labels.
thicknessmode
Determines whether this color bar's thickness
(i.e. the measure in the constant color
direction) is set in units of plot "fraction"
or in "pixels". Use `thickness` to set the
value.
tick0
Sets the placement of the first tick on this
axis. Use with `dtick`. If the axis `type` is
"log", then you must take the log of your
starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when
`dtick`=*L<f>* (see `dtick` for more info). If
the axis `type` is "date", it should be a date
string, like date data. If the axis `type` is
"category", it should be a number, using the
scale where each category is assigned a serial
number from zero in the order it appears.
tickangle
Sets the angle of the tick labels with respect
to the horizontal. For example, a `tickangle`
of -90 draws the tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3
formatting mini-languages which are very
similar to those in Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
And for dates see:
https://github.com/d3/d3-time-format/tree/v2.2.3#locale_format. We add two
items to d3's date formatter: "%h" for half of
the year as a decimal number as well as "%{n}f"
for fractional seconds with n digits. For
example, *2016-10-13 09:15:23.456* with
tickformat "%H~%M~%S.%2f" would display
"09~15~23.46"
tickformatstops
A tuple of :class:`plotly.graph_objects.histogr
am2dcontour.colorbar.Tickformatstop` instances
or dicts with compatible properties
tickformatstopdefaults
When used in a template (as layout.template.dat
a.histogram2dcontour.colorbar.tickformatstopdef
aults), sets the default property values to use
for elements of
histogram2dcontour.colorbar.tickformatstops
ticklabeloverflow
Determines how we handle tick labels that would
overflow either the graph div or the domain of
the axis. The default value for inside tick
labels is *hide past domain*. In other cases
the default is *hide past div*.
ticklabelposition
Determines where tick labels are drawn relative
to the ticks. Left and right options are used
when `orientation` is "h", top and bottom when
`orientation` is "v".
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto",
the number of ticks is set via `nticks`. If
"linear", the placement of the ticks is
determined by a starting position `tick0` and a
tick step `dtick` ("linear" is the default
value if `tick0` and `dtick` are provided). If
"array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`.
("array" is the default value if `tickvals` is
provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If
"", this axis' ticks are not drawn. If
"outside" ("inside"), this axis' are drawn
outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position
via `tickvals`. Only has an effect if
`tickmode` is set to "array". Used with
`tickvals`.
ticktextsrc
Sets the source reference on Chart Studio Cloud
for `ticktext`.
tickvals
Sets the values at which ticks on this axis
appear. Only has an effect if `tickmode` is set
to "array". Used with `ticktext`.
tickvalssrc
Sets the source reference on Chart Studio Cloud
for `tickvals`.
tickwidth
Sets the tick width (in px).
title
:class:`plotly.graph_objects.histogram2dcontour
.colorbar.Title` instance or dict with
compatible properties
titlefont
Deprecated: Please use
histogram2dcontour.colorbar.title.font instead.
Sets this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
titleside
Deprecated: Please use
histogram2dcontour.colorbar.title.side instead.
Determines the location of the color bar's title
with respect to the color bar. Defaults to
"top" when `orientation` is "v" and defaults
to "right" when `orientation` is "h". Note that
the title's location used to be set by the now
deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction). Defaults to 1.02 when `orientation`
is "v" and 0.5 when `orientation` is "h".
xanchor
Sets this color bar's horizontal position
anchor. This anchor binds the `x` position to
the "left", "center" or "right" of the color
bar. Defaults to "left" when `orientation` is
"v" and "center" when `orientation` is "h".
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction). Defaults to 0.5 when `orientation`
is "v" and 1.02 when `orientation` is "h".
yanchor
Sets this color bar's vertical position anchor.
This anchor binds the `y` position to the
"top", "middle" or "bottom" of the color bar.
Defaults to "middle" when `orientation` is "v"
and "bottom" when `orientation` is "h".
ypad
Sets the amount of padding (in px) along the y
direction.
|
[
"The",
"colorbar",
"property",
"is",
"an",
"instance",
"of",
"ColorBar",
"that",
"may",
"be",
"specified",
"as",
":",
"-",
"An",
"instance",
"of",
":",
"class",
":",
"plotly",
".",
"graph_objs",
".",
"histogram2dcontour",
".",
"ColorBar",
"-",
"A",
"dict",
"of",
"string",
"/",
"value",
"properties",
"that",
"will",
"be",
"passed",
"to",
"the",
"ColorBar",
"constructor",
"Supported",
"dict",
"properties",
":",
"bgcolor",
"Sets",
"the",
"color",
"of",
"padded",
"area",
".",
"bordercolor",
"Sets",
"the",
"axis",
"line",
"color",
".",
"borderwidth",
"Sets",
"the",
"width",
"(",
"in",
"px",
")",
"or",
"the",
"border",
"enclosing",
"this",
"color",
"bar",
".",
"dtick",
"Sets",
"the",
"step",
"in",
"-",
"between",
"ticks",
"on",
"this",
"axis",
".",
"Use",
"with",
"tick0",
".",
"Must",
"be",
"a",
"positive",
"number",
"or",
"special",
"strings",
"available",
"to",
"log",
"and",
"date",
"axes",
".",
"If",
"the",
"axis",
"type",
"is",
"log",
"then",
"ticks",
"are",
"set",
"every",
"10^",
"(",
"n",
"*",
"dtick",
")",
"where",
"n",
"is",
"the",
"tick",
"number",
".",
"For",
"example",
"to",
"set",
"a",
"tick",
"mark",
"at",
"1",
"10",
"100",
"1000",
"...",
"set",
"dtick",
"to",
"1",
".",
"To",
"set",
"tick",
"marks",
"at",
"1",
"100",
"10000",
"...",
"set",
"dtick",
"to",
"2",
".",
"To",
"set",
"tick",
"marks",
"at",
"1",
"5",
"25",
"125",
"625",
"3125",
"...",
"set",
"dtick",
"to",
"log_10",
"(",
"5",
")",
"or",
"0",
".",
"69897000433",
".",
"log",
"has",
"several",
"special",
"values",
";",
"L<f",
">",
"where",
"f",
"is",
"a",
"positive",
"number",
"gives",
"ticks",
"linearly",
"spaced",
"in",
"value",
"(",
"but",
"not",
"position",
")",
".",
"For",
"example",
"tick0",
"=",
"0",
".",
"1",
"dtick",
"=",
"L0",
".",
"5",
"will",
"put",
"ticks",
"at",
"0",
".",
"1",
"0",
".",
"6",
"1",
".",
"1",
"1",
".",
"6",
"etc",
".",
"To",
"show",
"powers",
"of",
"10",
"plus",
"small",
"digits",
"between",
"use",
"D1",
"(",
"all",
"digits",
")",
"or",
"D2",
"(",
"only",
"2",
"and",
"5",
")",
".",
"tick0",
"is",
"ignored",
"for",
"D1",
"and",
"D2",
".",
"If",
"the",
"axis",
"type",
"is",
"date",
"then",
"you",
"must",
"convert",
"the",
"time",
"to",
"milliseconds",
".",
"For",
"example",
"to",
"set",
"the",
"interval",
"between",
"ticks",
"to",
"one",
"day",
"set",
"dtick",
"to",
"86400000",
".",
"0",
".",
"date",
"also",
"has",
"special",
"values",
"M<n",
">",
"gives",
"ticks",
"spaced",
"by",
"a",
"number",
"of",
"months",
".",
"n",
"must",
"be",
"a",
"positive",
"integer",
".",
"To",
"set",
"ticks",
"on",
"the",
"15th",
"of",
"every",
"third",
"month",
"set",
"tick0",
"to",
"2000",
"-",
"01",
"-",
"15",
"and",
"dtick",
"to",
"M3",
".",
"To",
"set",
"ticks",
"every",
"4",
"years",
"set",
"dtick",
"to",
"M48",
"exponentformat",
"Determines",
"a",
"formatting",
"rule",
"for",
"the",
"tick",
"exponents",
".",
"For",
"example",
"consider",
"the",
"number",
"1",
"000",
"000",
"000",
".",
"If",
"none",
"it",
"appears",
"as",
"1",
"000",
"000",
"000",
".",
"If",
"e",
"1e",
"+",
"9",
".",
"If",
"E",
"1E",
"+",
"9",
".",
"If",
"power",
"1x10^9",
"(",
"with",
"9",
"in",
"a",
"super",
"script",
")",
".",
"If",
"SI",
"1G",
".",
"If",
"B",
"1B",
".",
"len",
"Sets",
"the",
"length",
"of",
"the",
"color",
"bar",
"This",
"measure",
"excludes",
"the",
"padding",
"of",
"both",
"ends",
".",
"That",
"is",
"the",
"color",
"bar",
"length",
"is",
"this",
"length",
"minus",
"the",
"padding",
"on",
"both",
"ends",
".",
"lenmode",
"Determines",
"whether",
"this",
"color",
"bar",
"s",
"length",
"(",
"i",
".",
"e",
".",
"the",
"measure",
"in",
"the",
"color",
"variation",
"direction",
")",
"is",
"set",
"in",
"units",
"of",
"plot",
"fraction",
"or",
"in",
"*",
"pixels",
".",
"Use",
"len",
"to",
"set",
"the",
"value",
".",
"minexponent",
"Hide",
"SI",
"prefix",
"for",
"10^n",
"if",
"|n|",
"is",
"below",
"this",
"number",
".",
"This",
"only",
"has",
"an",
"effect",
"when",
"tickformat",
"is",
"SI",
"or",
"B",
".",
"nticks",
"Specifies",
"the",
"maximum",
"number",
"of",
"ticks",
"for",
"the",
"particular",
"axis",
".",
"The",
"actual",
"number",
"of",
"ticks",
"will",
"be",
"chosen",
"automatically",
"to",
"be",
"less",
"than",
"or",
"equal",
"to",
"nticks",
".",
"Has",
"an",
"effect",
"only",
"if",
"tickmode",
"is",
"set",
"to",
"auto",
".",
"orientation",
"Sets",
"the",
"orientation",
"of",
"the",
"colorbar",
".",
"outlinecolor",
"Sets",
"the",
"axis",
"line",
"color",
".",
"outlinewidth",
"Sets",
"the",
"width",
"(",
"in",
"px",
")",
"of",
"the",
"axis",
"line",
".",
"separatethousands",
"If",
"true",
"even",
"4",
"-",
"digit",
"integers",
"are",
"separated",
"showexponent",
"If",
"all",
"all",
"exponents",
"are",
"shown",
"besides",
"their",
"significands",
".",
"If",
"first",
"only",
"the",
"exponent",
"of",
"the",
"first",
"tick",
"is",
"shown",
".",
"If",
"last",
"only",
"the",
"exponent",
"of",
"the",
"last",
"tick",
"is",
"shown",
".",
"If",
"none",
"no",
"exponents",
"appear",
".",
"showticklabels",
"Determines",
"whether",
"or",
"not",
"the",
"tick",
"labels",
"are",
"drawn",
".",
"showtickprefix",
"If",
"all",
"all",
"tick",
"labels",
"are",
"displayed",
"with",
"a",
"prefix",
".",
"If",
"first",
"only",
"the",
"first",
"tick",
"is",
"displayed",
"with",
"a",
"prefix",
".",
"If",
"last",
"only",
"the",
"last",
"tick",
"is",
"displayed",
"with",
"a",
"suffix",
".",
"If",
"none",
"tick",
"prefixes",
"are",
"hidden",
".",
"showticksuffix",
"Same",
"as",
"showtickprefix",
"but",
"for",
"tick",
"suffixes",
".",
"thickness",
"Sets",
"the",
"thickness",
"of",
"the",
"color",
"bar",
"This",
"measure",
"excludes",
"the",
"size",
"of",
"the",
"padding",
"ticks",
"and",
"labels",
".",
"thicknessmode",
"Determines",
"whether",
"this",
"color",
"bar",
"s",
"thickness",
"(",
"i",
".",
"e",
".",
"the",
"measure",
"in",
"the",
"constant",
"color",
"direction",
")",
"is",
"set",
"in",
"units",
"of",
"plot",
"fraction",
"or",
"in",
"pixels",
".",
"Use",
"thickness",
"to",
"set",
"the",
"value",
".",
"tick0",
"Sets",
"the",
"placement",
"of",
"the",
"first",
"tick",
"on",
"this",
"axis",
".",
"Use",
"with",
"dtick",
".",
"If",
"the",
"axis",
"type",
"is",
"log",
"then",
"you",
"must",
"take",
"the",
"log",
"of",
"your",
"starting",
"tick",
"(",
"e",
".",
"g",
".",
"to",
"set",
"the",
"starting",
"tick",
"to",
"100",
"set",
"the",
"tick0",
"to",
"2",
")",
"except",
"when",
"dtick",
"=",
"*",
"L<f",
">",
"*",
"(",
"see",
"dtick",
"for",
"more",
"info",
")",
".",
"If",
"the",
"axis",
"type",
"is",
"date",
"it",
"should",
"be",
"a",
"date",
"string",
"like",
"date",
"data",
".",
"If",
"the",
"axis",
"type",
"is",
"category",
"it",
"should",
"be",
"a",
"number",
"using",
"the",
"scale",
"where",
"each",
"category",
"is",
"assigned",
"a",
"serial",
"number",
"from",
"zero",
"in",
"the",
"order",
"it",
"appears",
".",
"tickangle",
"Sets",
"the",
"angle",
"of",
"the",
"tick",
"labels",
"with",
"respect",
"to",
"the",
"horizontal",
".",
"For",
"example",
"a",
"tickangle",
"of",
"-",
"90",
"draws",
"the",
"tick",
"labels",
"vertically",
".",
"tickcolor",
"Sets",
"the",
"tick",
"color",
".",
"tickfont",
"Sets",
"the",
"color",
"bar",
"s",
"tick",
"label",
"font",
"tickformat",
"Sets",
"the",
"tick",
"label",
"formatting",
"rule",
"using",
"d3",
"formatting",
"mini",
"-",
"languages",
"which",
"are",
"very",
"similar",
"to",
"those",
"in",
"Python",
".",
"For",
"numbers",
"see",
":",
"h",
"ttps",
":",
"//",
"github",
".",
"com",
"/",
"d3",
"/",
"d3",
"-",
"format",
"/",
"tree",
"/",
"v1",
".",
"4",
".",
"5#d3",
"-",
"f",
"ormat",
".",
"And",
"for",
"dates",
"see",
":",
"https",
":",
"//",
"github",
".",
"com",
"/",
"d3",
"/",
"d3",
"-",
"time",
"-",
"format",
"/",
"tree",
"/",
"v2",
".",
"2",
".",
"3#locale_format",
".",
"We",
"add",
"two",
"items",
"to",
"d3",
"s",
"date",
"formatter",
":",
"%h",
"for",
"half",
"of",
"the",
"year",
"as",
"a",
"decimal",
"number",
"as",
"well",
"as",
"%",
"{",
"n",
"}",
"f",
"for",
"fractional",
"seconds",
"with",
"n",
"digits",
".",
"For",
"example",
"*",
"2016",
"-",
"10",
"-",
"13",
"09",
":",
"15",
":",
"23",
".",
"456",
"*",
"with",
"tickformat",
"%H~%M~%S",
".",
"%2f",
"would",
"display",
"09~15~23",
".",
"46",
"tickformatstops",
"A",
"tuple",
"of",
":",
"class",
":",
"plotly",
".",
"graph_objects",
".",
"histogr",
"am2dcontour",
".",
"colorbar",
".",
"Tickformatstop",
"instances",
"or",
"dicts",
"with",
"compatible",
"properties",
"tickformatstopdefaults",
"When",
"used",
"in",
"a",
"template",
"(",
"as",
"layout",
".",
"template",
".",
"dat",
"a",
".",
"histogram2dcontour",
".",
"colorbar",
".",
"tickformatstopdef",
"aults",
")",
"sets",
"the",
"default",
"property",
"values",
"to",
"use",
"for",
"elements",
"of",
"histogram2dcontour",
".",
"colorbar",
".",
"tickformatstops",
"ticklabeloverflow",
"Determines",
"how",
"we",
"handle",
"tick",
"labels",
"that",
"would",
"overflow",
"either",
"the",
"graph",
"div",
"or",
"the",
"domain",
"of",
"the",
"axis",
".",
"The",
"default",
"value",
"for",
"inside",
"tick",
"labels",
"is",
"*",
"hide",
"past",
"domain",
"*",
".",
"In",
"other",
"cases",
"the",
"default",
"is",
"*",
"hide",
"past",
"div",
"*",
".",
"ticklabelposition",
"Determines",
"where",
"tick",
"labels",
"are",
"drawn",
"relative",
"to",
"the",
"ticks",
".",
"Left",
"and",
"right",
"options",
"are",
"used",
"when",
"orientation",
"is",
"h",
"top",
"and",
"bottom",
"when",
"orientation",
"is",
"v",
".",
"ticklen",
"Sets",
"the",
"tick",
"length",
"(",
"in",
"px",
")",
".",
"tickmode",
"Sets",
"the",
"tick",
"mode",
"for",
"this",
"axis",
".",
"If",
"auto",
"the",
"number",
"of",
"ticks",
"is",
"set",
"via",
"nticks",
".",
"If",
"linear",
"the",
"placement",
"of",
"the",
"ticks",
"is",
"determined",
"by",
"a",
"starting",
"position",
"tick0",
"and",
"a",
"tick",
"step",
"dtick",
"(",
"linear",
"is",
"the",
"default",
"value",
"if",
"tick0",
"and",
"dtick",
"are",
"provided",
")",
".",
"If",
"array",
"the",
"placement",
"of",
"the",
"ticks",
"is",
"set",
"via",
"tickvals",
"and",
"the",
"tick",
"text",
"is",
"ticktext",
".",
"(",
"array",
"is",
"the",
"default",
"value",
"if",
"tickvals",
"is",
"provided",
")",
".",
"tickprefix",
"Sets",
"a",
"tick",
"label",
"prefix",
".",
"ticks",
"Determines",
"whether",
"ticks",
"are",
"drawn",
"or",
"not",
".",
"If",
"this",
"axis",
"ticks",
"are",
"not",
"drawn",
".",
"If",
"outside",
"(",
"inside",
")",
"this",
"axis",
"are",
"drawn",
"outside",
"(",
"inside",
")",
"the",
"axis",
"lines",
".",
"ticksuffix",
"Sets",
"a",
"tick",
"label",
"suffix",
".",
"ticktext",
"Sets",
"the",
"text",
"displayed",
"at",
"the",
"ticks",
"position",
"via",
"tickvals",
".",
"Only",
"has",
"an",
"effect",
"if",
"tickmode",
"is",
"set",
"to",
"array",
".",
"Used",
"with",
"tickvals",
".",
"ticktextsrc",
"Sets",
"the",
"source",
"reference",
"on",
"Chart",
"Studio",
"Cloud",
"for",
"ticktext",
".",
"tickvals",
"Sets",
"the",
"values",
"at",
"which",
"ticks",
"on",
"this",
"axis",
"appear",
".",
"Only",
"has",
"an",
"effect",
"if",
"tickmode",
"is",
"set",
"to",
"array",
".",
"Used",
"with",
"ticktext",
".",
"tickvalssrc",
"Sets",
"the",
"source",
"reference",
"on",
"Chart",
"Studio",
"Cloud",
"for",
"tickvals",
".",
"tickwidth",
"Sets",
"the",
"tick",
"width",
"(",
"in",
"px",
")",
".",
"title",
":",
"class",
":",
"plotly",
".",
"graph_objects",
".",
"histogram2dcontour",
".",
"colorbar",
".",
"Title",
"instance",
"or",
"dict",
"with",
"compatible",
"properties",
"titlefont",
"Deprecated",
":",
"Please",
"use",
"histogram2dcontour",
".",
"colorbar",
".",
"title",
".",
"font",
"instead",
".",
"Sets",
"this",
"color",
"bar",
"s",
"title",
"font",
".",
"Note",
"that",
"the",
"title",
"s",
"font",
"used",
"to",
"be",
"set",
"by",
"the",
"now",
"deprecated",
"titlefont",
"attribute",
".",
"titleside",
"Deprecated",
":",
"Please",
"use",
"histogram2dcontour",
".",
"colorbar",
".",
"title",
".",
"side",
"instead",
".",
"Determines",
"the",
"location",
"of",
"color",
"bar",
"s",
"title",
"with",
"respect",
"to",
"the",
"color",
"bar",
".",
"Defaults",
"to",
"top",
"when",
"orientation",
"if",
"v",
"and",
"defaults",
"to",
"right",
"when",
"orientation",
"if",
"h",
".",
"Note",
"that",
"the",
"title",
"s",
"location",
"used",
"to",
"be",
"set",
"by",
"the",
"now",
"deprecated",
"titleside",
"attribute",
".",
"x",
"Sets",
"the",
"x",
"position",
"of",
"the",
"color",
"bar",
"(",
"in",
"plot",
"fraction",
")",
".",
"Defaults",
"to",
"1",
".",
"02",
"when",
"orientation",
"is",
"v",
"and",
"0",
".",
"5",
"when",
"orientation",
"is",
"h",
".",
"xanchor",
"Sets",
"this",
"color",
"bar",
"s",
"horizontal",
"position",
"anchor",
".",
"This",
"anchor",
"binds",
"the",
"x",
"position",
"to",
"the",
"left",
"center",
"or",
"right",
"of",
"the",
"color",
"bar",
".",
"Defaults",
"to",
"left",
"when",
"orientation",
"is",
"v",
"and",
"center",
"when",
"orientation",
"is",
"h",
".",
"xpad",
"Sets",
"the",
"amount",
"of",
"padding",
"(",
"in",
"px",
")",
"along",
"the",
"x",
"direction",
".",
"y",
"Sets",
"the",
"y",
"position",
"of",
"the",
"color",
"bar",
"(",
"in",
"plot",
"fraction",
")",
".",
"Defaults",
"to",
"0",
".",
"5",
"when",
"orientation",
"is",
"v",
"and",
"1",
".",
"02",
"when",
"orientation",
"is",
"h",
".",
"yanchor",
"Sets",
"this",
"color",
"bar",
"s",
"vertical",
"position",
"anchor",
"This",
"anchor",
"binds",
"the",
"y",
"position",
"to",
"the",
"top",
"middle",
"or",
"bottom",
"of",
"the",
"color",
"bar",
".",
"Defaults",
"to",
"middle",
"when",
"orientation",
"is",
"v",
"and",
"bottom",
"when",
"orientation",
"is",
"h",
".",
"ypad",
"Sets",
"the",
"amount",
"of",
"padding",
"(",
"in",
"px",
")",
"along",
"the",
"y",
"direction",
"."
] |
def colorbar(self):
"""
The 'colorbar' property is an instance of ColorBar
that may be specified as:
- An instance of :class:`plotly.graph_objs.histogram2dcontour.ColorBar`
- A dict of string/value properties that will be passed
to the ColorBar constructor
Supported dict properties:
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) of the border enclosing
this color bar.
dtick
Sets the step in-between ticks on this axis.
Use with `tick0`. Must be a positive number, or
special strings available to "log" and "date"
axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick
number. For example, to set a tick mark at 1,
10, 100, 1000, ... set dtick to 1. To set tick
marks at 1, 100, 10000, ... set dtick to 2. To
set tick marks at 1, 5, 25, 125, 625, 3125, ...
set dtick to log_10(5), or 0.69897000433. "log"
has several special values; "L<f>", where `f`
is a positive number, gives ticks linearly
spaced in value (but not position). For example
`tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10
plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is
ignored for "D1" and "D2". If the axis `type`
is "date", then you must convert the time to
milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to
86400000.0. "date" also has special values
"M<n>" gives ticks spaced by a number of
months. `n` must be a positive integer. To set
ticks on the 15th of every third month, set
`tick0` to "2000-01-15" and `dtick` to "M3". To
set ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick
exponents. For example, consider the number
1,000,000,000. If "none", it appears as
1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If
"SI", 1G. If "B", 1B.
len
Sets the length of the color bar. This measure
excludes the padding of both ends. That is, the
color bar length is this length minus the
padding on both ends.
lenmode
Determines whether this color bar's length
(i.e. the measure in the color variation
direction) is set in units of plot "fraction"
or in "pixels". Use `len` to set the value.
minexponent
Hide SI prefix for 10^n if |n| is below this
number. This only has an effect when
`tickformat` is "SI" or "B".
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks
will be chosen automatically to be less than or
equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
orientation
Sets the orientation of the colorbar.
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of
the first tick is shown. If "last", only the
exponent of the last tick is shown. If "none",
no exponents appear.
showticklabels
Determines whether or not the tick labels are
drawn.
showtickprefix
If "all", all tick labels are displayed with a
prefix. If "first", only the first tick is
displayed with a prefix. If "last", only the
last tick is displayed with a prefix. If
"none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
Sets the thickness of the color bar. This
measure excludes the size of the padding, ticks
and labels.
thicknessmode
Determines whether this color bar's thickness
(i.e. the measure in the constant color
direction) is set in units of plot "fraction"
or in "pixels". Use `thickness` to set the
value.
tick0
Sets the placement of the first tick on this
axis. Use with `dtick`. If the axis `type` is
"log", then you must take the log of your
starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when
`dtick`=*L<f>* (see `dtick` for more info). If
the axis `type` is "date", it should be a date
string, like date data. If the axis `type` is
"category", it should be a number, using the
scale where each category is assigned a serial
number from zero in the order it appears.
tickangle
Sets the angle of the tick labels with respect
to the horizontal. For example, a `tickangle`
of -90 draws the tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3
formatting mini-languages which are very
similar to those in Python. For numbers, see:
https://github.com/d3/d3-format/tree/v1.4.5#d3-format.
And for dates see:
https://github.com/d3/d3-time-format/tree/v2.2.3#locale_format.
We add two items to d3's date formatter: "%h"
for half of the year as a decimal number as
well as "%{n}f" for fractional seconds with n
digits. For example, *2016-10-13 09:15:23.456*
with tickformat "%H~%M~%S.%2f" would display
"09~15~23.46"
tickformatstops
A tuple of
:class:`plotly.graph_objects.histogram2dcontour.colorbar.Tickformatstop`
instances or dicts with compatible properties
tickformatstopdefaults
When used in a template (as
layout.template.data.histogram2dcontour.colorbar.tickformatstopdefaults),
sets the default property values to use for
elements of
histogram2dcontour.colorbar.tickformatstops
ticklabeloverflow
Determines how we handle tick labels that would
overflow either the graph div or the domain of
the axis. The default value for inside tick
labels is *hide past domain*. In other cases
the default is *hide past div*.
ticklabelposition
Determines where tick labels are drawn relative
to the ticks. Left and right options are used
when `orientation` is "h", top and bottom when
`orientation` is "v".
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto",
the number of ticks is set via `nticks`. If
"linear", the placement of the ticks is
determined by a starting position `tick0` and a
tick step `dtick` ("linear" is the default
value if `tick0` and `dtick` are provided). If
"array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`.
("array" is the default value if `tickvals` is
provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If
"", this axis' ticks are not drawn. If
"outside" ("inside"), this axis' are drawn
outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position
via `tickvals`. Only has an effect if
`tickmode` is set to "array". Used with
`tickvals`.
ticktextsrc
Sets the source reference on Chart Studio Cloud
for `ticktext`.
tickvals
Sets the values at which ticks on this axis
appear. Only has an effect if `tickmode` is set
to "array". Used with `ticktext`.
tickvalssrc
Sets the source reference on Chart Studio Cloud
for `tickvals`.
tickwidth
Sets the tick width (in px).
title
:class:`plotly.graph_objects.histogram2dcontour.colorbar.Title`
instance or dict with compatible properties
titlefont
Deprecated: Please use
histogram2dcontour.colorbar.title.font instead.
Sets this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
titleside
Deprecated: Please use
histogram2dcontour.colorbar.title.side instead.
Determines the location of color bar's title
with respect to the color bar. Defaults to
"top" when `orientation` if "v" and defaults
to "right" when `orientation` if "h". Note that
the title's location used to be set by the now
deprecated `titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction). Defaults to 1.02 when `orientation`
is "v" and 0.5 when `orientation` is "h".
xanchor
Sets this color bar's horizontal position
anchor. This anchor binds the `x` position to
the "left", "center" or "right" of the color
bar. Defaults to "left" when `orientation` is
"v" and "center" when `orientation` is "h".
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction). Defaults to 0.5 when `orientation`
is "v" and 1.02 when `orientation` is "h".
yanchor
Sets this color bar's vertical position anchor.
This anchor binds the `y` position to the
"top", "middle" or "bottom" of the color bar.
Defaults to "middle" when `orientation` is "v"
and "bottom" when `orientation` is "h".
ypad
Sets the amount of padding (in px) along the y
direction.
Returns
-------
plotly.graph_objs.histogram2dcontour.ColorBar
"""
return self["colorbar"]
|
[
"def",
"colorbar",
"(",
"self",
")",
":",
"return",
"self",
"[",
"\"colorbar\"",
"]"
] |
https://github.com/plotly/plotly.py/blob/cfad7862594b35965c0e000813bd7805e8494a5b/packages/python/plotly/plotly/graph_objs/_histogram2dcontour.py#L224-L475
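
A minimal sketch of the `tickmode`/`tickvals`/`ticktext` interplay documented above; the sample data and tick values are invented, while the attribute names come from the docstring itself:

import numpy as np
import plotly.graph_objects as go

# Random sample data purely for illustration.
rng = np.random.default_rng(0)
x = rng.normal(size=500)
y = rng.normal(size=500)

fig = go.Figure(
    go.Histogram2dContour(
        x=x,
        y=y,
        colorbar=dict(
            title=dict(text="count", side="right"),
            # "array" mode: tick positions come from `tickvals`,
            # labels from the parallel `ticktext` list.
            tickmode="array",
            tickvals=[0, 5, 10],
            ticktext=["none", "some", "many"],
            ticks="outside",
            ticklen=4,
        ),
    )
)
fig.show()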
|
|
tp4a/teleport
|
1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad
|
server/www/packages/packages-linux/x64/tornado/httpserver.py
|
python
|
_HTTPRequestContext.__init__
|
(
self,
stream: iostream.IOStream,
address: Tuple,
protocol: Optional[str],
trusted_downstream: List[str] = None,
)
|
[] |
def __init__(
self,
stream: iostream.IOStream,
address: Tuple,
protocol: Optional[str],
trusted_downstream: List[str] = None,
) -> None:
self.address = address
# Save the socket's address family now so we know how to
# interpret self.address even after the stream is closed
# and its socket attribute replaced with None.
if stream.socket is not None:
self.address_family = stream.socket.family
else:
self.address_family = None
# In HTTPServerRequest we want an IP, not a full socket address.
if (
self.address_family in (socket.AF_INET, socket.AF_INET6)
and address is not None
):
self.remote_ip = address[0]
else:
# Unix (or other) socket; fake the remote address.
self.remote_ip = "0.0.0.0"
if protocol:
self.protocol = protocol
elif isinstance(stream, iostream.SSLIOStream):
self.protocol = "https"
else:
self.protocol = "http"
self._orig_remote_ip = self.remote_ip
self._orig_protocol = self.protocol
self.trusted_downstream = set(trusted_downstream or [])
|
[
"def",
"__init__",
"(",
"self",
",",
"stream",
":",
"iostream",
".",
"IOStream",
",",
"address",
":",
"Tuple",
",",
"protocol",
":",
"Optional",
"[",
"str",
"]",
",",
"trusted_downstream",
":",
"List",
"[",
"str",
"]",
"=",
"None",
",",
")",
"->",
"None",
":",
"self",
".",
"address",
"=",
"address",
"# Save the socket's address family now so we know how to",
"# interpret self.address even after the stream is closed",
"# and its socket attribute replaced with None.",
"if",
"stream",
".",
"socket",
"is",
"not",
"None",
":",
"self",
".",
"address_family",
"=",
"stream",
".",
"socket",
".",
"family",
"else",
":",
"self",
".",
"address_family",
"=",
"None",
"# In HTTPServerRequest we want an IP, not a full socket address.",
"if",
"(",
"self",
".",
"address_family",
"in",
"(",
"socket",
".",
"AF_INET",
",",
"socket",
".",
"AF_INET6",
")",
"and",
"address",
"is",
"not",
"None",
")",
":",
"self",
".",
"remote_ip",
"=",
"address",
"[",
"0",
"]",
"else",
":",
"# Unix (or other) socket; fake the remote address.",
"self",
".",
"remote_ip",
"=",
"\"0.0.0.0\"",
"if",
"protocol",
":",
"self",
".",
"protocol",
"=",
"protocol",
"elif",
"isinstance",
"(",
"stream",
",",
"iostream",
".",
"SSLIOStream",
")",
":",
"self",
".",
"protocol",
"=",
"\"https\"",
"else",
":",
"self",
".",
"protocol",
"=",
"\"http\"",
"self",
".",
"_orig_remote_ip",
"=",
"self",
".",
"remote_ip",
"self",
".",
"_orig_protocol",
"=",
"self",
".",
"protocol",
"self",
".",
"trusted_downstream",
"=",
"set",
"(",
"trusted_downstream",
"or",
"[",
"]",
")"
] |
https://github.com/tp4a/teleport/blob/1fafd34f1f775d2cf80ea4af6e44468d8e0b24ad/server/www/packages/packages-linux/x64/tornado/httpserver.py#L287-L319
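
The context above resolves `remote_ip` and `protocol` for the direct peer; the `trusted_downstream` set is consumed when unwinding `X-Forwarded-For` chains. A hedged usage sketch (the proxy address and port are illustrative):

import tornado.ioloop
import tornado.web
from tornado.httpserver import HTTPServer

class MainHandler(tornado.web.RequestHandler):
    def get(self):
        # With xheaders=True, remote_ip/protocol reflect
        # X-Forwarded-For / X-Forwarded-Proto from trusted proxies.
        self.write(self.request.remote_ip)

app = tornado.web.Application([(r"/", MainHandler)])
# trusted_downstream lists proxy addresses to skip when walking
# the X-Forwarded-For chain; 10.0.0.1 here is made up.
server = HTTPServer(app, xheaders=True, trusted_downstream=["10.0.0.1"])
server.listen(8888)
tornado.ioloop.IOLoop.current().start()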
|
||||
pantsbuild/pex
|
473c6ac732ed4bc338b4b20a9ec930d1d722c9b4
|
pex/vendor/_vendored/pip/pip/_vendor/urllib3/exceptions.py
|
python
|
InvalidChunkLength.__repr__
|
(self)
|
return "InvalidChunkLength(got length %r, %i bytes read)" % (
self.length,
self.partial,
)
|
[] |
def __repr__(self):
return "InvalidChunkLength(got length %r, %i bytes read)" % (
self.length,
self.partial,
)
|
[
"def",
"__repr__",
"(",
"self",
")",
":",
"return",
"\"InvalidChunkLength(got length %r, %i bytes read)\"",
"%",
"(",
"self",
".",
"length",
",",
"self",
".",
"partial",
",",
")"
] |
https://github.com/pantsbuild/pex/blob/473c6ac732ed4bc338b4b20a9ec930d1d722c9b4/pex/vendor/_vendored/pip/pip/_vendor/urllib3/exceptions.py#L273-L277
|
|||
webpy/webpy
|
62245f7da4aab8f8607c192b98d5ef93873f995b
|
web/webapi.py
|
python
|
debug
|
(*args)
|
return ""
|
Prints a prettyprinted version of `args` to stderr.
|
Prints a prettyprinted version of `args` to stderr.
|
[
"Prints",
"a",
"prettyprinted",
"version",
"of",
"args",
"to",
"stderr",
"."
] |
def debug(*args):
"""
Prints a prettyprinted version of `args` to stderr.
"""
try:
out = ctx.environ["wsgi.errors"]
except:
out = sys.stderr
for arg in args:
print(pprint.pformat(arg), file=out)
return ""
|
[
"def",
"debug",
"(",
"*",
"args",
")",
":",
"try",
":",
"out",
"=",
"ctx",
".",
"environ",
"[",
"\"wsgi.errors\"",
"]",
"except",
":",
"out",
"=",
"sys",
".",
"stderr",
"for",
"arg",
"in",
"args",
":",
"print",
"(",
"pprint",
".",
"pformat",
"(",
"arg",
")",
",",
"file",
"=",
"out",
")",
"return",
"\"\""
] |
https://github.com/webpy/webpy/blob/62245f7da4aab8f8607c192b98d5ef93873f995b/web/webapi.py#L605-L615
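
Because the function falls back to `sys.stderr` when no request context is active, it can be called from plain scripts as well as handlers. A minimal sketch:

import web

# Outside a request the ctx.environ lookup fails and output goes to
# sys.stderr; inside a handler it goes to the WSGI error stream.
web.debug({"user": "alice", "roles": ["admin", "dev"]})
web.debug([1, 2, 3], "multiple args are pretty-printed in sequence")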
|
|
zeropointdynamics/zelos
|
0c5bd57b4bab56c23c27dc5301ba1a42ee054726
|
src/zelos/scheduler.py
|
python
|
Scheduler.stop
|
(self, stop_reason: str)
|
Stops execution of the running processes, exiting the run loop.
If there is no process running, this will prevent the next run.
Args:
stop_reason: A string passed in for debugging purposes to
indicate what caused Zelos to stop.
|
Stops execution of the running processes, exiting the run loop.
If there is no process running, this will prevent the next run.
|
[
"Stops",
"execution",
"of",
"the",
"running",
"processes",
"exiting",
"the",
"run",
"loop",
".",
"If",
"there",
"is",
"no",
"process",
"running",
"this",
"will",
"prevent",
"the",
"next",
"run",
"."
] |
def stop(self, stop_reason: str) -> None:
"""
Stops execution of the running processes, exiting the run loop.
If there is no process running, this will prevent the next run.
Args:
stop_reason: A string passed in for debugging purposes to
indicate what caused Zelos to stop.
"""
self.stop_and_exec(stop_reason, lambda: False)
|
[
"def",
"stop",
"(",
"self",
",",
"stop_reason",
":",
"str",
")",
"->",
"None",
":",
"self",
".",
"stop_and_exec",
"(",
"stop_reason",
",",
"lambda",
":",
"False",
")"
] |
https://github.com/zeropointdynamics/zelos/blob/0c5bd57b4bab56c23c27dc5301ba1a42ee054726/src/zelos/scheduler.py#L50-L60
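
`stop` is a thin wrapper over `stop_and_exec` with a callback that always returns False. A rough sketch of calling it from instrumentation code; the binary path and the attribute path to the scheduler are assumptions, not verified parts of the zelos API:

from zelos import Zelos

z = Zelos("example_binary")  # path is illustrative

def on_event(zelos):
    # Assumed access path to the scheduler; adjust to the actual
    # zelos API if it differs.
    zelos.internal_engine.scheduler.stop("hit instrumentation point")

# ... register on_event with a hook of your choice, then:
z.start()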
|
||
pyjanitor-devs/pyjanitor
|
2207c0bddbf7e23f56e87892de0405787b11621e
|
janitor/functions/utils.py
|
python
|
_column_sel_dispatch
|
(columns_to_select, df)
|
return [*df.columns[filtered_columns]]
|
Base function for column selection.
Applies only to callables.
The callable is applied to every column in the dataframe.
Either True or False is expected per column.
A list of column names is returned.
|
Base function for column selection.
Applies only to callables.
The callable is applied to every column in the dataframe.
Either True or False is expected per column.
A list of column names is returned.
|
[
"Base",
"function",
"for",
"column",
"selection",
".",
"Applies",
"only",
"to",
"callables",
".",
"The",
"callable",
"is",
"applied",
"to",
"every",
"column",
"in",
"the",
"dataframe",
".",
"Either",
"True",
"or",
"False",
"is",
"expected",
"per",
"column",
".",
"A",
"list",
"of",
"column",
"names",
"is",
"returned",
"."
] |
def _column_sel_dispatch(columns_to_select, df): # noqa: F811
"""
Base function for column selection.
Applies only to callables.
The callable is applied to every column in the dataframe.
Either True or False is expected per column.
A list of column names is returned.
"""
# the function will be applied per series.
# this allows filtration based on the contents of the series
# or based on the name of the series,
# which happens to be a column name as well.
# whatever the case may be,
# the returned values should be a sequence of booleans,
# with at least one True.
filtered_columns = df.agg(columns_to_select)
if not filtered_columns.any():
raise ValueError(
"""
No match was returned for the provided callable.
"""
)
return [*df.columns[filtered_columns]]
|
[
"def",
"_column_sel_dispatch",
"(",
"columns_to_select",
",",
"df",
")",
":",
"# noqa: F811",
"# the function will be applied per series.",
"# this allows filtration based on the contents of the series",
"# or based on the name of the series,",
"# which happens to be a column name as well.",
"# whatever the case may be,",
"# the returned values should be a sequence of booleans,",
"# with at least one True.",
"filtered_columns",
"=",
"df",
".",
"agg",
"(",
"columns_to_select",
")",
"if",
"not",
"filtered_columns",
".",
"any",
"(",
")",
":",
"raise",
"ValueError",
"(",
"\"\"\"\n No match was returned for the provided callable.\n \"\"\"",
")",
"return",
"[",
"*",
"df",
".",
"columns",
"[",
"filtered_columns",
"]",
"]"
] |
https://github.com/pyjanitor-devs/pyjanitor/blob/2207c0bddbf7e23f56e87892de0405787b11621e/janitor/functions/utils.py#L419-L444
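
The dispatch relies on plain pandas semantics: `df.agg(callable)` runs the callable once per column Series, and the resulting booleans index back into `df.columns`. A standalone sketch of the same mechanics (the sample frame is invented):

import pandas as pd

df = pd.DataFrame({"a_price": [1, 2], "a_qty": [3, 4], "note": ["x", "y"]})

# The callable is applied per column Series and must return a boolean.
mask = df.agg(lambda s: s.name.startswith("a_"))
if not mask.any():
    raise ValueError("No match was returned for the provided callable.")
selected = [*df.columns[mask]]
print(selected)  # ['a_price', 'a_qty']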
|
|
sagemath/sage
|
f9b2db94f675ff16963ccdefba4f1a3393b3fe0d
|
src/sage/combinat/species/generating_series.py
|
python
|
ExponentialGeneratingSeriesRing
|
(R)
|
return ExponentialGeneratingSeriesRing_class(R)
|
Return the ring of exponential generating series over ``R``.
Note that it is just a
:class:`LazyPowerSeriesRing` whose elements have
some extra methods.
EXAMPLES::
sage: from sage.combinat.species.generating_series import ExponentialGeneratingSeriesRing
sage: R = ExponentialGeneratingSeriesRing(QQ); R
Lazy Power Series Ring over Rational Field
sage: R([1]).coefficients(4)
[1, 1, 1, 1]
sage: R([1]).counts(4)
[1, 1, 2, 6]
TESTS:
We test to make sure that caching works.
::
sage: R is ExponentialGeneratingSeriesRing(QQ)
True
|
Return the ring of exponential generating series over ``R``.
|
[
"Return",
"the",
"ring",
"of",
"exponential",
"generating",
"series",
"over",
"R",
"."
] |
def ExponentialGeneratingSeriesRing(R):
"""
Return the ring of exponential generating series over ``R``.
Note that it is just a
:class:`LazyPowerSeriesRing` whose elements have
some extra methods.
EXAMPLES::
sage: from sage.combinat.species.generating_series import ExponentialGeneratingSeriesRing
sage: R = ExponentialGeneratingSeriesRing(QQ); R
Lazy Power Series Ring over Rational Field
sage: R([1]).coefficients(4)
[1, 1, 1, 1]
sage: R([1]).counts(4)
[1, 1, 2, 6]
TESTS:
We test to make sure that caching works.
::
sage: R is ExponentialGeneratingSeriesRing(QQ)
True
"""
return ExponentialGeneratingSeriesRing_class(R)
|
[
"def",
"ExponentialGeneratingSeriesRing",
"(",
"R",
")",
":",
"return",
"ExponentialGeneratingSeriesRing_class",
"(",
"R",
")"
] |
https://github.com/sagemath/sage/blob/f9b2db94f675ff16963ccdefba4f1a3393b3fe0d/src/sage/combinat/species/generating_series.py#L167-L194
|
|
Yelp/clusterman
|
54beef89c01a2681aafd1fbb93b6ad5f6252d6cf
|
clusterman/simulator/simulated_spot_fleet_resource_group.py
|
python
|
SimulatedSpotFleetResourceGroup._get_resource_group_tags
|
(self)
|
return {}
|
[] |
def _get_resource_group_tags(self):
return {}
|
[
"def",
"_get_resource_group_tags",
"(",
"self",
")",
":",
"return",
"{",
"}"
] |
https://github.com/Yelp/clusterman/blob/54beef89c01a2681aafd1fbb93b6ad5f6252d6cf/clusterman/simulator/simulated_spot_fleet_resource_group.py#L212-L213
|
|||
avocado-framework/avocado
|
1f9b3192e8ba47d029c33fe21266bd113d17811f
|
avocado/utils/asset.py
|
python
|
Asset._verify_hash
|
(self, asset_path)
|
return self._has_valid_hash(asset_path, self.asset_hash)
|
Verify if the `asset_path` hash matches the hash in the hash file.
:param asset_path: full path of the asset file.
:returns: True when self.asset_hash is None or when it has the same
value as the hash of the asset_file, otherwise return False.
:rtype: bool
|
Verify if the `asset_path` hash matches the hash in the hash file.
|
[
"Verify",
"if",
"the",
"asset_path",
"hash",
"matches",
"the",
"hash",
"in",
"the",
"hash",
"file",
"."
] |
def _verify_hash(self, asset_path):
"""
Verify if the `asset_path` hash matches the hash in the hash file.
:param asset_path: full path of the asset file.
:returns: True when self.asset_hash is None or when it has the same
value as the hash of the asset_file, otherwise return False.
:rtype: bool
"""
return self._has_valid_hash(asset_path, self.asset_hash)
|
[
"def",
"_verify_hash",
"(",
"self",
",",
"asset_path",
")",
":",
"return",
"self",
".",
"_has_valid_hash",
"(",
"asset_path",
",",
"self",
".",
"asset_hash",
")"
] |
https://github.com/avocado-framework/avocado/blob/1f9b3192e8ba47d029c33fe21266bd113d17811f/avocado/utils/asset.py#L330-L339
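
`_has_valid_hash` is not shown in this record, but the contract in the docstring (a missing expected hash always verifies) can be sketched with `hashlib` alone. The function name and default algorithm below are assumptions, not avocado's actual implementation:

import hashlib

def has_valid_hash(asset_path, asset_hash, algorithm="sha1"):
    # A missing expected hash counts as valid, per the docstring.
    if asset_hash is None:
        return True
    digest = hashlib.new(algorithm)
    with open(asset_path, "rb") as f:
        # Stream the file in chunks to keep memory bounded.
        for chunk in iter(lambda: f.read(65536), b""):
            digest.update(chunk)
    return digest.hexdigest() == asset_hash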
|
|
PyHDI/veriloggen
|
2382d200deabf59cfcfd741f5eba371010aaf2bb
|
veriloggen/dataflow/visitor.py
|
python
|
InputVisitor.visit__Variable
|
(self, node)
|
return set([node])
|
[] |
def visit__Variable(self, node):
if isinstance(node.input_data, dtypes._Numeric):
return self.visit(node.input_data)
return set([node])
|
[
"def",
"visit__Variable",
"(",
"self",
",",
"node",
")",
":",
"if",
"isinstance",
"(",
"node",
".",
"input_data",
",",
"dtypes",
".",
"_Numeric",
")",
":",
"return",
"self",
".",
"visit",
"(",
"node",
".",
"input_data",
")",
"return",
"set",
"(",
"[",
"node",
"]",
")"
] |
https://github.com/PyHDI/veriloggen/blob/2382d200deabf59cfcfd741f5eba371010aaf2bb/veriloggen/dataflow/visitor.py#L107-L110
|
|||
naftaliharris/tauthon
|
5587ceec329b75f7caf6d65a036db61ac1bae214
|
Lib/typing.py
|
python
|
_subs_tree
|
(cls, tvars=None, args=None)
|
return tree_args
|
Calculate substitution tree for generic cls after
replacing its type parameters with substitutions in tvars -> args (if any).
Repeat the same cyclically following __origin__'s.
|
Calculate substitution tree for generic cls after
replacing its type parameters with substitutions in tvars -> args (if any).
Repeat the same cyclically following __origin__'s.
|
[
"Calculate",
"substitution",
"tree",
"for",
"generic",
"cls",
"after",
"replacing",
"its",
"type",
"parameters",
"with",
"substitutions",
"in",
"tvars",
"-",
">",
"args",
"(",
"if",
"any",
")",
".",
"Repeat",
"the",
"same",
"cyclicaly",
"following",
"__origin__",
"s",
"."
] |
def _subs_tree(cls, tvars=None, args=None):
""" Calculate substitution tree for generic cls after
replacing its type parameters with substitutions in tvars -> args (if any).
Repeat the same cyclically following __origin__'s.
"""
if cls.__origin__ is None:
return cls
# Make a chain of origins (i.e. cls -> cls.__origin__)
current = cls.__origin__
orig_chain = []
while current.__origin__ is not None:
orig_chain.append(current)
current = current.__origin__
# Replace type variables in __args__ if asked ...
tree_args = []
for arg in cls.__args__:
tree_args.append(_replace_arg(arg, tvars, args))
# ... then continue replacing down the origin chain.
for ocls in orig_chain:
new_tree_args = []
for i, arg in enumerate(ocls.__args__):
new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args))
tree_args = new_tree_args
return tree_args
|
[
"def",
"_subs_tree",
"(",
"cls",
",",
"tvars",
"=",
"None",
",",
"args",
"=",
"None",
")",
":",
"if",
"cls",
".",
"__origin__",
"is",
"None",
":",
"return",
"cls",
"# Make of chain of origins (i.e. cls -> cls.__origin__)",
"current",
"=",
"cls",
".",
"__origin__",
"orig_chain",
"=",
"[",
"]",
"while",
"current",
".",
"__origin__",
"is",
"not",
"None",
":",
"orig_chain",
".",
"append",
"(",
"current",
")",
"current",
"=",
"current",
".",
"__origin__",
"# Replace type variables in __args__ if asked ...",
"tree_args",
"=",
"[",
"]",
"for",
"arg",
"in",
"cls",
".",
"__args__",
":",
"tree_args",
".",
"append",
"(",
"_replace_arg",
"(",
"arg",
",",
"tvars",
",",
"args",
")",
")",
"# ... then continue replacing down the origin chain.",
"for",
"ocls",
"in",
"orig_chain",
":",
"new_tree_args",
"=",
"[",
"]",
"for",
"i",
",",
"arg",
"in",
"enumerate",
"(",
"ocls",
".",
"__args__",
")",
":",
"new_tree_args",
".",
"append",
"(",
"_replace_arg",
"(",
"arg",
",",
"ocls",
".",
"__parameters__",
",",
"tree_args",
")",
")",
"tree_args",
"=",
"new_tree_args",
"return",
"tree_args"
] |
https://github.com/naftaliharris/tauthon/blob/5587ceec329b75f7caf6d65a036db61ac1bae214/Lib/typing.py#L582-L606
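
The origin-chain walk is easier to follow on a toy structure. A self-contained sketch of the same idea; `Node` and `replace_arg` are stand-ins for typing's generic classes and its `_replace_arg` helper, not the real API:

class Node:
    """Toy stand-in for a parameterized generic class."""
    def __init__(self, name, origin=None, args=(), parameters=()):
        self.name = name
        self.__origin__ = origin
        self.__args__ = args
        self.__parameters__ = parameters

def replace_arg(arg, tvars, args):
    # Positional substitution of a type variable (cf. _replace_arg).
    if tvars and arg in tvars:
        return args[list(tvars).index(arg)]
    return arg

def subs_tree(cls, tvars=None, args=None):
    if cls.__origin__ is None:
        return cls
    # Build the chain of origins: cls -> cls.__origin__ -> ...
    current, orig_chain = cls.__origin__, []
    while current.__origin__ is not None:
        orig_chain.append(current)
        current = current.__origin__
    # Substitute in __args__, then keep substituting down the chain.
    tree_args = [replace_arg(a, tvars, args) for a in cls.__args__]
    for ocls in orig_chain:
        tree_args = [replace_arg(a, ocls.__parameters__, tree_args)
                     for a in ocls.__args__]
    return tree_args

# C[T][int]: substituting T with int resolves through the chain.
T = "T"
base = Node("Base", parameters=(T,))
c_t = Node("C[T]", origin=base, args=(T,), parameters=(T,))
c_int = Node("C[int]", origin=c_t, args=(int,))
print(subs_tree(c_int))  # [<class 'int'>]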
|
|
NVIDIA/OpenSeq2Seq
|
8681d381ed404fde516e2c1b823de5a213c59aba
|
open_seq2seq/parts/rnns/attention_wrapper.py
|
python
|
AttentionWrapper.__init__
|
(
self,
cell,
attention_mechanism,
attention_layer_size=None,
alignment_history=False,
cell_input_fn=None,
output_attention=True,
initial_cell_state=None,
name=None
)
|
Construct the `AttentionWrapper`.
**NOTE** If you are using the `BeamSearchDecoder` with a cell wrapped in
`AttentionWrapper`, then you must ensure that:
- The encoder output has been tiled to `beam_width` via
@{tf.contrib.seq2seq.tile_batch} (NOT `tf.tile`).
- The `batch_size` argument passed to the `zero_state` method of this
wrapper is equal to `true_batch_size * beam_width`.
- The initial state created with `zero_state` above contains a
`cell_state` value containing properly tiled final state from the
encoder.
An example:
```
tiled_encoder_outputs = tf.contrib.seq2seq.tile_batch(
encoder_outputs, multiplier=beam_width)
tiled_encoder_final_state = tf.contrib.seq2seq.tile_batch(
encoder_final_state, multiplier=beam_width)
tiled_sequence_length = tf.contrib.seq2seq.tile_batch(
sequence_length, multiplier=beam_width)
attention_mechanism = MyFavoriteAttentionMechanism(
num_units=attention_depth,
memory=tiled_encoder_outputs,
memory_sequence_length=tiled_sequence_length)
attention_cell = AttentionWrapper(cell, attention_mechanism, ...)
decoder_initial_state = attention_cell.zero_state(
dtype, batch_size=true_batch_size * beam_width)
decoder_initial_state = decoder_initial_state.clone(
cell_state=tiled_encoder_final_state)
```
Args:
cell: An instance of `RNNCell`.
attention_mechanism: A list of `AttentionMechanism` instances or a single
instance.
attention_layer_size: A list of Python integers or a single Python
integer, the depth of the attention (output) layer(s). If None
(default), use the context as attention at each time step. Otherwise,
feed the context and cell output into the attention layer to generate
attention at each time step. If attention_mechanism is a list,
attention_layer_size must be a list of the same length.
alignment_history: Python boolean, whether to store alignment history
from all time steps in the final output state (currently stored as a
time major `TensorArray` on which you must call `stack()`).
cell_input_fn: (optional) A `callable`. The default is:
`lambda inputs, attention: array_ops.concat([inputs, attention], -1)`.
output_attention: bool or "both". If `True` (default), the output at each
time step is the attention value. This is the behavior of Luong-style
attention mechanisms. If `False`, the output at each time step is
the output of `cell`. This is the behavior of Bahdanau-style
attention mechanisms. If "both", the attention value and cell output
are concatenated together and set as the output. In all cases, the
`attention` tensor is propagated to the next time step via the state and
is used there. This flag only controls whether the attention mechanism
is propagated up to the next cell in an RNN stack or to the top RNN
output.
initial_cell_state: The initial state value to use for the cell when
the user calls `zero_state()`. Note that if this value is provided
now, and the user uses a `batch_size` argument of `zero_state` which
does not match the batch size of `initial_cell_state`, proper
behavior is not guaranteed.
name: Name to use when creating ops.
Raises:
TypeError: `attention_layer_size` is not None and (`attention_mechanism`
is a list but `attention_layer_size` is not; or vice versa).
ValueError: if `attention_layer_size` is not None, `attention_mechanism`
is a list, and its length does not match that of `attention_layer_size`.
|
Construct the `AttentionWrapper`.
|
[
"Construct",
"the",
"AttentionWrapper",
"."
] |
def __init__(
self,
cell,
attention_mechanism,
attention_layer_size=None,
alignment_history=False,
cell_input_fn=None,
output_attention=True,
initial_cell_state=None,
name=None
):
"""Construct the `AttentionWrapper`.
**NOTE** If you are using the `BeamSearchDecoder` with a cell wrapped in
`AttentionWrapper`, then you must ensure that:
- The encoder output has been tiled to `beam_width` via
@{tf.contrib.seq2seq.tile_batch} (NOT `tf.tile`).
- The `batch_size` argument passed to the `zero_state` method of this
wrapper is equal to `true_batch_size * beam_width`.
- The initial state created with `zero_state` above contains a
`cell_state` value containing properly tiled final state from the
encoder.
An example:
```
tiled_encoder_outputs = tf.contrib.seq2seq.tile_batch(
encoder_outputs, multiplier=beam_width)
tiled_encoder_final_state = tf.contrib.seq2seq.tile_batch(
encoder_final_state, multiplier=beam_width)
tiled_sequence_length = tf.contrib.seq2seq.tile_batch(
sequence_length, multiplier=beam_width)
attention_mechanism = MyFavoriteAttentionMechanism(
num_units=attention_depth,
memory=tiled_encoder_outputs,
memory_sequence_length=tiled_sequence_length)
attention_cell = AttentionWrapper(cell, attention_mechanism, ...)
decoder_initial_state = attention_cell.zero_state(
dtype, batch_size=true_batch_size * beam_width)
decoder_initial_state = decoder_initial_state.clone(
cell_state=tiled_encoder_final_state)
```
Args:
cell: An instance of `RNNCell`.
attention_mechanism: A list of `AttentionMechanism` instances or a single
instance.
attention_layer_size: A list of Python integers or a single Python
integer, the depth of the attention (output) layer(s). If None
(default), use the context as attention at each time step. Otherwise,
feed the context and cell output into the attention layer to generate
attention at each time step. If attention_mechanism is a list,
attention_layer_size must be a list of the same length.
alignment_history: Python boolean, whether to store alignment history
from all time steps in the final output state (currently stored as a
time major `TensorArray` on which you must call `stack()`).
cell_input_fn: (optional) A `callable`. The default is:
`lambda inputs, attention: array_ops.concat([inputs, attention], -1)`.
output_attention: bool or "both". If `True` (default), the output at each
time step is the attention value. This is the behavior of Luong-style
attention mechanisms. If `False`, the output at each time step is
the output of `cell`. This is the behavior of Bahdanau-style
attention mechanisms. If "both", the attention value and cell output
are concatenated together and set as the output. In all cases, the
`attention` tensor is propagated to the next time step via the state and
is used there. This flag only controls whether the attention mechanism
is propagated up to the next cell in an RNN stack or to the top RNN
output.
initial_cell_state: The initial state value to use for the cell when
the user calls `zero_state()`. Note that if this value is provided
now, and the user uses a `batch_size` argument of `zero_state` which
does not match the batch size of `initial_cell_state`, proper
behavior is not guaranteed.
name: Name to use when creating ops.
Raises:
TypeError: `attention_layer_size` is not None and (`attention_mechanism`
is a list but `attention_layer_size` is not; or vice versa).
ValueError: if `attention_layer_size` is not None, `attention_mechanism`
is a list, and its length does not match that of `attention_layer_size`.
"""
super(AttentionWrapper, self).__init__(name=name)
rnn_cell_impl.assert_like_rnncell("cell", cell)
if isinstance(attention_mechanism, (list, tuple)):
self._is_multi = True
attention_mechanisms = attention_mechanism
for attention_mechanism in attention_mechanisms:
if not isinstance(attention_mechanism, AttentionMechanism):
raise TypeError(
"attention_mechanism must contain only instances of "
"AttentionMechanism, saw type: %s" %
type(attention_mechanism).__name__
)
else:
self._is_multi = False
if not isinstance(attention_mechanism, AttentionMechanism):
raise TypeError(
"attention_mechanism must be an AttentionMechanism or list of "
"multiple AttentionMechanism instances, saw type: %s" %
type(attention_mechanism).__name__
)
attention_mechanisms = (attention_mechanism,)
if cell_input_fn is None:
cell_input_fn = (
lambda inputs, attention: array_ops.concat([inputs, attention], -1)
)
else:
if not callable(cell_input_fn):
raise TypeError(
"cell_input_fn must be callable, saw type: %s" %
type(cell_input_fn).__name__
)
if attention_layer_size is not None:
attention_layer_sizes = tuple(
    attention_layer_size
    if isinstance(attention_layer_size, (list, tuple))
    else (attention_layer_size,)
)
if len(attention_layer_sizes) != len(attention_mechanisms):
raise ValueError(
"If provided, attention_layer_size must contain exactly one "
"integer per attention_mechanism, saw: %d vs %d" %
(len(attention_layer_sizes), len(attention_mechanisms))
)
self._attention_layers = tuple(
layers_core.Dense(
attention_layer_size,
name="attention_layer",
use_bias=False,
dtype=attention_mechanisms[i].dtype
) for i, attention_layer_size in enumerate(attention_layer_sizes)
)
self._attention_layer_size = sum(attention_layer_sizes)
else:
self._attention_layers = None
self._attention_layer_size = sum(
attention_mechanism.values.get_shape()[-1].value
for attention_mechanism in attention_mechanisms
)
self._cell = cell
self._attention_mechanisms = attention_mechanisms
self._cell_input_fn = cell_input_fn
self._output_attention = output_attention
self._alignment_history = alignment_history
with ops.name_scope(name, "AttentionWrapperInit"):
if initial_cell_state is None:
self._initial_cell_state = None
else:
final_state_tensor = nest.flatten(initial_cell_state)[-1]
state_batch_size = (
final_state_tensor.shape[0].value or
array_ops.shape(final_state_tensor)[0]
)
error_message = (
"When constructing AttentionWrapper %s: " % self._base_name +
"Non-matching batch sizes between the memory "
"(encoder output) and initial_cell_state. Are you using "
"the BeamSearchDecoder? You may need to tile your initial state "
"via the tf.contrib.seq2seq.tile_batch function with argument "
"multiple=beam_width."
)
with ops.control_dependencies(
self._batch_size_checks(state_batch_size, error_message)
):
self._initial_cell_state = nest.map_structure(
lambda s: array_ops.identity(s, name="check_initial_cell_state"),
initial_cell_state
)
|
[
"def",
"__init__",
"(",
"self",
",",
"cell",
",",
"attention_mechanism",
",",
"attention_layer_size",
"=",
"None",
",",
"alignment_history",
"=",
"False",
",",
"cell_input_fn",
"=",
"None",
",",
"output_attention",
"=",
"True",
",",
"initial_cell_state",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"super",
"(",
"AttentionWrapper",
",",
"self",
")",
".",
"__init__",
"(",
"name",
"=",
"name",
")",
"rnn_cell_impl",
".",
"assert_like_rnncell",
"(",
"\"cell\"",
",",
"cell",
")",
"if",
"isinstance",
"(",
"attention_mechanism",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"self",
".",
"_is_multi",
"=",
"True",
"attention_mechanisms",
"=",
"attention_mechanism",
"for",
"attention_mechanism",
"in",
"attention_mechanisms",
":",
"if",
"not",
"isinstance",
"(",
"attention_mechanism",
",",
"AttentionMechanism",
")",
":",
"raise",
"TypeError",
"(",
"\"attention_mechanism must contain only instances of \"",
"\"AttentionMechanism, saw type: %s\"",
"%",
"type",
"(",
"attention_mechanism",
")",
".",
"__name__",
")",
"else",
":",
"self",
".",
"_is_multi",
"=",
"False",
"if",
"not",
"isinstance",
"(",
"attention_mechanism",
",",
"AttentionMechanism",
")",
":",
"raise",
"TypeError",
"(",
"\"attention_mechanism must be an AttentionMechanism or list of \"",
"\"multiple AttentionMechanism instances, saw type: %s\"",
"%",
"type",
"(",
"attention_mechanism",
")",
".",
"__name__",
")",
"attention_mechanisms",
"=",
"(",
"attention_mechanism",
",",
")",
"if",
"cell_input_fn",
"is",
"None",
":",
"cell_input_fn",
"=",
"(",
"lambda",
"inputs",
",",
"attention",
":",
"array_ops",
".",
"concat",
"(",
"[",
"inputs",
",",
"attention",
"]",
",",
"-",
"1",
")",
")",
"else",
":",
"if",
"not",
"callable",
"(",
"cell_input_fn",
")",
":",
"raise",
"TypeError",
"(",
"\"cell_input_fn must be callable, saw type: %s\"",
"%",
"type",
"(",
"cell_input_fn",
")",
".",
"__name__",
")",
"if",
"attention_layer_size",
"is",
"not",
"None",
":",
"attention_layer_sizes",
"=",
"tuple",
"(",
"attention_layer_size",
"if",
"isinstance",
"(",
"attention_layer_size",
",",
"(",
"list",
",",
"tuple",
")",
")",
"else",
"(",
"attention_layer_size",
",",
")",
")",
"if",
"len",
"(",
"attention_layer_sizes",
")",
"!=",
"len",
"(",
"attention_mechanisms",
")",
":",
"raise",
"ValueError",
"(",
"\"If provided, attention_layer_size must contain exactly one \"",
"\"integer per attention_mechanism, saw: %d vs %d\"",
"%",
"(",
"len",
"(",
"attention_layer_sizes",
")",
",",
"len",
"(",
"attention_mechanisms",
")",
")",
")",
"self",
".",
"_attention_layers",
"=",
"tuple",
"(",
"layers_core",
".",
"Dense",
"(",
"attention_layer_size",
",",
"name",
"=",
"\"attention_layer\"",
",",
"use_bias",
"=",
"False",
",",
"dtype",
"=",
"attention_mechanisms",
"[",
"i",
"]",
".",
"dtype",
")",
"for",
"i",
",",
"attention_layer_size",
"in",
"enumerate",
"(",
"attention_layer_sizes",
")",
")",
"self",
".",
"_attention_layer_size",
"=",
"sum",
"(",
"attention_layer_sizes",
")",
"else",
":",
"self",
".",
"_attention_layers",
"=",
"None",
"self",
".",
"_attention_layer_size",
"=",
"sum",
"(",
"attention_mechanism",
".",
"values",
".",
"get_shape",
"(",
")",
"[",
"-",
"1",
"]",
".",
"value",
"for",
"attention_mechanism",
"in",
"attention_mechanisms",
")",
"self",
".",
"_cell",
"=",
"cell",
"self",
".",
"_attention_mechanisms",
"=",
"attention_mechanisms",
"self",
".",
"_cell_input_fn",
"=",
"cell_input_fn",
"self",
".",
"_output_attention",
"=",
"output_attention",
"self",
".",
"_alignment_history",
"=",
"alignment_history",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"\"AttentionWrapperInit\"",
")",
":",
"if",
"initial_cell_state",
"is",
"None",
":",
"self",
".",
"_initial_cell_state",
"=",
"None",
"else",
":",
"final_state_tensor",
"=",
"nest",
".",
"flatten",
"(",
"initial_cell_state",
")",
"[",
"-",
"1",
"]",
"state_batch_size",
"=",
"(",
"final_state_tensor",
".",
"shape",
"[",
"0",
"]",
".",
"value",
"or",
"array_ops",
".",
"shape",
"(",
"final_state_tensor",
")",
"[",
"0",
"]",
")",
"error_message",
"=",
"(",
"\"When constructing AttentionWrapper %s: \"",
"%",
"self",
".",
"_base_name",
"+",
"\"Non-matching batch sizes between the memory \"",
"\"(encoder output) and initial_cell_state. Are you using \"",
"\"the BeamSearchDecoder? You may need to tile your initial state \"",
"\"via the tf.contrib.seq2seq.tile_batch function with argument \"",
"\"multiple=beam_width.\"",
")",
"with",
"ops",
".",
"control_dependencies",
"(",
"self",
".",
"_batch_size_checks",
"(",
"state_batch_size",
",",
"error_message",
")",
")",
":",
"self",
".",
"_initial_cell_state",
"=",
"nest",
".",
"map_structure",
"(",
"lambda",
"s",
":",
"array_ops",
".",
"identity",
"(",
"s",
",",
"name",
"=",
"\"check_initial_cell_state\"",
")",
",",
"initial_cell_state",
")"
] |
https://github.com/NVIDIA/OpenSeq2Seq/blob/8681d381ed404fde516e2c1b823de5a213c59aba/open_seq2seq/parts/rnns/attention_wrapper.py#L1422-L1593
|
||
jbjorne/TEES
|
caf19a4a1352ac59f5dc13a8684cc42ce4342d9d
|
ExampleBuilders/ExampleStats.py
|
python
|
ExampleStats.printStats
|
(self)
|
[] |
def printStats(self):
print >> sys.stderr, "Example Statistics (total/filtered)"
#print >> sys.stderr, self.examplesByClass.keys()
counts = [0,0]
for className in sorted(self.examplesByClass.keys()):
if self.filteredByClassByFilter.has_key(className):
filterStr = str( self.filteredByClassByFilter[className] )
else:
filterStr = ""
print >> sys.stderr, " ", className + ": " + str(self.examplesByClass[className]) + "/" + str(self.filteredByClass[className]), filterStr
if className != "neg":
counts[0] += self.examplesByClass[className]
counts[1] += self.filteredByClass[className]
if counts[0] != 0:
posCoverage = float(counts[0] - counts[1]) / float(counts[0]) * 100.0
print >> sys.stderr, "Positives Coverage %.2f" % posCoverage, "%", counts
# Print generic counts
for value in sorted(self.values.keys()):
print >> sys.stderr, value + ":", self.values[value]
for variable in sorted(self.variables.keys()):
print >> sys.stderr, variable + ":", self.variables[variable]
|
[
"def",
"printStats",
"(",
"self",
")",
":",
"print",
">>",
"sys",
".",
"stderr",
",",
"\"Example Statistics (total/filtered)\"",
"#print >> sys.stderr, self.examplesByClass.keys()",
"counts",
"=",
"[",
"0",
",",
"0",
"]",
"for",
"className",
"in",
"sorted",
"(",
"self",
".",
"examplesByClass",
".",
"keys",
"(",
")",
")",
":",
"if",
"self",
".",
"filteredByClassByFilter",
".",
"has_key",
"(",
"className",
")",
":",
"filterStr",
"=",
"str",
"(",
"self",
".",
"filteredByClassByFilter",
"[",
"className",
"]",
")",
"else",
":",
"filterStr",
"=",
"\"\"",
"print",
">>",
"sys",
".",
"stderr",
",",
"\" \"",
",",
"className",
"+",
"\": \"",
"+",
"str",
"(",
"self",
".",
"examplesByClass",
"[",
"className",
"]",
")",
"+",
"\"/\"",
"+",
"str",
"(",
"self",
".",
"filteredByClass",
"[",
"className",
"]",
")",
",",
"filterStr",
"if",
"className",
"!=",
"\"neg\"",
":",
"counts",
"[",
"0",
"]",
"+=",
"self",
".",
"examplesByClass",
"[",
"className",
"]",
"counts",
"[",
"1",
"]",
"+=",
"self",
".",
"filteredByClass",
"[",
"className",
"]",
"if",
"counts",
"[",
"0",
"]",
"!=",
"0",
":",
"posCoverage",
"=",
"float",
"(",
"counts",
"[",
"0",
"]",
"-",
"counts",
"[",
"1",
"]",
")",
"/",
"float",
"(",
"counts",
"[",
"0",
"]",
")",
"*",
"100.0",
"print",
">>",
"sys",
".",
"stderr",
",",
"\"Positives Coverage %.2f\"",
"%",
"posCoverage",
",",
"\"%\"",
",",
"counts",
"# Print generic counts",
"for",
"value",
"in",
"sorted",
"(",
"self",
".",
"values",
".",
"keys",
"(",
")",
")",
":",
"print",
">>",
"sys",
".",
"stderr",
",",
"value",
"+",
"\":\"",
",",
"self",
".",
"values",
"[",
"value",
"]",
"for",
"variable",
"in",
"sorted",
"(",
"self",
".",
"variables",
".",
"keys",
"(",
")",
")",
":",
"print",
">>",
"sys",
".",
"stderr",
",",
"variable",
"+",
"\":\"",
",",
"self",
".",
"variables",
"[",
"variable",
"]"
] |
https://github.com/jbjorne/TEES/blob/caf19a4a1352ac59f5dc13a8684cc42ce4342d9d/ExampleBuilders/ExampleStats.py#L64-L84
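
The function above is Python 2 (`print >> stream`, `dict.has_key`). For readers porting it, a rough Python 3 equivalent of the same logic (a sketch, not part of TEES):

import sys

def print_stats(self):
    print("Example Statistics (total/filtered)", file=sys.stderr)
    counts = [0, 0]
    for class_name in sorted(self.examplesByClass):
        # dict.get replaces the removed has_key check.
        filter_str = str(self.filteredByClassByFilter.get(class_name, ""))
        print("  %s: %s/%s %s" % (class_name,
                                  self.examplesByClass[class_name],
                                  self.filteredByClass[class_name],
                                  filter_str), file=sys.stderr)
        if class_name != "neg":
            counts[0] += self.examplesByClass[class_name]
            counts[1] += self.filteredByClass[class_name]
    if counts[0] != 0:
        pos_coverage = (counts[0] - counts[1]) / counts[0] * 100.0
        print("Positives Coverage %.2f %%" % pos_coverage, counts,
              file=sys.stderr)
    for value in sorted(self.values):
        print("%s: %s" % (value, self.values[value]), file=sys.stderr)
    for variable in sorted(self.variables):
        print("%s: %s" % (variable, self.variables[variable]),
              file=sys.stderr)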
|
||||
makerbot/ReplicatorG
|
d6f2b07785a5a5f1e172fb87cb4303b17c575d5d
|
skein_engines/skeinforge-35/skeinforge_application/skeinforge_plugins/profile.py
|
python
|
ProfileMenuSaveListener.__init__
|
( self, menu, window )
|
Set the menu.
|
Set the menu.
|
[
"Set",
"the",
"menu",
"."
] |
def __init__( self, menu, window ):
"Set the menu."
self.menu = menu
addToProfileMenu( menu )
euclidean.addElementToListTableIfNotThere( self, window, settings.globalProfileSaveListenerListTable )
|
[
"def",
"__init__",
"(",
"self",
",",
"menu",
",",
"window",
")",
":",
"self",
".",
"menu",
"=",
"menu",
"addToProfileMenu",
"(",
"menu",
")",
"euclidean",
".",
"addElementToListTableIfNotThere",
"(",
"self",
",",
"window",
",",
"settings",
".",
"globalProfileSaveListenerListTable",
")"
] |
https://github.com/makerbot/ReplicatorG/blob/d6f2b07785a5a5f1e172fb87cb4303b17c575d5d/skein_engines/skeinforge-35/skeinforge_application/skeinforge_plugins/profile.py#L109-L113
|
||
spesmilo/electrum
|
bdbd59300fbd35b01605e66145458e5f396108e8
|
electrum/transaction.py
|
python
|
PartialTxInput.already_has_some_signatures
|
(self)
|
return (self.part_sigs
or self.script_sig is not None
or self.witness is not None)
|
Returns whether progress has been made towards completing this input.
|
Returns whether progress has been made towards completing this input.
|
[
"Returns",
"whether",
"progress",
"has",
"been",
"made",
"towards",
"completing",
"this",
"input",
"."
] |
def already_has_some_signatures(self) -> bool:
"""Returns whether progress has been made towards completing this input."""
return (self.part_sigs
or self.script_sig is not None
or self.witness is not None)
|
[
"def",
"already_has_some_signatures",
"(",
"self",
")",
"->",
"bool",
":",
"return",
"(",
"self",
".",
"part_sigs",
"or",
"self",
".",
"script_sig",
"is",
"not",
"None",
"or",
"self",
".",
"witness",
"is",
"not",
"None",
")"
] |
https://github.com/spesmilo/electrum/blob/bdbd59300fbd35b01605e66145458e5f396108e8/electrum/transaction.py#L1558-L1562
|
|
FrancoisSchnell/GPicSync
|
07d7c4b7da44e4e6665abb94bbb9ef6da0e779d1
|
src/gpicsync-GUI.py
|
python
|
GUI.geoWriterFrame
|
(self,evt)
|
Frame to manually write latitude/longitude in the EXIF header of the picture
|
Frame to manually write latitude/longitude in the EXIF header of the picture
|
[
"Frame",
"to",
"manually",
"write",
"latitude",
"/",
"longitude",
"in",
"the",
"EXIF",
"header",
"of",
"the",
"picture"
] |
def geoWriterFrame(self,evt):
""" Frame to manually write latitude/longitude in the EXIF header of the picture"""
self.winGeoFrame=wx.Frame(win,size=(300,300),title=("Manual latitude/longitude EXIF writer"))
bkg=wx.Panel(self.winGeoFrame)
instructionLabel = wx.StaticText(bkg, -1,("Enter coordinates in decimal degrees"))
latLabel = wx.StaticText(bkg, -1,("Latitude")+":")
self.latEntry=wx.TextCtrl(bkg,size=(100,-1))
self.latEntry.SetValue(str(self.defaultLat))
lonLabel = wx.StaticText(bkg, -1,("Longitude")+":")
self.lonEntry=wx.TextCtrl(bkg,size=(100,-1))
self.lonEntry.SetValue(str(self.defaultLon))
eleLabel = wx.StaticText(bkg, -1,("Eventual elevation (meters)")+":")
self.eleEntry=wx.TextCtrl(bkg,size=(100,-1))
selectButton=wx.Button(bkg,size=(-1,-1),label=("Select and write in picture(s)"))
self.Bind(wx.EVT_BUTTON, self.manualGeoWrite, selectButton)
vbox=wx.BoxSizer(wx.VERTICAL)
vbox.Add(instructionLabel,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
vbox.Add(latLabel,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=5)
vbox.Add(self.latEntry,proportion=0,flag=wx.ALIGN_CENTER,border=5)
vbox.Add(lonLabel,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=5)
vbox.Add(self.lonEntry,proportion=0,flag=wx.ALIGN_CENTER,border=5)
vbox.Add(eleLabel,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=5)
vbox.Add(self.eleEntry,proportion=0,flag=wx.ALIGN_CENTER,border=5)
vbox.Add(selectButton,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
bkg.SetSizer(vbox)
self.winGeoFrame.Show()
|
[
"def",
"geoWriterFrame",
"(",
"self",
",",
"evt",
")",
":",
"self",
".",
"winGeoFrame",
"=",
"wx",
".",
"Frame",
"(",
"win",
",",
"size",
"=",
"(",
"300",
",",
"300",
")",
",",
"title",
"=",
"(",
"\"Manual latitude/longitude EXIF writer\"",
")",
")",
"bkg",
"=",
"wx",
".",
"Panel",
"(",
"self",
".",
"winGeoFrame",
")",
"instructionLabel",
"=",
"wx",
".",
"StaticText",
"(",
"bkg",
",",
"-",
"1",
",",
"(",
"\"Enter coordinates in decimal degrees\"",
")",
")",
"latLabel",
"=",
"wx",
".",
"StaticText",
"(",
"bkg",
",",
"-",
"1",
",",
"(",
"\"Latitude\"",
")",
"+",
"\":\"",
")",
"self",
".",
"latEntry",
"=",
"wx",
".",
"TextCtrl",
"(",
"bkg",
",",
"size",
"=",
"(",
"100",
",",
"-",
"1",
")",
")",
"self",
".",
"latEntry",
".",
"SetValue",
"(",
"str",
"(",
"self",
".",
"defaultLat",
")",
")",
"lonLabel",
"=",
"wx",
".",
"StaticText",
"(",
"bkg",
",",
"-",
"1",
",",
"(",
"\"Longitude\"",
")",
"+",
"\":\"",
")",
"self",
".",
"lonEntry",
"=",
"wx",
".",
"TextCtrl",
"(",
"bkg",
",",
"size",
"=",
"(",
"100",
",",
"-",
"1",
")",
")",
"self",
".",
"lonEntry",
".",
"SetValue",
"(",
"str",
"(",
"self",
".",
"defaultLon",
")",
")",
"eleLabel",
"=",
"wx",
".",
"StaticText",
"(",
"bkg",
",",
"-",
"1",
",",
"(",
"\"Eventual elevation (meters)\"",
")",
"+",
"\":\"",
")",
"self",
".",
"eleEntry",
"=",
"wx",
".",
"TextCtrl",
"(",
"bkg",
",",
"size",
"=",
"(",
"100",
",",
"-",
"1",
")",
")",
"selectButton",
"=",
"wx",
".",
"Button",
"(",
"bkg",
",",
"size",
"=",
"(",
"-",
"1",
",",
"-",
"1",
")",
",",
"label",
"=",
"(",
"\"Select and write in picture(s)\"",
")",
")",
"self",
".",
"Bind",
"(",
"wx",
".",
"EVT_BUTTON",
",",
"self",
".",
"manualGeoWrite",
",",
"selectButton",
")",
"vbox",
"=",
"wx",
".",
"BoxSizer",
"(",
"wx",
".",
"VERTICAL",
")",
"vbox",
".",
"Add",
"(",
"instructionLabel",
",",
"proportion",
"=",
"0",
",",
"flag",
"=",
"wx",
".",
"ALIGN_CENTER",
"|",
"wx",
".",
"ALL",
",",
"border",
"=",
"20",
")",
"vbox",
".",
"Add",
"(",
"latLabel",
",",
"proportion",
"=",
"0",
",",
"flag",
"=",
"wx",
".",
"ALIGN_CENTER",
"|",
"wx",
".",
"ALL",
",",
"border",
"=",
"5",
")",
"vbox",
".",
"Add",
"(",
"self",
".",
"latEntry",
",",
"proportion",
"=",
"0",
",",
"flag",
"=",
"wx",
".",
"ALIGN_CENTER",
",",
"border",
"=",
"5",
")",
"vbox",
".",
"Add",
"(",
"lonLabel",
",",
"proportion",
"=",
"0",
",",
"flag",
"=",
"wx",
".",
"ALIGN_CENTER",
"|",
"wx",
".",
"ALL",
",",
"border",
"=",
"5",
")",
"vbox",
".",
"Add",
"(",
"self",
".",
"lonEntry",
",",
"proportion",
"=",
"0",
",",
"flag",
"=",
"wx",
".",
"ALIGN_CENTER",
",",
"border",
"=",
"5",
")",
"vbox",
".",
"Add",
"(",
"eleLabel",
",",
"proportion",
"=",
"0",
",",
"flag",
"=",
"wx",
".",
"ALIGN_CENTER",
"|",
"wx",
".",
"ALL",
",",
"border",
"=",
"5",
")",
"vbox",
".",
"Add",
"(",
"self",
".",
"eleEntry",
",",
"proportion",
"=",
"0",
",",
"flag",
"=",
"wx",
".",
"ALIGN_CENTER",
",",
"border",
"=",
"5",
")",
"vbox",
".",
"Add",
"(",
"selectButton",
",",
"proportion",
"=",
"0",
",",
"flag",
"=",
"wx",
".",
"ALIGN_CENTER",
"|",
"wx",
".",
"ALL",
",",
"border",
"=",
"20",
")",
"bkg",
".",
"SetSizer",
"(",
"vbox",
")",
"self",
".",
"winGeoFrame",
".",
"Show",
"(",
")"
] |
https://github.com/FrancoisSchnell/GPicSync/blob/07d7c4b7da44e4e6665abb94bbb9ef6da0e779d1/src/gpicsync-GUI.py#L649-L674
|
||
haiwen/seahub
|
e92fcd44e3e46260597d8faa9347cb8222b8b10d
|
seahub/utils/__init__.py
|
python
|
get_password_strength_level
|
(password)
|
return calculate_bitwise(num)
|
[] |
def get_password_strength_level(password):
num = 0
for letter in password:
# get ascii dec
# bitwise OR
num |= get_char_mode(ord(letter))
return calculate_bitwise(num)
|
[
"def",
"get_password_strength_level",
"(",
"password",
")",
":",
"num",
"=",
"0",
"for",
"letter",
"in",
"password",
":",
"# get ascii dec",
"# bitwise OR",
"num",
"|=",
"get_char_mode",
"(",
"ord",
"(",
"letter",
")",
")",
"return",
"calculate_bitwise",
"(",
"num",
")"
] |
https://github.com/haiwen/seahub/blob/e92fcd44e3e46260597d8faa9347cb8222b8b10d/seahub/utils/__init__.py#L1195-L1203
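
`get_char_mode` and `calculate_bitwise` are not included in this record. A plausible end-to-end sketch of the scheme; the four category bits and the bit-count level below are assumptions about seahub's helpers, not verbatim copies:

def get_char_mode(n):
    # One bit per character class (assumed mapping).
    if 48 <= n <= 57:    # digit
        return 1
    if 97 <= n <= 122:   # lowercase letter
        return 2
    if 65 <= n <= 90:    # uppercase letter
        return 4
    return 8             # everything else (symbols, whitespace, ...)

def calculate_bitwise(num):
    # Strength level = number of distinct character classes present.
    return bin(num).count("1")

def get_password_strength_level(password):
    num = 0
    for letter in password:
        num |= get_char_mode(ord(letter))
    return calculate_bitwise(num)

print(get_password_strength_level("abc"))      # 1 (lowercase only)
print(get_password_strength_level("Abc123!"))  # 4 (all four classes)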
|
|||
landlab/landlab
|
a5dd80b8ebfd03d1ba87ef6c4368c409485f222c
|
landlab/components/species_evolution/record.py
|
python
|
Record.latest_time
|
(self)
|
return max(self.times)
|
The latest time in the record.
|
The latest time in the record.
|
[
"The",
"latest",
"time",
"in",
"the",
"record",
"."
] |
def latest_time(self):
"""The latest time in the record."""
return max(self.times)
|
[
"def",
"latest_time",
"(",
"self",
")",
":",
"return",
"max",
"(",
"self",
".",
"times",
")"
] |
https://github.com/landlab/landlab/blob/a5dd80b8ebfd03d1ba87ef6c4368c409485f222c/landlab/components/species_evolution/record.py#L62-L64
|
|
mgear-dev/mgear
|
06ddc26c5adb5eab07ca470c7fafa77404c8a1de
|
scripts/mgear/maya/shifter/component/hydraulic_01/__init__.py
|
python
|
Component.addOperators
|
(self)
|
Create operators and set the relations for the component rig
Apply operators, constraints, expressions to the hierarchy.
In order to keep the code clean and easier to debug,
we shouldn't create any new object in this method.
|
Create operators and set the relations for the component rig
|
[
"Create",
"operators",
"and",
"set",
"the",
"relations",
"for",
"the",
"component",
"rig"
] |
def addOperators(self):
"""Create operators and set the relations for the component rig
Apply operators, constraints, expressions to the hierarchy.
In order to keep the code clean and easier to debug,
we shouldn't create any new object in this method.
"""
applyop.aimCns(self.ref_base,
self.tip_ctl,
axis="yx",
wupType=2,
wupVector=[1, 0, 0],
wupObject=self.ctl,
maintainOffset=False)
applyop.aimCns(self.ref_tip,
self.ctl,
axis="-yx",
wupType=2,
wupVector=[1, 0, 0],
wupObject=self.tip_ctl,
maintainOffset=False)
bIncrement = 1.0 / (self.settings["div"] - 1)
blend = 0
for i, div_cns in enumerate(self.div_cns):
intMatrix = applyop.gear_intmatrix_op(
self.ref_base.attr("worldMatrix"),
self.ref_tip.attr("worldMatrix"),
blend)
applyop.gear_mulmatrix_op(intMatrix.attr("output"),
div_cns.attr("parentInverseMatrix[0]"),
div_cns)
blend = blend + bIncrement
|
[
"def",
"addOperators",
"(",
"self",
")",
":",
"applyop",
".",
"aimCns",
"(",
"self",
".",
"ref_base",
",",
"self",
".",
"tip_ctl",
",",
"axis",
"=",
"\"yx\"",
",",
"wupType",
"=",
"2",
",",
"wupVector",
"=",
"[",
"1",
",",
"0",
",",
"0",
"]",
",",
"wupObject",
"=",
"self",
".",
"ctl",
",",
"maintainOffset",
"=",
"False",
")",
"applyop",
".",
"aimCns",
"(",
"self",
".",
"ref_tip",
",",
"self",
".",
"ctl",
",",
"axis",
"=",
"\"-yx\"",
",",
"wupType",
"=",
"2",
",",
"wupVector",
"=",
"[",
"1",
",",
"0",
",",
"0",
"]",
",",
"wupObject",
"=",
"self",
".",
"tip_ctl",
",",
"maintainOffset",
"=",
"False",
")",
"bIncrement",
"=",
"1.0",
"/",
"(",
"self",
".",
"settings",
"[",
"\"div\"",
"]",
"-",
"1",
")",
"blend",
"=",
"0",
"for",
"i",
",",
"div_cns",
"in",
"enumerate",
"(",
"self",
".",
"div_cns",
")",
":",
"intMatrix",
"=",
"applyop",
".",
"gear_intmatrix_op",
"(",
"self",
".",
"ref_base",
".",
"attr",
"(",
"\"worldMatrix\"",
")",
",",
"self",
".",
"ref_tip",
".",
"attr",
"(",
"\"worldMatrix\"",
")",
",",
"blend",
")",
"applyop",
".",
"gear_mulmatrix_op",
"(",
"intMatrix",
".",
"attr",
"(",
"\"output\"",
")",
",",
"div_cns",
".",
"attr",
"(",
"\"parentInverseMatrix[0]\"",
")",
",",
"div_cns",
")",
"blend",
"=",
"blend",
"+",
"bIncrement"
] |
https://github.com/mgear-dev/mgear/blob/06ddc26c5adb5eab07ca470c7fafa77404c8a1de/scripts/mgear/maya/shifter/component/hydraulic_01/__init__.py#L93-L128
|
||
openstack/ironic
|
b392dc19bcd29cef5a69ec00d2f18a7a19a679e5
|
ironic/api/controllers/v1/port.py
|
python
|
PortsController._check_allowed_port_fields
|
(self, fields)
|
Check if fetching a particular field of a port is allowed.
Check if the required version is being requested for fields
that are only allowed to be fetched in a particular API version.
:param fields: list or set of fields to check
:raises: NotAcceptable if a field is not allowed
|
Check if fetching a particular field of a port is allowed.
|
[
"Check",
"if",
"fetching",
"a",
"particular",
"field",
"of",
"a",
"port",
"is",
"allowed",
"."
] |
def _check_allowed_port_fields(self, fields):
"""Check if fetching a particular field of a port is allowed.
Check if the required version is being requested for fields
that are only allowed to be fetched in a particular API version.
:param fields: list or set of fields to check
:raises: NotAcceptable if a field is not allowed
"""
if fields is None:
return
if (not api_utils.allow_port_advanced_net_fields()
and set(fields).intersection(self.advanced_net_fields)):
raise exception.NotAcceptable()
if ('portgroup_uuid' in fields
and not api_utils.allow_portgroups_subcontrollers()):
raise exception.NotAcceptable()
if ('physical_network' in fields
and not api_utils.allow_port_physical_network()):
raise exception.NotAcceptable()
if ('is_smartnic' in fields
and not api_utils.allow_port_is_smartnic()):
raise exception.NotAcceptable()
if ('local_link_connection/network_type' in fields
and not api_utils.allow_local_link_connection_network_type()):
raise exception.NotAcceptable()
if (isinstance(fields, dict)
and fields.get('local_link_connection') is not None):
if (not api_utils.allow_local_link_connection_network_type()
and 'network_type' in fields['local_link_connection']):
raise exception.NotAcceptable()
|
[
"def",
"_check_allowed_port_fields",
"(",
"self",
",",
"fields",
")",
":",
"if",
"fields",
"is",
"None",
":",
"return",
"if",
"(",
"not",
"api_utils",
".",
"allow_port_advanced_net_fields",
"(",
")",
"and",
"set",
"(",
"fields",
")",
".",
"intersection",
"(",
"self",
".",
"advanced_net_fields",
")",
")",
":",
"raise",
"exception",
".",
"NotAcceptable",
"(",
")",
"if",
"(",
"'portgroup_uuid'",
"in",
"fields",
"and",
"not",
"api_utils",
".",
"allow_portgroups_subcontrollers",
"(",
")",
")",
":",
"raise",
"exception",
".",
"NotAcceptable",
"(",
")",
"if",
"(",
"'physical_network'",
"in",
"fields",
"and",
"not",
"api_utils",
".",
"allow_port_physical_network",
"(",
")",
")",
":",
"raise",
"exception",
".",
"NotAcceptable",
"(",
")",
"if",
"(",
"'is_smartnic'",
"in",
"fields",
"and",
"not",
"api_utils",
".",
"allow_port_is_smartnic",
"(",
")",
")",
":",
"raise",
"exception",
".",
"NotAcceptable",
"(",
")",
"if",
"(",
"'local_link_connection/network_type'",
"in",
"fields",
"and",
"not",
"api_utils",
".",
"allow_local_link_connection_network_type",
"(",
")",
")",
":",
"raise",
"exception",
".",
"NotAcceptable",
"(",
")",
"if",
"(",
"isinstance",
"(",
"fields",
",",
"dict",
")",
"and",
"fields",
".",
"get",
"(",
"'local_link_connection'",
")",
"is",
"not",
"None",
")",
":",
"if",
"(",
"not",
"api_utils",
".",
"allow_local_link_connection_network_type",
"(",
")",
"and",
"'network_type'",
"in",
"fields",
"[",
"'local_link_connection'",
"]",
")",
":",
"raise",
"exception",
".",
"NotAcceptable",
"(",
")"
] |
https://github.com/openstack/ironic/blob/b392dc19bcd29cef5a69ec00d2f18a7a19a679e5/ironic/api/controllers/v1/port.py#L316-L346
|
||
fastavro/fastavro
|
dc1179d6d0e63c1d6e7cbeb5e0886bf70672745f
|
fastavro/_logical_writers_py.py
|
python
|
prepare_time_millis
|
(data, schema)
|
Convert datetime.time to int timestamp with milliseconds
|
Convert datetime.time to int timestamp with milliseconds
|
[
"Convert",
"datetime",
".",
"time",
"to",
"int",
"timestamp",
"with",
"milliseconds"
] |
def prepare_time_millis(data, schema):
"""Convert datetime.time to int timestamp with milliseconds"""
if isinstance(data, datetime.time):
return int(
data.hour * MLS_PER_HOUR
+ data.minute * MLS_PER_MINUTE
+ data.second * MLS_PER_SECOND
+ int(data.microsecond / 1000)
)
else:
return data
|
[
"def",
"prepare_time_millis",
"(",
"data",
",",
"schema",
")",
":",
"if",
"isinstance",
"(",
"data",
",",
"datetime",
".",
"time",
")",
":",
"return",
"int",
"(",
"data",
".",
"hour",
"*",
"MLS_PER_HOUR",
"+",
"data",
".",
"minute",
"*",
"MLS_PER_MINUTE",
"+",
"data",
".",
"second",
"*",
"MLS_PER_SECOND",
"+",
"int",
"(",
"data",
".",
"microsecond",
"/",
"1000",
")",
")",
"else",
":",
"return",
"data"
] |
https://github.com/fastavro/fastavro/blob/dc1179d6d0e63c1d6e7cbeb5e0886bf70672745f/fastavro/_logical_writers_py.py#L224-L234
|
||
BillBillBillBill/Tickeys-linux
|
2df31b8665004c58a5d4ab05277f245267d96364
|
tickeys/kivy_32/kivy/base.py
|
python
|
ExceptionManagerBase.add_handler
|
(self, cls)
|
Add a new exception handler to the stack.
|
Add a new exception handler to the stack.
|
[
"Add",
"a",
"new",
"exception",
"handler",
"to",
"the",
"stack",
"."
] |
def add_handler(self, cls):
'''Add a new exception handler to the stack.'''
if not cls in self.handlers:
self.handlers.append(cls)
|
[
"def",
"add_handler",
"(",
"self",
",",
"cls",
")",
":",
"if",
"not",
"cls",
"in",
"self",
".",
"handlers",
":",
"self",
".",
"handlers",
".",
"append",
"(",
"cls",
")"
] |
https://github.com/BillBillBillBill/Tickeys-linux/blob/2df31b8665004c58a5d4ab05277f245267d96364/tickeys/kivy_32/kivy/base.py#L72-L75
|
||
PaddlePaddle/X2Paddle
|
b492545f61446af69e5d5d6288bc3a43a9a3931e
|
x2paddle/project_convertor/pytorch/models/resnet.py
|
python
|
resnet152
|
(pretrained: bool=False, progress: bool=True,
**kwargs: Any)
|
return _resnet('resnet152', Bottleneck, [3, 8, 36, 3], pretrained, **kwargs)
|
r"""ResNet-152 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
|
r"""ResNet-152 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_.
|
[
"r",
"ResNet",
"-",
"152",
"model",
"from",
"Deep",
"Residual",
"Learning",
"for",
"Image",
"Recognition",
"<https",
":",
"//",
"arxiv",
".",
"org",
"/",
"pdf",
"/",
"1512",
".",
"03385",
".",
"pdf",
">",
"_",
"."
] |
def resnet152(pretrained: bool=False, progress: bool=True,
**kwargs: Any) -> ResNet:
r"""ResNet-152 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
progress (bool): If True, displays a progress bar of the download to stderr
"""
return _resnet('resnet152', Bottleneck, [3, 8, 36, 3], pretrained, **kwargs)
|
[
"def",
"resnet152",
"(",
"pretrained",
":",
"bool",
"=",
"False",
",",
"progress",
":",
"bool",
"=",
"True",
",",
"*",
"*",
"kwargs",
":",
"Any",
")",
"->",
"ResNet",
":",
"return",
"_resnet",
"(",
"'resnet152'",
",",
"Bottleneck",
",",
"[",
"3",
",",
"8",
",",
"36",
",",
"3",
"]",
",",
"pretrained",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/PaddlePaddle/X2Paddle/blob/b492545f61446af69e5d5d6288bc3a43a9a3931e/x2paddle/project_convertor/pytorch/models/resnet.py#L358-L367
|
|
holzschu/Carnets
|
44effb10ddfc6aa5c8b0687582a724ba82c6b547
|
Library/lib/python3.7/site-packages/astropy-4.0-py3.7-macosx-10.9-x86_64.egg/astropy/table/column.py
|
python
|
col_copy
|
(col, copy_indices=True)
|
return newcol
|
Mixin-safe version of Column.copy() (with copy_data=True).
Parameters
----------
col : Column or mixin column
Input column
copy_indices : bool
Copy the column ``indices`` attribute
Returns
-------
col : Copy of input column
|
Mixin-safe version of Column.copy() (with copy_data=True).
|
[
"Mixin",
"-",
"safe",
"version",
"of",
"Column",
".",
"copy",
"()",
"(",
"with",
"copy_data",
"=",
"True",
")",
"."
] |
def col_copy(col, copy_indices=True):
"""
Mixin-safe version of Column.copy() (with copy_data=True).
Parameters
----------
col : Column or mixin column
Input column
copy_indices : bool
Copy the column ``indices`` attribute
Returns
-------
col : Copy of input column
"""
if isinstance(col, BaseColumn):
return col.copy()
# The new column should have None for the parent_table ref. If the
# original parent_table weakref there at the point of copying then it
# generates an infinite recursion. Instead temporarily remove the weakref
# on the original column and restore after the copy in an exception-safe
# manner.
parent_table = col.info.parent_table
indices = col.info.indices
col.info.parent_table = None
col.info.indices = []
try:
newcol = col.copy() if hasattr(col, 'copy') else deepcopy(col)
newcol.info = col.info
newcol.info.indices = deepcopy(indices or []) if copy_indices else []
for index in newcol.info.indices:
index.replace_col(col, newcol)
finally:
col.info.parent_table = parent_table
col.info.indices = indices
return newcol
|
[
"def",
"col_copy",
"(",
"col",
",",
"copy_indices",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"col",
",",
"BaseColumn",
")",
":",
"return",
"col",
".",
"copy",
"(",
")",
"# The new column should have None for the parent_table ref. If the",
"# original parent_table weakref there at the point of copying then it",
"# generates an infinite recursion. Instead temporarily remove the weakref",
"# on the original column and restore after the copy in an exception-safe",
"# manner.",
"parent_table",
"=",
"col",
".",
"info",
".",
"parent_table",
"indices",
"=",
"col",
".",
"info",
".",
"indices",
"col",
".",
"info",
".",
"parent_table",
"=",
"None",
"col",
".",
"info",
".",
"indices",
"=",
"[",
"]",
"try",
":",
"newcol",
"=",
"col",
".",
"copy",
"(",
")",
"if",
"hasattr",
"(",
"col",
",",
"'copy'",
")",
"else",
"deepcopy",
"(",
"col",
")",
"newcol",
".",
"info",
"=",
"col",
".",
"info",
"newcol",
".",
"info",
".",
"indices",
"=",
"deepcopy",
"(",
"indices",
"or",
"[",
"]",
")",
"if",
"copy_indices",
"else",
"[",
"]",
"for",
"index",
"in",
"newcol",
".",
"info",
".",
"indices",
":",
"index",
".",
"replace_col",
"(",
"col",
",",
"newcol",
")",
"finally",
":",
"col",
".",
"info",
".",
"parent_table",
"=",
"parent_table",
"col",
".",
"info",
".",
"indices",
"=",
"indices",
"return",
"newcol"
] |
https://github.com/holzschu/Carnets/blob/44effb10ddfc6aa5c8b0687582a724ba82c6b547/Library/lib/python3.7/site-packages/astropy-4.0-py3.7-macosx-10.9-x86_64.egg/astropy/table/column.py#L59-L98
|
|
opendevops-cn/codo-admin
|
7328acab38e71332136cc6684918f534d8e30948
|
mg/handlers/users_handler.py
|
python
|
UserHandler.put
|
(self, *args, **kwargs)
|
[] |
def put(self, *args, **kwargs):
data = json.loads(self.request.body.decode("utf-8"))
key = data.get('key', None)
value = data.get('value', None)
user_id = data.get('user_id', None)
if not key or not value or not user_id:
return self.write(dict(code=-1, msg='不能为空'))
try:
with DBContext('w', None, True) as session:
session.query(Users).filter(Users.user_id == user_id).update({key: value})
except Exception as e:
return self.write(dict(code=-2, msg='修改失败,请检查数据是否合法或者重复'))
self.write(dict(code=0, msg='编辑成功'))
|
[
"def",
"put",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"data",
"=",
"json",
".",
"loads",
"(",
"self",
".",
"request",
".",
"body",
".",
"decode",
"(",
"\"utf-8\"",
")",
")",
"key",
"=",
"data",
".",
"get",
"(",
"'key'",
",",
"None",
")",
"value",
"=",
"data",
".",
"get",
"(",
"'value'",
",",
"None",
")",
"user_id",
"=",
"data",
".",
"get",
"(",
"'user_id'",
",",
"None",
")",
"if",
"not",
"key",
"or",
"not",
"value",
"or",
"not",
"user_id",
":",
"return",
"self",
".",
"write",
"(",
"dict",
"(",
"code",
"=",
"-",
"1",
",",
"msg",
"=",
"'不能为空'))",
"",
"",
"try",
":",
"with",
"DBContext",
"(",
"'w'",
",",
"None",
",",
"True",
")",
"as",
"session",
":",
"session",
".",
"query",
"(",
"Users",
")",
".",
"filter",
"(",
"Users",
".",
"user_id",
"==",
"user_id",
")",
".",
"update",
"(",
"{",
"key",
":",
"value",
"}",
")",
"except",
"Exception",
"as",
"e",
":",
"return",
"self",
".",
"write",
"(",
"dict",
"(",
"code",
"=",
"-",
"2",
",",
"msg",
"=",
"'修改失败,请检查数据是否合法或者重复'))",
"",
"",
"self",
".",
"write",
"(",
"dict",
"(",
"code",
"=",
"0",
",",
"msg",
"=",
"'编辑成功'))",
"",
""
] |
https://github.com/opendevops-cn/codo-admin/blob/7328acab38e71332136cc6684918f534d8e30948/mg/handlers/users_handler.py#L132-L147
|
||||
PyHDI/veriloggen
|
2382d200deabf59cfcfd741f5eba371010aaf2bb
|
veriloggen/types/axi.py
|
python
|
AxiLiteWriteAddress.__init__
|
(self, m, name=None, datawidth=32, addrwidth=32,
itype=None, otype=None)
|
[] |
def __init__(self, m, name=None, datawidth=32, addrwidth=32,
itype=None, otype=None):
AxiLiteInterfaceBase.__init__(self, m, name, datawidth, addrwidth,
itype, otype)
self.awaddr = util.make_port(
m, self.otype, name + '_awaddr', self.addrwidth, initval=0)
self.awcache = util.make_port(
m, self.otype, name + '_awcache', 4, initval=0, no_reg=True)
self.awprot = util.make_port(
m, self.otype, name + '_awprot', 3, initval=0, no_reg=True)
self.awvalid = util.make_port(
m, self.otype, name + '_awvalid', None, initval=0)
self.awready = util.make_port(
m, self.itype, name + '_awready', None, initval=0)
|
[
"def",
"__init__",
"(",
"self",
",",
"m",
",",
"name",
"=",
"None",
",",
"datawidth",
"=",
"32",
",",
"addrwidth",
"=",
"32",
",",
"itype",
"=",
"None",
",",
"otype",
"=",
"None",
")",
":",
"AxiLiteInterfaceBase",
".",
"__init__",
"(",
"self",
",",
"m",
",",
"name",
",",
"datawidth",
",",
"addrwidth",
",",
"itype",
",",
"otype",
")",
"self",
".",
"awaddr",
"=",
"util",
".",
"make_port",
"(",
"m",
",",
"self",
".",
"otype",
",",
"name",
"+",
"'_awaddr'",
",",
"self",
".",
"addrwidth",
",",
"initval",
"=",
"0",
")",
"self",
".",
"awcache",
"=",
"util",
".",
"make_port",
"(",
"m",
",",
"self",
".",
"otype",
",",
"name",
"+",
"'_awcache'",
",",
"4",
",",
"initval",
"=",
"0",
",",
"no_reg",
"=",
"True",
")",
"self",
".",
"awprot",
"=",
"util",
".",
"make_port",
"(",
"m",
",",
"self",
".",
"otype",
",",
"name",
"+",
"'_awprot'",
",",
"3",
",",
"initval",
"=",
"0",
",",
"no_reg",
"=",
"True",
")",
"self",
".",
"awvalid",
"=",
"util",
".",
"make_port",
"(",
"m",
",",
"self",
".",
"otype",
",",
"name",
"+",
"'_awvalid'",
",",
"None",
",",
"initval",
"=",
"0",
")",
"self",
".",
"awready",
"=",
"util",
".",
"make_port",
"(",
"m",
",",
"self",
".",
"itype",
",",
"name",
"+",
"'_awready'",
",",
"None",
",",
"initval",
"=",
"0",
")"
] |
https://github.com/PyHDI/veriloggen/blob/2382d200deabf59cfcfd741f5eba371010aaf2bb/veriloggen/types/axi.py#L148-L163
|
||||
Aceinna/gnss-ins-sim
|
e8a0495af21c12628cdf106a7c54a0fc7bd0b12a
|
gnss_ins_sim/sim/ins_sim.py
|
python
|
Sim.__convert_pos
|
(self, data, units, ref_frame)
|
return data, units
|
Convert position data into a proper form.
For example, if units are [deg deg m] or [rad rad m] and ref_frame is 1, convertion
is needed. LLA form position will be converted to [x y z] form. Vice Versa.
Args:
data: nx3 numpy array, can be in [Lat Lon Alt] or [x y z] form.
units: units of the data.
ref_frame: reference frame of the simulation. 0:NED, 1:virtual inertial
Returns:
data: nx3 numpy array after convertion.
units: units of converted dta
|
Convert position data into a proper form.
For example, if units are [deg deg m] or [rad rad m] and ref_frame is 1, convertion
is needed. LLA form position will be converted to [x y z] form. Vice Versa.
Args:
data: nx3 numpy array, can be in [Lat Lon Alt] or [x y z] form.
units: units of the data.
ref_frame: reference frame of the simulation. 0:NED, 1:virtual inertial
Returns:
data: nx3 numpy array after convertion.
units: units of converted dta
|
[
"Convert",
"position",
"data",
"into",
"a",
"proper",
"form",
".",
"For",
"example",
"if",
"units",
"are",
"[",
"deg",
"deg",
"m",
"]",
"or",
"[",
"rad",
"rad",
"m",
"]",
"and",
"ref_frame",
"is",
"1",
"convertion",
"is",
"needed",
".",
"LLA",
"form",
"position",
"will",
"be",
"converted",
"to",
"[",
"x",
"y",
"z",
"]",
"form",
".",
"Vice",
"Versa",
".",
"Args",
":",
"data",
":",
"nx3",
"numpy",
"array",
"can",
"be",
"in",
"[",
"Lat",
"Lon",
"Alt",
"]",
"or",
"[",
"x",
"y",
"z",
"]",
"form",
".",
"units",
":",
"units",
"of",
"the",
"data",
".",
"ref_frame",
":",
"reference",
"frame",
"of",
"the",
"simulation",
".",
"0",
":",
"NED",
"1",
":",
"virtual",
"inertial",
"Returns",
":",
"data",
":",
"nx3",
"numpy",
"array",
"after",
"convertion",
".",
"units",
":",
"units",
"of",
"converted",
"dta"
] |
def __convert_pos(self, data, units, ref_frame):
'''
Convert position data into a proper form.
For example, if units are [deg deg m] or [rad rad m] and ref_frame is 1, convertion
is needed. LLA form position will be converted to [x y z] form. Vice Versa.
Args:
data: nx3 numpy array, can be in [Lat Lon Alt] or [x y z] form.
units: units of the data.
ref_frame: reference frame of the simulation. 0:NED, 1:virtual inertial
Returns:
data: nx3 numpy array after convertion.
units: units of converted dta
'''
if ref_frame == 1:
# deg to rad
if units == ['deg', 'deg', 'm']:
units = ['rad', 'rad', 'm']
data[:, 0] = data[:, 0] * attitude.D2R
data[:, 1] = data[:, 1] * attitude.D2R
# lla2ned
if units == ['rad', 'rad', 'm']:
units = ['m', 'm', 'm']
# relative motion in ECEF
data = geoparams.lla2ecef_batch(data)
ini_pos_ecef = data[0, :] # initial ECEF position
data = data - ini_pos_ecef
# relative motion in ECEF to NED, NED defined by first LLA
c_ne = attitude.ecef_to_ned(data[0, 0], data[0, 1])
data = data.dot(c_ne.T)
data = data + ini_pos_ecef
elif ref_frame == 0:
# ned2lla or ecef2lla
# Because if the data are in NED or ECEF is unknown, this is not supported.
if units == ['m', 'm', 'm']:
units = ['rad', 'rad', 'm']
print("Unsupported position conversion from xyz to LLA.")
return data, units
|
[
"def",
"__convert_pos",
"(",
"self",
",",
"data",
",",
"units",
",",
"ref_frame",
")",
":",
"if",
"ref_frame",
"==",
"1",
":",
"# deg to rad",
"if",
"units",
"==",
"[",
"'deg'",
",",
"'deg'",
",",
"'m'",
"]",
":",
"units",
"=",
"[",
"'rad'",
",",
"'rad'",
",",
"'m'",
"]",
"data",
"[",
":",
",",
"0",
"]",
"=",
"data",
"[",
":",
",",
"0",
"]",
"*",
"attitude",
".",
"D2R",
"data",
"[",
":",
",",
"1",
"]",
"=",
"data",
"[",
":",
",",
"1",
"]",
"*",
"attitude",
".",
"D2R",
"# lla2ned",
"if",
"units",
"==",
"[",
"'rad'",
",",
"'rad'",
",",
"'m'",
"]",
":",
"units",
"=",
"[",
"'m'",
",",
"'m'",
",",
"'m'",
"]",
"# relative motion in ECEF",
"data",
"=",
"geoparams",
".",
"lla2ecef_batch",
"(",
"data",
")",
"ini_pos_ecef",
"=",
"data",
"[",
"0",
",",
":",
"]",
"# initial ECEF position",
"data",
"=",
"data",
"-",
"ini_pos_ecef",
"# relative motion in ECEF to NED, NED defined by first LLA",
"c_ne",
"=",
"attitude",
".",
"ecef_to_ned",
"(",
"data",
"[",
"0",
",",
"0",
"]",
",",
"data",
"[",
"0",
",",
"1",
"]",
")",
"data",
"=",
"data",
".",
"dot",
"(",
"c_ne",
".",
"T",
")",
"data",
"=",
"data",
"+",
"ini_pos_ecef",
"elif",
"ref_frame",
"==",
"0",
":",
"# ned2lla or ecef2lla",
"# Because if the data are in NED or ECEF is unknown, this is not supported.",
"if",
"units",
"==",
"[",
"'m'",
",",
"'m'",
",",
"'m'",
"]",
":",
"units",
"=",
"[",
"'rad'",
",",
"'rad'",
",",
"'m'",
"]",
"print",
"(",
"\"Unsupported position conversion from xyz to LLA.\"",
")",
"return",
"data",
",",
"units"
] |
https://github.com/Aceinna/gnss-ins-sim/blob/e8a0495af21c12628cdf106a7c54a0fc7bd0b12a/gnss_ins_sim/sim/ins_sim.py#L796-L832
|
|
home-assistant/core
|
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
|
homeassistant/components/mfi/switch.py
|
python
|
MfiSwitch.turn_off
|
(self, **kwargs)
|
Turn the switch off.
|
Turn the switch off.
|
[
"Turn",
"the",
"switch",
"off",
"."
] |
def turn_off(self, **kwargs):
"""Turn the switch off."""
self._port.control(False)
self._target_state = False
|
[
"def",
"turn_off",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_port",
".",
"control",
"(",
"False",
")",
"self",
".",
"_target_state",
"=",
"False"
] |
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/mfi/switch.py#L109-L112
|
||
dulwich/dulwich
|
1f66817d712e3563ce1ff53b1218491a2eae39da
|
dulwich/pack.py
|
python
|
Pack.data
|
(self)
|
return self._data
|
The pack data object being used.
|
The pack data object being used.
|
[
"The",
"pack",
"data",
"object",
"being",
"used",
"."
] |
def data(self):
"""The pack data object being used."""
if self._data is None:
self._data = self._data_load()
self._data.pack = self
self.check_length_and_checksum()
return self._data
|
[
"def",
"data",
"(",
"self",
")",
":",
"if",
"self",
".",
"_data",
"is",
"None",
":",
"self",
".",
"_data",
"=",
"self",
".",
"_data_load",
"(",
")",
"self",
".",
"_data",
".",
"pack",
"=",
"self",
"self",
".",
"check_length_and_checksum",
"(",
")",
"return",
"self",
".",
"_data"
] |
https://github.com/dulwich/dulwich/blob/1f66817d712e3563ce1ff53b1218491a2eae39da/dulwich/pack.py#L2004-L2010
|
|
stepjam/PyRep
|
d778d5d4ffa3be366d4e699f6e2941553fd47ecc
|
pyrep/robots/robot_component.py
|
python
|
RobotComponent.set_motor_locked_at_zero_velocity
|
(self, value: bool)
|
Sets if motor is locked when target velocity is zero for all joints.
When enabled in velocity mode and its target velocity is zero, then the
joint is locked in place.
:param value: If the motors should be locked at zero velocity.
|
Sets if motor is locked when target velocity is zero for all joints.
|
[
"Sets",
"if",
"motor",
"is",
"locked",
"when",
"target",
"velocity",
"is",
"zero",
"for",
"all",
"joints",
"."
] |
def set_motor_locked_at_zero_velocity(self, value: bool) -> None:
"""Sets if motor is locked when target velocity is zero for all joints.
When enabled in velocity mode and its target velocity is zero, then the
joint is locked in place.
:param value: If the motors should be locked at zero velocity.
"""
[j.set_motor_locked_at_zero_velocity(value) # type: ignore
for j in self.joints]
|
[
"def",
"set_motor_locked_at_zero_velocity",
"(",
"self",
",",
"value",
":",
"bool",
")",
"->",
"None",
":",
"[",
"j",
".",
"set_motor_locked_at_zero_velocity",
"(",
"value",
")",
"# type: ignore",
"for",
"j",
"in",
"self",
".",
"joints",
"]"
] |
https://github.com/stepjam/PyRep/blob/d778d5d4ffa3be366d4e699f6e2941553fd47ecc/pyrep/robots/robot_component.py#L233-L242
|
||
cokelaer/bioservices
|
b491e8d71e285f9006e0af0a56f0cc5128cb86fa
|
src/bioservices/services.py
|
python
|
Service.on_web
|
(self, url)
|
Open a URL into a browser
|
Open a URL into a browser
|
[
"Open",
"a",
"URL",
"into",
"a",
"browser"
] |
def on_web(self, url):
"""Open a URL into a browser"""
import webbrowser
webbrowser.open(url)
|
[
"def",
"on_web",
"(",
"self",
",",
"url",
")",
":",
"import",
"webbrowser",
"webbrowser",
".",
"open",
"(",
"url",
")"
] |
https://github.com/cokelaer/bioservices/blob/b491e8d71e285f9006e0af0a56f0cc5128cb86fa/src/bioservices/services.py#L235-L239
|
||
scikit-learn/scikit-learn
|
1d1aadd0711b87d2a11c80aad15df6f8cf156712
|
sklearn/feature_selection/_from_model.py
|
python
|
SelectFromModel.fit
|
(self, X, y=None, **fit_params)
|
return self
|
Fit the SelectFromModel meta-transformer.
Parameters
----------
X : array-like of shape (n_samples, n_features)
The training input samples.
y : array-like of shape (n_samples,), default=None
The target values (integers that correspond to classes in
classification, real numbers in regression).
**fit_params : dict
Other estimator specific parameters.
Returns
-------
self : object
Fitted estimator.
|
Fit the SelectFromModel meta-transformer.
|
[
"Fit",
"the",
"SelectFromModel",
"meta",
"-",
"transformer",
"."
] |
def fit(self, X, y=None, **fit_params):
"""Fit the SelectFromModel meta-transformer.
Parameters
----------
X : array-like of shape (n_samples, n_features)
The training input samples.
y : array-like of shape (n_samples,), default=None
The target values (integers that correspond to classes in
classification, real numbers in regression).
**fit_params : dict
Other estimator specific parameters.
Returns
-------
self : object
Fitted estimator.
"""
if self.max_features is not None:
if not isinstance(self.max_features, numbers.Integral):
raise TypeError(
"'max_features' should be an integer between"
" 0 and {} features. Got {!r} instead.".format(
X.shape[1], self.max_features
)
)
elif self.max_features < 0 or self.max_features > X.shape[1]:
raise ValueError(
"'max_features' should be 0 and {} features.Got {} instead.".format(
X.shape[1], self.max_features
)
)
if self.prefit:
raise NotFittedError("Since 'prefit=True', call transform directly")
self.estimator_ = clone(self.estimator)
self.estimator_.fit(X, y, **fit_params)
if hasattr(self.estimator_, "feature_names_in_"):
self.feature_names_in_ = self.estimator_.feature_names_in_
else:
self._check_feature_names(X, reset=True)
return self
|
[
"def",
"fit",
"(",
"self",
",",
"X",
",",
"y",
"=",
"None",
",",
"*",
"*",
"fit_params",
")",
":",
"if",
"self",
".",
"max_features",
"is",
"not",
"None",
":",
"if",
"not",
"isinstance",
"(",
"self",
".",
"max_features",
",",
"numbers",
".",
"Integral",
")",
":",
"raise",
"TypeError",
"(",
"\"'max_features' should be an integer between\"",
"\" 0 and {} features. Got {!r} instead.\"",
".",
"format",
"(",
"X",
".",
"shape",
"[",
"1",
"]",
",",
"self",
".",
"max_features",
")",
")",
"elif",
"self",
".",
"max_features",
"<",
"0",
"or",
"self",
".",
"max_features",
">",
"X",
".",
"shape",
"[",
"1",
"]",
":",
"raise",
"ValueError",
"(",
"\"'max_features' should be 0 and {} features.Got {} instead.\"",
".",
"format",
"(",
"X",
".",
"shape",
"[",
"1",
"]",
",",
"self",
".",
"max_features",
")",
")",
"if",
"self",
".",
"prefit",
":",
"raise",
"NotFittedError",
"(",
"\"Since 'prefit=True', call transform directly\"",
")",
"self",
".",
"estimator_",
"=",
"clone",
"(",
"self",
".",
"estimator",
")",
"self",
".",
"estimator_",
".",
"fit",
"(",
"X",
",",
"y",
",",
"*",
"*",
"fit_params",
")",
"if",
"hasattr",
"(",
"self",
".",
"estimator_",
",",
"\"feature_names_in_\"",
")",
":",
"self",
".",
"feature_names_in_",
"=",
"self",
".",
"estimator_",
".",
"feature_names_in_",
"else",
":",
"self",
".",
"_check_feature_names",
"(",
"X",
",",
"reset",
"=",
"True",
")",
"return",
"self"
] |
https://github.com/scikit-learn/scikit-learn/blob/1d1aadd0711b87d2a11c80aad15df6f8cf156712/sklearn/feature_selection/_from_model.py#L229-L274
|
|
mchristopher/PokemonGo-DesktopMap
|
ec37575f2776ee7d64456e2a1f6b6b78830b4fe0
|
app/pylibs/osx64/Cryptodome/Hash/SHA3_224.py
|
python
|
SHA3_224_Hash.update
|
(self, data)
|
return self
|
Continue hashing of a message by consuming the next chunk of data.
Repeated calls are equivalent to a single call with the concatenation
of all the arguments. In other words:
>>> m.update(a); m.update(b)
is equivalent to:
>>> m.update(a+b)
:Parameters:
data : byte string
The next chunk of the message being hashed.
|
Continue hashing of a message by consuming the next chunk of data.
|
[
"Continue",
"hashing",
"of",
"a",
"message",
"by",
"consuming",
"the",
"next",
"chunk",
"of",
"data",
"."
] |
def update(self, data):
"""Continue hashing of a message by consuming the next chunk of data.
Repeated calls are equivalent to a single call with the concatenation
of all the arguments. In other words:
>>> m.update(a); m.update(b)
is equivalent to:
>>> m.update(a+b)
:Parameters:
data : byte string
The next chunk of the message being hashed.
"""
if self._digest_done and not self._update_after_digest:
raise TypeError("You can only call 'digest' or 'hexdigest' on this object")
expect_byte_string(data)
result = _raw_keccak_lib.keccak_absorb(self._state.get(),
data,
c_size_t(len(data)))
if result:
raise ValueError("Error %d while updating SHA-3/224"
% result)
return self
|
[
"def",
"update",
"(",
"self",
",",
"data",
")",
":",
"if",
"self",
".",
"_digest_done",
"and",
"not",
"self",
".",
"_update_after_digest",
":",
"raise",
"TypeError",
"(",
"\"You can only call 'digest' or 'hexdigest' on this object\"",
")",
"expect_byte_string",
"(",
"data",
")",
"result",
"=",
"_raw_keccak_lib",
".",
"keccak_absorb",
"(",
"self",
".",
"_state",
".",
"get",
"(",
")",
",",
"data",
",",
"c_size_t",
"(",
"len",
"(",
"data",
")",
")",
")",
"if",
"result",
":",
"raise",
"ValueError",
"(",
"\"Error %d while updating SHA-3/224\"",
"%",
"result",
")",
"return",
"self"
] |
https://github.com/mchristopher/PokemonGo-DesktopMap/blob/ec37575f2776ee7d64456e2a1f6b6b78830b4fe0/app/pylibs/osx64/Cryptodome/Hash/SHA3_224.py#L73-L100
|
|
avocado-framework/avocado
|
1f9b3192e8ba47d029c33fe21266bd113d17811f
|
avocado/utils/asset.py
|
python
|
Asset.name_scheme
|
(self)
|
This property will return the scheme part of the name if is an URL.
Otherwise, will return None.
|
This property will return the scheme part of the name if is an URL.
|
[
"This",
"property",
"will",
"return",
"the",
"scheme",
"part",
"of",
"the",
"name",
"if",
"is",
"an",
"URL",
"."
] |
def name_scheme(self):
"""This property will return the scheme part of the name if is an URL.
Otherwise, will return None.
"""
parsed = self.parsed_name
if parsed:
return parsed.scheme
|
[
"def",
"name_scheme",
"(",
"self",
")",
":",
"parsed",
"=",
"self",
".",
"parsed_name",
"if",
"parsed",
":",
"return",
"parsed",
".",
"scheme"
] |
https://github.com/avocado-framework/avocado/blob/1f9b3192e8ba47d029c33fe21266bd113d17811f/avocado/utils/asset.py#L606-L613
|
||
theotherp/nzbhydra
|
4b03d7f769384b97dfc60dade4806c0fc987514e
|
libs/cffi/api.py
|
python
|
FFI.set_unicode
|
(self, enabled_flag)
|
Windows: if 'enabled_flag' is True, enable the UNICODE and
_UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
to be (pointers to) wchar_t. If 'enabled_flag' is False,
declare these types to be (pointers to) plain 8-bit characters.
This is mostly for backward compatibility; you usually want True.
|
Windows: if 'enabled_flag' is True, enable the UNICODE and
_UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
to be (pointers to) wchar_t. If 'enabled_flag' is False,
declare these types to be (pointers to) plain 8-bit characters.
This is mostly for backward compatibility; you usually want True.
|
[
"Windows",
":",
"if",
"enabled_flag",
"is",
"True",
"enable",
"the",
"UNICODE",
"and",
"_UNICODE",
"defines",
"in",
"C",
"and",
"declare",
"the",
"types",
"like",
"TCHAR",
"and",
"LPTCSTR",
"to",
"be",
"(",
"pointers",
"to",
")",
"wchar_t",
".",
"If",
"enabled_flag",
"is",
"False",
"declare",
"these",
"types",
"to",
"be",
"(",
"pointers",
"to",
")",
"plain",
"8",
"-",
"bit",
"characters",
".",
"This",
"is",
"mostly",
"for",
"backward",
"compatibility",
";",
"you",
"usually",
"want",
"True",
"."
] |
def set_unicode(self, enabled_flag):
"""Windows: if 'enabled_flag' is True, enable the UNICODE and
_UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
to be (pointers to) wchar_t. If 'enabled_flag' is False,
declare these types to be (pointers to) plain 8-bit characters.
This is mostly for backward compatibility; you usually want True.
"""
if self._windows_unicode is not None:
raise ValueError("set_unicode() can only be called once")
enabled_flag = bool(enabled_flag)
if enabled_flag:
self.cdef("typedef wchar_t TBYTE;"
"typedef wchar_t TCHAR;"
"typedef const wchar_t *LPCTSTR;"
"typedef const wchar_t *PCTSTR;"
"typedef wchar_t *LPTSTR;"
"typedef wchar_t *PTSTR;"
"typedef TBYTE *PTBYTE;"
"typedef TCHAR *PTCHAR;")
else:
self.cdef("typedef char TBYTE;"
"typedef char TCHAR;"
"typedef const char *LPCTSTR;"
"typedef const char *PCTSTR;"
"typedef char *LPTSTR;"
"typedef char *PTSTR;"
"typedef TBYTE *PTBYTE;"
"typedef TCHAR *PTCHAR;")
self._windows_unicode = enabled_flag
|
[
"def",
"set_unicode",
"(",
"self",
",",
"enabled_flag",
")",
":",
"if",
"self",
".",
"_windows_unicode",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"\"set_unicode() can only be called once\"",
")",
"enabled_flag",
"=",
"bool",
"(",
"enabled_flag",
")",
"if",
"enabled_flag",
":",
"self",
".",
"cdef",
"(",
"\"typedef wchar_t TBYTE;\"",
"\"typedef wchar_t TCHAR;\"",
"\"typedef const wchar_t *LPCTSTR;\"",
"\"typedef const wchar_t *PCTSTR;\"",
"\"typedef wchar_t *LPTSTR;\"",
"\"typedef wchar_t *PTSTR;\"",
"\"typedef TBYTE *PTBYTE;\"",
"\"typedef TCHAR *PTCHAR;\"",
")",
"else",
":",
"self",
".",
"cdef",
"(",
"\"typedef char TBYTE;\"",
"\"typedef char TCHAR;\"",
"\"typedef const char *LPCTSTR;\"",
"\"typedef const char *PCTSTR;\"",
"\"typedef char *LPTSTR;\"",
"\"typedef char *PTSTR;\"",
"\"typedef TBYTE *PTBYTE;\"",
"\"typedef TCHAR *PTCHAR;\"",
")",
"self",
".",
"_windows_unicode",
"=",
"enabled_flag"
] |
https://github.com/theotherp/nzbhydra/blob/4b03d7f769384b97dfc60dade4806c0fc987514e/libs/cffi/api.py#L495-L523
|
||
bendmorris/static-python
|
2e0f8c4d7ed5b359dc7d8a75b6fb37e6b6c5c473
|
Lib/tkinter/__init__.py
|
python
|
Toplevel.__init__
|
(self, master=None, cnf={}, **kw)
|
Construct a toplevel widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, class,
colormap, container, cursor, height, highlightbackground,
highlightcolor, highlightthickness, menu, relief, screen, takefocus,
use, visual, width.
|
Construct a toplevel widget with the parent MASTER.
|
[
"Construct",
"a",
"toplevel",
"widget",
"with",
"the",
"parent",
"MASTER",
"."
] |
def __init__(self, master=None, cnf={}, **kw):
"""Construct a toplevel widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, class,
colormap, container, cursor, height, highlightbackground,
highlightcolor, highlightthickness, menu, relief, screen, takefocus,
use, visual, width."""
if kw:
cnf = _cnfmerge((cnf, kw))
extra = ()
for wmkey in ['screen', 'class_', 'class', 'visual',
'colormap']:
if wmkey in cnf:
val = cnf[wmkey]
# TBD: a hack needed because some keys
# are not valid as keyword arguments
if wmkey[-1] == '_': opt = '-'+wmkey[:-1]
else: opt = '-'+wmkey
extra = extra + (opt, val)
del cnf[wmkey]
BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra)
root = self._root()
self.iconname(root.iconname())
self.title(root.title())
self.protocol("WM_DELETE_WINDOW", self.destroy)
|
[
"def",
"__init__",
"(",
"self",
",",
"master",
"=",
"None",
",",
"cnf",
"=",
"{",
"}",
",",
"*",
"*",
"kw",
")",
":",
"if",
"kw",
":",
"cnf",
"=",
"_cnfmerge",
"(",
"(",
"cnf",
",",
"kw",
")",
")",
"extra",
"=",
"(",
")",
"for",
"wmkey",
"in",
"[",
"'screen'",
",",
"'class_'",
",",
"'class'",
",",
"'visual'",
",",
"'colormap'",
"]",
":",
"if",
"wmkey",
"in",
"cnf",
":",
"val",
"=",
"cnf",
"[",
"wmkey",
"]",
"# TBD: a hack needed because some keys",
"# are not valid as keyword arguments",
"if",
"wmkey",
"[",
"-",
"1",
"]",
"==",
"'_'",
":",
"opt",
"=",
"'-'",
"+",
"wmkey",
"[",
":",
"-",
"1",
"]",
"else",
":",
"opt",
"=",
"'-'",
"+",
"wmkey",
"extra",
"=",
"extra",
"+",
"(",
"opt",
",",
"val",
")",
"del",
"cnf",
"[",
"wmkey",
"]",
"BaseWidget",
".",
"__init__",
"(",
"self",
",",
"master",
",",
"'toplevel'",
",",
"cnf",
",",
"{",
"}",
",",
"extra",
")",
"root",
"=",
"self",
".",
"_root",
"(",
")",
"self",
".",
"iconname",
"(",
"root",
".",
"iconname",
"(",
")",
")",
"self",
".",
"title",
"(",
"root",
".",
"title",
"(",
")",
")",
"self",
".",
"protocol",
"(",
"\"WM_DELETE_WINDOW\"",
",",
"self",
".",
"destroy",
")"
] |
https://github.com/bendmorris/static-python/blob/2e0f8c4d7ed5b359dc7d8a75b6fb37e6b6c5c473/Lib/tkinter/__init__.py#L2098-L2122
|
||
mdiazcl/fuzzbunch-debian
|
2b76c2249ade83a389ae3badb12a1bd09901fd2c
|
windows/Resources/Python/Override/Lib/multiprocessing/util.py
|
python
|
Finalize.still_active
|
(self)
|
return self._key in _finalizer_registry
|
Return whether this finalizer is still waiting to invoke callback
|
Return whether this finalizer is still waiting to invoke callback
|
[
"Return",
"whether",
"this",
"finalizer",
"is",
"still",
"waiting",
"to",
"invoke",
"callback"
] |
def still_active(self):
'''
Return whether this finalizer is still waiting to invoke callback
'''
return self._key in _finalizer_registry
|
[
"def",
"still_active",
"(",
"self",
")",
":",
"return",
"self",
".",
"_key",
"in",
"_finalizer_registry"
] |
https://github.com/mdiazcl/fuzzbunch-debian/blob/2b76c2249ade83a389ae3badb12a1bd09901fd2c/windows/Resources/Python/Override/Lib/multiprocessing/util.py#L217-L221
|
|
lazylibrarian/LazyLibrarian
|
ae3c14e9db9328ce81765e094ab2a14ed7155624
|
lib/requests/adapters.py
|
python
|
BaseAdapter.send
|
(self, request, stream=False, timeout=None, verify=True,
cert=None, proxies=None)
|
Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple
:param verify: (optional) Whether to verify SSL certificates.
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
|
Sends PreparedRequest object. Returns Response object.
|
[
"Sends",
"PreparedRequest",
"object",
".",
"Returns",
"Response",
"object",
"."
] |
def send(self, request, stream=False, timeout=None, verify=True,
cert=None, proxies=None):
"""Sends PreparedRequest object. Returns Response object.
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) How long to wait for the server to send
data before giving up, as a float, or a :ref:`(connect timeout,
read timeout) <timeouts>` tuple.
:type timeout: float or tuple
:param verify: (optional) Whether to verify SSL certificates.
:param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
"""
raise NotImplementedError
|
[
"def",
"send",
"(",
"self",
",",
"request",
",",
"stream",
"=",
"False",
",",
"timeout",
"=",
"None",
",",
"verify",
"=",
"True",
",",
"cert",
"=",
"None",
",",
"proxies",
"=",
"None",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/lazylibrarian/LazyLibrarian/blob/ae3c14e9db9328ce81765e094ab2a14ed7155624/lib/requests/adapters.py#L57-L71
|
||
mrJean1/PyGeodesy
|
7da5ca71aa3edb7bc49e219e0b8190686e1a7965
|
pygeodesy/deprecated/__init__.py
|
python
|
scalar
|
(value, low=EPS, high=1.0, name=_scalar_, Error=ValueError)
|
return C_(value, name=name, Error=Error, low=low, high=high)
|
DEPRECATED, use class L{Number_} or L{Scalar_}.
@return: New value (C{float} or C{int} for C{int} B{C{low}}).
@raise Error: Invalid B{C{value}}.
|
DEPRECATED, use class L{Number_} or L{Scalar_}.
|
[
"DEPRECATED",
"use",
"class",
"L",
"{",
"Number_",
"}",
"or",
"L",
"{",
"Scalar_",
"}",
"."
] |
def scalar(value, low=EPS, high=1.0, name=_scalar_, Error=ValueError): # PYCHOK no cover
'''DEPRECATED, use class L{Number_} or L{Scalar_}.
@return: New value (C{float} or C{int} for C{int} B{C{low}}).
@raise Error: Invalid B{C{value}}.
'''
from pygeodesy.basics import isint # _MODS.basics.isint
C_ = Number_ if isint(low) else Scalar_
return C_(value, name=name, Error=Error, low=low, high=high)
|
[
"def",
"scalar",
"(",
"value",
",",
"low",
"=",
"EPS",
",",
"high",
"=",
"1.0",
",",
"name",
"=",
"_scalar_",
",",
"Error",
"=",
"ValueError",
")",
":",
"# PYCHOK no cover",
"from",
"pygeodesy",
".",
"basics",
"import",
"isint",
"# _MODS.basics.isint",
"C_",
"=",
"Number_",
"if",
"isint",
"(",
"low",
")",
"else",
"Scalar_",
"return",
"C_",
"(",
"value",
",",
"name",
"=",
"name",
",",
"Error",
"=",
"Error",
",",
"low",
"=",
"low",
",",
"high",
"=",
"high",
")"
] |
https://github.com/mrJean1/PyGeodesy/blob/7da5ca71aa3edb7bc49e219e0b8190686e1a7965/pygeodesy/deprecated/__init__.py#L365-L374
|
|
onnx/sklearn-onnx
|
8e19d19b8a9bcae7f17d5b7cc2514cf6b89f8199
|
skl2onnx/common/_onnx_optimisation_common.py
|
python
|
_make_node
|
(op_type, inputs, outputs, name=None, doc_string=None,
domain=None, attributes=None)
|
return node
|
Constructs a NodeProto.
:param op_type: (string): The name of the operator to construct
:param inputs: list of input names
:param outputs: list of output names
:param name: optional unique identifier for NodeProto
:param doc_string: optional documentation
string for NodeProto
:param domain: optional domain for NodeProto.
If it's None, we will just use default domain (which is empty)
:param attributes: the attributes of the node. The acceptable values
are documented in :func:`make_attribute`.
:return: node
|
Constructs a NodeProto.
|
[
"Constructs",
"a",
"NodeProto",
"."
] |
def _make_node(op_type, inputs, outputs, name=None, doc_string=None,
domain=None, attributes=None):
"""
Constructs a NodeProto.
:param op_type: (string): The name of the operator to construct
:param inputs: list of input names
:param outputs: list of output names
:param name: optional unique identifier for NodeProto
:param doc_string: optional documentation
string for NodeProto
:param domain: optional domain for NodeProto.
If it's None, we will just use default domain (which is empty)
:param attributes: the attributes of the node. The acceptable values
are documented in :func:`make_attribute`.
:return: node
"""
node = NodeProto()
node.op_type = op_type
node.input.extend(inputs)
node.output.extend(outputs)
if name:
node.name = name
if doc_string:
node.doc_string = doc_string
if domain is not None:
node.domain = domain
if isinstance(attributes, dict):
if len(attributes) > 0:
node.attribute.extend(
make_attribute(key, value)
for key, value in sorted(attributes.items()))
elif attributes:
for att in attributes:
node.attribute.extend([att])
return node
|
[
"def",
"_make_node",
"(",
"op_type",
",",
"inputs",
",",
"outputs",
",",
"name",
"=",
"None",
",",
"doc_string",
"=",
"None",
",",
"domain",
"=",
"None",
",",
"attributes",
"=",
"None",
")",
":",
"node",
"=",
"NodeProto",
"(",
")",
"node",
".",
"op_type",
"=",
"op_type",
"node",
".",
"input",
".",
"extend",
"(",
"inputs",
")",
"node",
".",
"output",
".",
"extend",
"(",
"outputs",
")",
"if",
"name",
":",
"node",
".",
"name",
"=",
"name",
"if",
"doc_string",
":",
"node",
".",
"doc_string",
"=",
"doc_string",
"if",
"domain",
"is",
"not",
"None",
":",
"node",
".",
"domain",
"=",
"domain",
"if",
"isinstance",
"(",
"attributes",
",",
"dict",
")",
":",
"if",
"len",
"(",
"attributes",
")",
">",
"0",
":",
"node",
".",
"attribute",
".",
"extend",
"(",
"make_attribute",
"(",
"key",
",",
"value",
")",
"for",
"key",
",",
"value",
"in",
"sorted",
"(",
"attributes",
".",
"items",
"(",
")",
")",
")",
"elif",
"attributes",
":",
"for",
"att",
"in",
"attributes",
":",
"node",
".",
"attribute",
".",
"extend",
"(",
"[",
"att",
"]",
")",
"return",
"node"
] |
https://github.com/onnx/sklearn-onnx/blob/8e19d19b8a9bcae7f17d5b7cc2514cf6b89f8199/skl2onnx/common/_onnx_optimisation_common.py#L78-L113
|
|
pmaupin/pdfrw
|
6c892160e7e976b243db0c12c3e56ed8c78afc5a
|
examples/rl1/platypus_pdf_template.py
|
python
|
MyDocTemplate.afterFlowable
|
(self, flowable)
|
Adds Heading1 to table of contents
|
Adds Heading1 to table of contents
|
[
"Adds",
"Heading1",
"to",
"table",
"of",
"contents"
] |
def afterFlowable(self, flowable):
"""Adds Heading1 to table of contents"""
if flowable.__class__.__name__ == 'Paragraph':
style = flowable.style.name
text = flowable.getPlainText()
key = '%s' % self.seq.nextf('toc')
if style == 'Heading1':
self.canv.bookmarkPage(key)
self.notify('TOCEntry', [1, text, self.page, key])
|
[
"def",
"afterFlowable",
"(",
"self",
",",
"flowable",
")",
":",
"if",
"flowable",
".",
"__class__",
".",
"__name__",
"==",
"'Paragraph'",
":",
"style",
"=",
"flowable",
".",
"style",
".",
"name",
"text",
"=",
"flowable",
".",
"getPlainText",
"(",
")",
"key",
"=",
"'%s'",
"%",
"self",
".",
"seq",
".",
"nextf",
"(",
"'toc'",
")",
"if",
"style",
"==",
"'Heading1'",
":",
"self",
".",
"canv",
".",
"bookmarkPage",
"(",
"key",
")",
"self",
".",
"notify",
"(",
"'TOCEntry'",
",",
"[",
"1",
",",
"text",
",",
"self",
".",
"page",
",",
"key",
"]",
")"
] |
https://github.com/pmaupin/pdfrw/blob/6c892160e7e976b243db0c12c3e56ed8c78afc5a/examples/rl1/platypus_pdf_template.py#L65-L73
|
||
SINGROUP/dscribe
|
79a13939d66bdc858865dc050b91be9debd3c06a
|
dscribe/descriptors/lmbtr.py
|
python
|
LMBTR.create
|
(
self, system, positions=None, n_jobs=1, only_physical_cores=False, verbose=False
)
|
return output
|
Return the LMBTR output for the given systems and given positions.
Args:
system (:class:`ase.Atoms` or list of :class:`ase.Atoms`): One or
many atomic structures.
positions (list): Positions where to calculate LMBTR. Can be
provided as cartesian positions or atomic indices. If no
positions are defined, the LMBTR output will be created for all
atoms in the system. When calculating LMBTR for multiple
systems, provide the positions as a list for each system.
n_jobs (int): Number of parallel jobs to instantiate. Parallellizes
the calculation across samples. Defaults to serial calculation
with n_jobs=1. If a negative number is given, the used cpus
will be calculated with, n_cpus + n_jobs, where n_cpus is the
amount of CPUs as reported by the OS. With only_physical_cores
you can control which types of CPUs are counted in n_cpus.
only_physical_cores (bool): If a negative n_jobs is given,
determines which types of CPUs are used in calculating the
number of jobs. If set to False (default), also virtual CPUs
are counted. If set to True, only physical CPUs are counted.
verbose(bool): Controls whether to print the progress of each job
into to the console.
Returns:
np.ndarray | scipy.sparse.csr_matrix: The LMBTR output for the given
systems and positions. The return type depends on the
'sparse'-attribute. The first dimension is determined by the amount
of positions and systems and the second dimension is determined by
the get_number_of_features()-function.
|
Return the LMBTR output for the given systems and given positions.
|
[
"Return",
"the",
"LMBTR",
"output",
"for",
"the",
"given",
"systems",
"and",
"given",
"positions",
"."
] |
def create(
self, system, positions=None, n_jobs=1, only_physical_cores=False, verbose=False
):
"""Return the LMBTR output for the given systems and given positions.
Args:
system (:class:`ase.Atoms` or list of :class:`ase.Atoms`): One or
many atomic structures.
positions (list): Positions where to calculate LMBTR. Can be
provided as cartesian positions or atomic indices. If no
positions are defined, the LMBTR output will be created for all
atoms in the system. When calculating LMBTR for multiple
systems, provide the positions as a list for each system.
n_jobs (int): Number of parallel jobs to instantiate. Parallellizes
the calculation across samples. Defaults to serial calculation
with n_jobs=1. If a negative number is given, the used cpus
will be calculated with, n_cpus + n_jobs, where n_cpus is the
amount of CPUs as reported by the OS. With only_physical_cores
you can control which types of CPUs are counted in n_cpus.
only_physical_cores (bool): If a negative n_jobs is given,
determines which types of CPUs are used in calculating the
number of jobs. If set to False (default), also virtual CPUs
are counted. If set to True, only physical CPUs are counted.
verbose(bool): Controls whether to print the progress of each job
into to the console.
Returns:
np.ndarray | scipy.sparse.csr_matrix: The LMBTR output for the given
systems and positions. The return type depends on the
'sparse'-attribute. The first dimension is determined by the amount
of positions and systems and the second dimension is determined by
the get_number_of_features()-function.
"""
# Combine input arguments
if isinstance(system, Atoms):
system = [system]
positions = [positions]
n_samples = len(system)
if positions is None:
inp = [(i_sys,) for i_sys in system]
else:
n_pos = len(positions)
if n_pos != n_samples:
raise ValueError(
"The given number of positions does not match the given"
"number of systems."
)
inp = list(zip(system, positions))
# Determine if the outputs have a fixed size
n_features = self.get_number_of_features()
static_size = None
if positions is None:
n_centers = len(inp[0][0])
else:
first_sample, first_pos = inp[0]
if first_pos is not None:
n_centers = len(first_pos)
else:
n_centers = len(first_sample)
def is_static():
for i_job in inp:
if positions is None:
if len(i_job[0]) != n_centers:
return False
else:
if i_job[1] is not None:
if len(i_job[1]) != n_centers:
return False
else:
if len(i_job[0]) != n_centers:
return False
return True
if is_static():
static_size = [n_centers, n_features]
# Create in parallel
output = self.create_parallel(
inp,
self.create_single,
n_jobs,
static_size,
only_physical_cores,
verbose=verbose,
)
return output
|
[
"def",
"create",
"(",
"self",
",",
"system",
",",
"positions",
"=",
"None",
",",
"n_jobs",
"=",
"1",
",",
"only_physical_cores",
"=",
"False",
",",
"verbose",
"=",
"False",
")",
":",
"# Combine input arguments",
"if",
"isinstance",
"(",
"system",
",",
"Atoms",
")",
":",
"system",
"=",
"[",
"system",
"]",
"positions",
"=",
"[",
"positions",
"]",
"n_samples",
"=",
"len",
"(",
"system",
")",
"if",
"positions",
"is",
"None",
":",
"inp",
"=",
"[",
"(",
"i_sys",
",",
")",
"for",
"i_sys",
"in",
"system",
"]",
"else",
":",
"n_pos",
"=",
"len",
"(",
"positions",
")",
"if",
"n_pos",
"!=",
"n_samples",
":",
"raise",
"ValueError",
"(",
"\"The given number of positions does not match the given\"",
"\"number of systems.\"",
")",
"inp",
"=",
"list",
"(",
"zip",
"(",
"system",
",",
"positions",
")",
")",
"# Determine if the outputs have a fixed size",
"n_features",
"=",
"self",
".",
"get_number_of_features",
"(",
")",
"static_size",
"=",
"None",
"if",
"positions",
"is",
"None",
":",
"n_centers",
"=",
"len",
"(",
"inp",
"[",
"0",
"]",
"[",
"0",
"]",
")",
"else",
":",
"first_sample",
",",
"first_pos",
"=",
"inp",
"[",
"0",
"]",
"if",
"first_pos",
"is",
"not",
"None",
":",
"n_centers",
"=",
"len",
"(",
"first_pos",
")",
"else",
":",
"n_centers",
"=",
"len",
"(",
"first_sample",
")",
"def",
"is_static",
"(",
")",
":",
"for",
"i_job",
"in",
"inp",
":",
"if",
"positions",
"is",
"None",
":",
"if",
"len",
"(",
"i_job",
"[",
"0",
"]",
")",
"!=",
"n_centers",
":",
"return",
"False",
"else",
":",
"if",
"i_job",
"[",
"1",
"]",
"is",
"not",
"None",
":",
"if",
"len",
"(",
"i_job",
"[",
"1",
"]",
")",
"!=",
"n_centers",
":",
"return",
"False",
"else",
":",
"if",
"len",
"(",
"i_job",
"[",
"0",
"]",
")",
"!=",
"n_centers",
":",
"return",
"False",
"return",
"True",
"if",
"is_static",
"(",
")",
":",
"static_size",
"=",
"[",
"n_centers",
",",
"n_features",
"]",
"# Create in parallel",
"output",
"=",
"self",
".",
"create_parallel",
"(",
"inp",
",",
"self",
".",
"create_single",
",",
"n_jobs",
",",
"static_size",
",",
"only_physical_cores",
",",
"verbose",
"=",
"verbose",
",",
")",
"return",
"output"
] |
https://github.com/SINGROUP/dscribe/blob/79a13939d66bdc858865dc050b91be9debd3c06a/dscribe/descriptors/lmbtr.py#L207-L295
|
|
deeptools/deepTools
|
ac42d29c298c026aa0c53c9db2553087ebc86b97
|
deeptools/parserCommon.py
|
python
|
gtf_options
|
(suppress=False)
|
return parser
|
Arguments present whenever a BED/GTF file can be used
|
Arguments present whenever a BED/GTF file can be used
|
[
"Arguments",
"present",
"whenever",
"a",
"BED",
"/",
"GTF",
"file",
"can",
"be",
"used"
] |
def gtf_options(suppress=False):
"""
Arguments present whenever a BED/GTF file can be used
"""
if suppress:
parser = argparse.ArgumentParser(add_help=False)
group = parser
else:
parser = argparse.ArgumentParser(add_help=False)
group = parser.add_argument_group('GTF/BED12 options')
if suppress:
help = argparse.SUPPRESS
else:
help = 'When either a BED12 or GTF file are used to provide \
regions, perform the computation on the merged exons, \
rather than using the genomic interval defined by the \
5-prime and 3-prime most transcript bound (i.e., columns \
2 and 3 of a BED file). If a BED3 or BED6 file is used \
as input, then columns 2 and 3 are used as an exon. (Default: %(default)s)'
group.add_argument('--metagene',
help=help,
action='store_true',
dest='keepExons')
if suppress is False:
help = 'When a GTF file is used to provide regions, only \
entries with this value as their feature (column 3) \
will be processed as transcripts. (Default: %(default)s)'
group.add_argument('--transcriptID',
help=help,
default='transcript')
if suppress is False:
help = 'When a GTF file is used to provide regions, only \
entries with this value as their feature (column 3) \
will be processed as exons. CDS would be another common \
value for this. (Default: %(default)s)'
group.add_argument('--exonID',
help=help,
default='exon')
if suppress is False:
help = 'Each region has an ID (e.g., ACTB) assigned to it, \
which for BED files is either column 4 (if it exists) \
or the interval bounds. For GTF files this is instead \
stored in the last column as a key:value pair (e.g., as \
\'transcript_id "ACTB"\', for a key of transcript_id \
and a value of ACTB). In some cases it can be \
convenient to use a different identifier. To do so, set \
this to the desired key. (Default: %(default)s)'
group.add_argument('--transcript_id_designator',
help=help,
default='transcript_id')
return parser
|
[
"def",
"gtf_options",
"(",
"suppress",
"=",
"False",
")",
":",
"if",
"suppress",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"add_help",
"=",
"False",
")",
"group",
"=",
"parser",
"else",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"add_help",
"=",
"False",
")",
"group",
"=",
"parser",
".",
"add_argument_group",
"(",
"'GTF/BED12 options'",
")",
"if",
"suppress",
":",
"help",
"=",
"argparse",
".",
"SUPPRESS",
"else",
":",
"help",
"=",
"'When either a BED12 or GTF file are used to provide \\\n regions, perform the computation on the merged exons, \\\n rather than using the genomic interval defined by the \\\n 5-prime and 3-prime most transcript bound (i.e., columns \\\n 2 and 3 of a BED file). If a BED3 or BED6 file is used \\\n as input, then columns 2 and 3 are used as an exon. (Default: %(default)s)'",
"group",
".",
"add_argument",
"(",
"'--metagene'",
",",
"help",
"=",
"help",
",",
"action",
"=",
"'store_true'",
",",
"dest",
"=",
"'keepExons'",
")",
"if",
"suppress",
"is",
"False",
":",
"help",
"=",
"'When a GTF file is used to provide regions, only \\\n entries with this value as their feature (column 3) \\\n will be processed as transcripts. (Default: %(default)s)'",
"group",
".",
"add_argument",
"(",
"'--transcriptID'",
",",
"help",
"=",
"help",
",",
"default",
"=",
"'transcript'",
")",
"if",
"suppress",
"is",
"False",
":",
"help",
"=",
"'When a GTF file is used to provide regions, only \\\n entries with this value as their feature (column 3) \\\n will be processed as exons. CDS would be another common \\\n value for this. (Default: %(default)s)'",
"group",
".",
"add_argument",
"(",
"'--exonID'",
",",
"help",
"=",
"help",
",",
"default",
"=",
"'exon'",
")",
"if",
"suppress",
"is",
"False",
":",
"help",
"=",
"'Each region has an ID (e.g., ACTB) assigned to it, \\\n which for BED files is either column 4 (if it exists) \\\n or the interval bounds. For GTF files this is instead \\\n stored in the last column as a key:value pair (e.g., as \\\n \\'transcript_id \"ACTB\"\\', for a key of transcript_id \\\n and a value of ACTB). In some cases it can be \\\n convenient to use a different identifier. To do so, set \\\n this to the desired key. (Default: %(default)s)'",
"group",
".",
"add_argument",
"(",
"'--transcript_id_designator'",
",",
"help",
"=",
"help",
",",
"default",
"=",
"'transcript_id'",
")",
"return",
"parser"
] |
https://github.com/deeptools/deepTools/blob/ac42d29c298c026aa0c53c9db2553087ebc86b97/deeptools/parserCommon.py#L140-L199
|
|
hakril/PythonForWindows
|
61e027a678d5b87aa64fcf8a37a6661a86236589
|
windows/winobject/registry.py
|
python
|
PyHKey.empty
|
(self)
|
[] |
def empty(self):
windows.winproxy.RegDeleteTreeW(self.phkey, None)
|
[
"def",
"empty",
"(",
"self",
")",
":",
"windows",
".",
"winproxy",
".",
"RegDeleteTreeW",
"(",
"self",
".",
"phkey",
",",
"None",
")"
] |
https://github.com/hakril/PythonForWindows/blob/61e027a678d5b87aa64fcf8a37a6661a86236589/windows/winobject/registry.py#L374-L375
|
||||
munificent/magpie
|
f5138e3d316ec1a664b5eadba1bcc8573d3faca3
|
dep/gyp/pylib/gyp/msvs_emulation.py
|
python
|
MsvsSettings.GetOutputName
|
(self, config, expand_special)
|
return output_file
|
Gets the explicitly overridden output name for a target or returns None
if it's not overridden.
|
Gets the explicitly overridden output name for a target or returns None
if it's not overridden.
|
[
"Gets",
"the",
"explicitly",
"overridden",
"output",
"name",
"for",
"a",
"target",
"or",
"returns",
"None",
"if",
"it",
"s",
"not",
"overridden",
"."
] |
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec['type']
root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
# TODO(scottmg): Handle OutputDirectory without OutputFile.
output_file = self._Setting((root, 'OutputFile'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
|
[
"def",
"GetOutputName",
"(",
"self",
",",
"config",
",",
"expand_special",
")",
":",
"config",
"=",
"self",
".",
"_TargetConfig",
"(",
"config",
")",
"type",
"=",
"self",
".",
"spec",
"[",
"'type'",
"]",
"root",
"=",
"'VCLibrarianTool'",
"if",
"type",
"==",
"'static_library'",
"else",
"'VCLinkerTool'",
"# TODO(scottmg): Handle OutputDirectory without OutputFile.",
"output_file",
"=",
"self",
".",
"_Setting",
"(",
"(",
"root",
",",
"'OutputFile'",
")",
",",
"config",
")",
"if",
"output_file",
":",
"output_file",
"=",
"expand_special",
"(",
"self",
".",
"ConvertVSMacros",
"(",
"output_file",
",",
"config",
"=",
"config",
")",
")",
"return",
"output_file"
] |
https://github.com/munificent/magpie/blob/f5138e3d316ec1a664b5eadba1bcc8573d3faca3/dep/gyp/pylib/gyp/msvs_emulation.py#L282-L293
|
|
matrix-org/synapse
|
8e57584a5859a9002759963eb546d523d2498a01
|
synapse/handlers/presence.py
|
python
|
PresenceHandler._persist_and_notify
|
(self, states: List[UserPresenceState])
|
Persist states in the database, poke the notifier and send to
interested remote servers
|
Persist states in the database, poke the notifier and send to
interested remote servers
|
[
"Persist",
"states",
"in",
"the",
"database",
"poke",
"the",
"notifier",
"and",
"send",
"to",
"interested",
"remote",
"servers"
] |
async def _persist_and_notify(self, states: List[UserPresenceState]) -> None:
"""Persist states in the database, poke the notifier and send to
interested remote servers
"""
stream_id, max_token = await self.store.update_presence(states)
parties = await get_interested_parties(self.store, self.presence_router, states)
room_ids_to_states, users_to_states = parties
self.notifier.on_new_event(
"presence_key",
stream_id,
rooms=room_ids_to_states.keys(),
users=[UserID.from_string(u) for u in users_to_states],
)
# We only want to poke the local federation sender, if any, as other
# workers will receive the presence updates via the presence replication
# stream (which is updated by `store.update_presence`).
await self.maybe_send_presence_to_interested_destinations(states)
|
[
"async",
"def",
"_persist_and_notify",
"(",
"self",
",",
"states",
":",
"List",
"[",
"UserPresenceState",
"]",
")",
"->",
"None",
":",
"stream_id",
",",
"max_token",
"=",
"await",
"self",
".",
"store",
".",
"update_presence",
"(",
"states",
")",
"parties",
"=",
"await",
"get_interested_parties",
"(",
"self",
".",
"store",
",",
"self",
".",
"presence_router",
",",
"states",
")",
"room_ids_to_states",
",",
"users_to_states",
"=",
"parties",
"self",
".",
"notifier",
".",
"on_new_event",
"(",
"\"presence_key\"",
",",
"stream_id",
",",
"rooms",
"=",
"room_ids_to_states",
".",
"keys",
"(",
")",
",",
"users",
"=",
"[",
"UserID",
".",
"from_string",
"(",
"u",
")",
"for",
"u",
"in",
"users_to_states",
"]",
",",
")",
"# We only want to poke the local federation sender, if any, as other",
"# workers will receive the presence updates via the presence replication",
"# stream (which is updated by `store.update_presence`).",
"await",
"self",
".",
"maybe_send_presence_to_interested_destinations",
"(",
"states",
")"
] |
https://github.com/matrix-org/synapse/blob/8e57584a5859a9002759963eb546d523d2498a01/synapse/handlers/presence.py#L1095-L1114
|
||
pycontribs/jira
|
09ece94f3cae7e6d0becfa87d77d6a05ce01cdf6
|
jira/client.py
|
python
|
JIRA.project_components
|
(self, project: str)
|
return components
|
Get a list of component Resources present on a project.
Args:
project (str): ID or key of the project to get components from
Returns:
List[Component]
|
Get a list of component Resources present on a project.
|
[
"Get",
"a",
"list",
"of",
"component",
"Resources",
"present",
"on",
"a",
"project",
"."
] |
def project_components(self, project: str) -> List[Component]:
"""Get a list of component Resources present on a project.
Args:
project (str): ID or key of the project to get components from
Returns:
List[Component]
"""
r_json = self._get_json("project/" + project + "/components")
components = [
Component(self._options, self._session, raw_comp_json)
for raw_comp_json in r_json
]
return components
|
[
"def",
"project_components",
"(",
"self",
",",
"project",
":",
"str",
")",
"->",
"List",
"[",
"Component",
"]",
":",
"r_json",
"=",
"self",
".",
"_get_json",
"(",
"\"project/\"",
"+",
"project",
"+",
"\"/components\"",
")",
"components",
"=",
"[",
"Component",
"(",
"self",
".",
"_options",
",",
"self",
".",
"_session",
",",
"raw_comp_json",
")",
"for",
"raw_comp_json",
"in",
"r_json",
"]",
"return",
"components"
] |
https://github.com/pycontribs/jira/blob/09ece94f3cae7e6d0becfa87d77d6a05ce01cdf6/jira/client.py#L2678-L2692
|
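A minimal usage sketch for the `project_components` record above. It assumes the jira package is installed and a server is reachable; the server URL, credentials, and project key are hypothetical placeholders.

from jira import JIRA  # assumption: jira package installed

# Hypothetical server and credentials, for illustration only.
client = JIRA(server="https://jira.example.com", basic_auth=("user", "api-token"))
for component in client.project_components("PROJ"):  # "PROJ" is a placeholder key
    print(component.id, component.name)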
|
NifTK/NiftyNet
|
935bf4334cd00fa9f9d50f6a95ddcbfdde4031e0
|
niftynet/contrib/csv_reader/multitask_classifseg_application.py
|
python
|
MultiClassifSegApplication.interpret_output
|
(self, batch_output)
|
return True
|
Specifies how the output should be decoded
:param batch_output:
:return:
|
Specifies how the output should be decoded
:param batch_output:
:return:
|
[
"Specifies",
"how",
"the",
"output",
"should",
"be",
"decoded",
":",
"param",
"batch_output",
":",
":",
"return",
":"
] |
def interpret_output(self, batch_output):
'''
Specifies how the output should be decoded
:param batch_output:
:return:
'''
if not self.is_training:
return self.output_decoder.decode_batch(
{'window_seg': batch_output['seg'],
'csv_class': batch_output['value']},
batch_output['location'])
return True
|
[
"def",
"interpret_output",
"(",
"self",
",",
"batch_output",
")",
":",
"if",
"not",
"self",
".",
"is_training",
":",
"return",
"self",
".",
"output_decoder",
".",
"decode_batch",
"(",
"{",
"'window_seg'",
":",
"batch_output",
"[",
"'seg'",
"]",
",",
"'csv_class'",
":",
"batch_output",
"[",
"'value'",
"]",
"}",
",",
"batch_output",
"[",
"'location'",
"]",
")",
"return",
"True"
] |
https://github.com/NifTK/NiftyNet/blob/935bf4334cd00fa9f9d50f6a95ddcbfdde4031e0/niftynet/contrib/csv_reader/multitask_classifseg_application.py#L519-L530
|
|
brianwrf/hackUtils
|
168123350d93b040fa0c437c9d59faf8fa65d8e6
|
hackUtils.py
|
python
|
php_str_noquotes
|
(data)
|
return encoded[:-1]
|
Convert string to chr(xx).chr(xx) for use in php
|
Convert string to chr(xx).chr(xx) for use in php
|
[
"Convert",
"string",
"to",
"chr",
"(",
"xx",
")",
".",
"chr",
"(",
"xx",
")",
"for",
"use",
"in",
"php"
] |
def php_str_noquotes(data):
"Convert string to chr(xx).chr(xx) for use in php"
encoded = ""
for char in data:
encoded += "chr({0}).".format(ord(char))
return encoded[:-1]
|
[
"def",
"php_str_noquotes",
"(",
"data",
")",
":",
"encoded",
"=",
"\"\"",
"for",
"char",
"in",
"data",
":",
"encoded",
"+=",
"\"chr({0}).\"",
".",
"format",
"(",
"ord",
"(",
"char",
")",
")",
"return",
"encoded",
"[",
":",
"-",
"1",
"]"
] |
https://github.com/brianwrf/hackUtils/blob/168123350d93b040fa0c437c9d59faf8fa65d8e6/hackUtils.py#L458-L463
|
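A quick usage sketch for the `php_str_noquotes` record above, assuming the function shown is in scope (e.g. imported from hackUtils.py):

# ord('o') == 111 and ord('k') == 107, so this prints a dot-joined
# chr() chain that PHP evaluates back to the string "ok".
print(php_str_noquotes("ok"))  # chr(111).chr(107)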
|
git-cola/git-cola
|
b48b8028e0c3baf47faf7b074b9773737358163d
|
cola/cmds.py
|
python
|
ResetHard.confirm
|
(self)
|
return Interaction.confirm(title, question, info, ok_text)
|
[] |
def confirm(self):
title = N_('Restore Worktree and Reset All (Hard)')
question = N_('Restore Worktree and Reset All?')
info = self.tooltip(self.ref)
ok_text = N_('Reset and Restore')
return Interaction.confirm(title, question, info, ok_text)
|
[
"def",
"confirm",
"(",
"self",
")",
":",
"title",
"=",
"N_",
"(",
"'Restore Worktree and Reset All (Hard)'",
")",
"question",
"=",
"N_",
"(",
"'Restore Worktree and Reset All?'",
")",
"info",
"=",
"self",
".",
"tooltip",
"(",
"self",
".",
"ref",
")",
"ok_text",
"=",
"N_",
"(",
"'Reset and Restore'",
")",
"return",
"Interaction",
".",
"confirm",
"(",
"title",
",",
"question",
",",
"info",
",",
"ok_text",
")"
] |
https://github.com/git-cola/git-cola/blob/b48b8028e0c3baf47faf7b074b9773737358163d/cola/cmds.py#L574-L579
|
|||
beancount/beancount
|
cb3526a1af95b3b5be70347470c381b5a86055fe
|
beancount/core/account.py
|
python
|
sans_root
|
(account_name: Account)
|
return join(*components) if account_name else None
|
Get the name of the account without the root.
For example, an input of 'Assets:BofA:Checking' will produce 'BofA:Checking'.
Args:
account_name: A string, the name of the account whose leaf name to return.
Returns:
A string, the name of the non-root portion of this account name.
|
Get the name of the account without the root.
|
[
"Get",
"the",
"name",
"of",
"the",
"account",
"without",
"the",
"root",
"."
] |
def sans_root(account_name: Account) -> Account:
"""Get the name of the account without the root.
For example, an input of 'Assets:BofA:Checking' will produce 'BofA:Checking'.
Args:
account_name: A string, the name of the account whose leaf name to return.
Returns:
A string, the name of the non-root portion of this account name.
"""
assert isinstance(account_name, str)
components = account_name.split(sep)[1:]
return join(*components) if account_name else None
|
[
"def",
"sans_root",
"(",
"account_name",
":",
"Account",
")",
"->",
"Account",
":",
"assert",
"isinstance",
"(",
"account_name",
",",
"str",
")",
"components",
"=",
"account_name",
".",
"split",
"(",
"sep",
")",
"[",
"1",
":",
"]",
"return",
"join",
"(",
"*",
"components",
")",
"if",
"account_name",
"else",
"None"
] |
https://github.com/beancount/beancount/blob/cb3526a1af95b3b5be70347470c381b5a86055fe/beancount/core/account.py#L107-L119
|
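A short usage sketch for the `sans_root` record above, assuming beancount is installed; the call mirrors the example given in the docstring:

from beancount.core.account import sans_root  # assumption: beancount installed

print(sans_root("Assets:BofA:Checking"))  # BofA:Checking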
|
dimagi/commcare-hq
|
d67ff1d3b4c51fa050c19e60c3253a79d3452a39
|
corehq/apps/accounting/invoicing.py
|
python
|
LineItemFactory._is_partial_invoice
|
(self)
|
return not (
self.invoice.date_end.day == self._days_in_billing_period
and self.invoice.date_start.day == 1
)
|
[] |
def _is_partial_invoice(self):
return not (
self.invoice.date_end.day == self._days_in_billing_period
and self.invoice.date_start.day == 1
)
|
[
"def",
"_is_partial_invoice",
"(",
"self",
")",
":",
"return",
"not",
"(",
"self",
".",
"invoice",
".",
"date_end",
".",
"day",
"==",
"self",
".",
"_days_in_billing_period",
"and",
"self",
".",
"invoice",
".",
"date_start",
".",
"day",
"==",
"1",
")"
] |
https://github.com/dimagi/commcare-hq/blob/d67ff1d3b4c51fa050c19e60c3253a79d3452a39/corehq/apps/accounting/invoicing.py#L579-L583
|
|||
wal-e/wal-e
|
6c43976e13c619ebdddd0d869301c42ed131e983
|
wal_e/copyfileobj.py
|
python
|
copyfileobj
|
(src, dst, length=None, exception=OSError, bufsize=None)
|
return
|
Copy length bytes from fileobj src to fileobj dst.
If length is None, copy the entire content.
|
Copy length bytes from fileobj src to fileobj dst.
If length is None, copy the entire content.
|
[
"Copy",
"length",
"bytes",
"from",
"fileobj",
"src",
"to",
"fileobj",
"dst",
".",
"If",
"length",
"is",
"None",
"copy",
"the",
"entire",
"content",
"."
] |
def copyfileobj(src, dst, length=None, exception=OSError, bufsize=None):
"""Copy length bytes from fileobj src to fileobj dst.
If length is None, copy the entire content.
"""
if bufsize is None:
bufsize = pipebuf.PIPE_BUF_BYTES
if length == 0:
return
if length is None:
shutil.copyfileobj(src, dst, bufsize)
return
blocks, remainder = divmod(length, bufsize)
for b in range(blocks):
buf = src.read(bufsize)
if len(buf) < bufsize:
raise exception("unexpected end of data")
dst.write(buf)
if remainder != 0:
buf = src.read(remainder)
if len(buf) < remainder:
raise exception("unexpected end of data")
dst.write(buf)
return
|
[
"def",
"copyfileobj",
"(",
"src",
",",
"dst",
",",
"length",
"=",
"None",
",",
"exception",
"=",
"OSError",
",",
"bufsize",
"=",
"None",
")",
":",
"if",
"bufsize",
"is",
"None",
":",
"bufsize",
"=",
"pipebuf",
".",
"PIPE_BUF_BYTES",
"if",
"length",
"==",
"0",
":",
"return",
"if",
"length",
"is",
"None",
":",
"shutil",
".",
"copyfileobj",
"(",
"src",
",",
"dst",
",",
"bufsize",
")",
"return",
"blocks",
",",
"remainder",
"=",
"divmod",
"(",
"length",
",",
"bufsize",
")",
"for",
"b",
"in",
"range",
"(",
"blocks",
")",
":",
"buf",
"=",
"src",
".",
"read",
"(",
"bufsize",
")",
"if",
"len",
"(",
"buf",
")",
"<",
"bufsize",
":",
"raise",
"exception",
"(",
"\"unexpected end of data\"",
")",
"dst",
".",
"write",
"(",
"buf",
")",
"if",
"remainder",
"!=",
"0",
":",
"buf",
"=",
"src",
".",
"read",
"(",
"remainder",
")",
"if",
"len",
"(",
"buf",
")",
"<",
"remainder",
":",
"raise",
"exception",
"(",
"\"unexpected end of data\"",
")",
"dst",
".",
"write",
"(",
"buf",
")",
"return"
] |
https://github.com/wal-e/wal-e/blob/6c43976e13c619ebdddd0d869301c42ed131e983/wal_e/copyfileobj.py#L6-L31
|
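A self-contained sketch of the `copyfileobj` record above using in-memory streams; `bufsize` is passed explicitly so the wal_e pipebuf default is never consulted, and the function itself is assumed to be in scope:

import io

src = io.BytesIO(b"0123456789")
dst = io.BytesIO()
# Copy exactly 4 bytes in 2-byte blocks (blocks=2, remainder=0).
copyfileobj(src, dst, length=4, bufsize=2)
assert dst.getvalue() == b"0123"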
|
oracle/oci-python-sdk
|
3c1604e4e212008fb6718e2f68cdb5ef71fd5793
|
src/oci/regions.py
|
python
|
is_region_short_name
|
(region)
|
return False
|
[] |
def is_region_short_name(region):
region = region.lower()
if region in REGIONS_SHORT_NAMES:
return True
if region in REGIONS:
return False
if _check_and_add_region_metadata(region):
# Above call will return true if the requested region is now known, after considering additional sources
# Check is needed if region short code was passed
if region in REGIONS_SHORT_NAMES:
return True
return False
|
[
"def",
"is_region_short_name",
"(",
"region",
")",
":",
"region",
"=",
"region",
".",
"lower",
"(",
")",
"if",
"region",
"in",
"REGIONS_SHORT_NAMES",
":",
"return",
"True",
"if",
"region",
"in",
"REGIONS",
":",
"return",
"False",
"if",
"_check_and_add_region_metadata",
"(",
"region",
")",
":",
"# Above call will return true if the requested region is now known, after considering additional sources",
"# Check is needed if region short code was passed",
"if",
"region",
"in",
"REGIONS_SHORT_NAMES",
":",
"return",
"True",
"return",
"False"
] |
https://github.com/oracle/oci-python-sdk/blob/3c1604e4e212008fb6718e2f68cdb5ef71fd5793/src/oci/regions.py#L81-L95
|
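A usage sketch for the `is_region_short_name` record above, assuming the oci SDK is installed; 'phx' is the published short code for us-phoenix-1:

from oci.regions import is_region_short_name  # assumption: oci SDK installed

print(is_region_short_name("PHX"))           # True: lower-cased, found in REGIONS_SHORT_NAMES
print(is_region_short_name("us-phoenix-1"))  # False: a full region name, not a short code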
|||
CheckPointSW/Karta
|
b845928487b50a5b41acd532ae0399177a4356aa
|
src/thumbs_up/analyzers/analyzer.py
|
python
|
Analyzer.delCodePtr
|
(self, src, dest)
|
Delete a code pointer (probably was found to be a False Positive).
Args:
src (int): effective address for the pointer's location
dest (int): effective address for the (assumed) pointed code address
|
Delete a code pointer (probably was found to be a False Positive).
|
[
"Delete",
"a",
"code",
"pointer",
"(",
"probably",
"was",
"found",
"to",
"be",
"a",
"False",
"Positive",
")",
"."
] |
def delCodePtr(self, src, dest):
"""Delete a code pointer (probably was found to be a False Positive).
Args:
src (int): effective address for the pointer's location
dest (int): effective address for the (assumed) pointed code address
"""
idc.del_dref(src, dest)
idc.del_cref(src, dest, 0)
ida_bytes.del_items(src, 0, self.addressSize())
|
[
"def",
"delCodePtr",
"(",
"self",
",",
"src",
",",
"dest",
")",
":",
"idc",
".",
"del_dref",
"(",
"src",
",",
"dest",
")",
"idc",
".",
"del_cref",
"(",
"src",
",",
"dest",
",",
"0",
")",
"ida_bytes",
".",
"del_items",
"(",
"src",
",",
"0",
",",
"self",
".",
"addressSize",
"(",
")",
")"
] |
https://github.com/CheckPointSW/Karta/blob/b845928487b50a5b41acd532ae0399177a4356aa/src/thumbs_up/analyzers/analyzer.py#L208-L217
|
||
haiwen/seahub
|
e92fcd44e3e46260597d8faa9347cb8222b8b10d
|
seahub/share/models.py
|
python
|
PrivateFileDirShareManager.delete_private_file_dir_share
|
(self, from_user, to_user, repo_id, path)
|
[] |
def delete_private_file_dir_share(self, from_user, to_user, repo_id, path):
"""
"""
super(PrivateFileDirShareManager, self).filter(
from_user=from_user, to_user=to_user, repo_id=repo_id,
path=path).delete()
|
[
"def",
"delete_private_file_dir_share",
"(",
"self",
",",
"from_user",
",",
"to_user",
",",
"repo_id",
",",
"path",
")",
":",
"super",
"(",
"PrivateFileDirShareManager",
",",
"self",
")",
".",
"filter",
"(",
"from_user",
"=",
"from_user",
",",
"to_user",
"=",
"to_user",
",",
"repo_id",
"=",
"repo_id",
",",
"path",
"=",
"path",
")",
".",
"delete",
"(",
")"
] |
https://github.com/haiwen/seahub/blob/e92fcd44e3e46260597d8faa9347cb8222b8b10d/seahub/share/models.py#L573-L578
|
||||
j4mie/micromodels
|
43db93afa3f7e067df13db41fa861fe0682e79c4
|
micromodels/models.py
|
python
|
Model.from_dict
|
(cls, D, is_json=False)
|
return instance
|
This factory for :class:`Model`
takes either a native Python dictionary or a JSON dictionary/object
if ``is_json`` is ``True``. The dictionary passed does not need to
contain all of the values that the Model declares.
|
This factory for :class:`Model`
takes either a native Python dictionary or a JSON dictionary/object
if ``is_json`` is ``True``. The dictionary passed does not need to
contain all of the values that the Model declares.
|
[
"This",
"factory",
"for",
":",
"class",
":",
"Model",
"takes",
"either",
"a",
"native",
"Python",
"dictionary",
"or",
"a",
"JSON",
"dictionary",
"/",
"object",
"if",
"is_json",
"is",
"True",
".",
"The",
"dictionary",
"passed",
"does",
"not",
"need",
"to",
"contain",
"all",
"of",
"the",
"values",
"that",
"the",
"Model",
"declares",
"."
] |
def from_dict(cls, D, is_json=False):
'''This factory for :class:`Model`
takes either a native Python dictionary or a JSON dictionary/object
if ``is_json`` is ``True``. The dictionary passed does not need to
contain all of the values that the Model declares.
'''
instance = cls()
instance.set_data(D, is_json=is_json)
return instance
|
[
"def",
"from_dict",
"(",
"cls",
",",
"D",
",",
"is_json",
"=",
"False",
")",
":",
"instance",
"=",
"cls",
"(",
")",
"instance",
".",
"set_data",
"(",
"D",
",",
"is_json",
"=",
"is_json",
")",
"return",
"instance"
] |
https://github.com/j4mie/micromodels/blob/43db93afa3f7e067df13db41fa861fe0682e79c4/micromodels/models.py#L65-L74
|
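A self-contained usage sketch for the `Model.from_dict` record above, assuming micromodels is installed; the `Author` model and its field are hypothetical:

import micromodels  # assumption: micromodels is installed

class Author(micromodels.Model):
    name = micromodels.CharField()

# The dict need not supply every declared field, per the docstring.
author = Author.from_dict({"name": "Ada"})
print(author.name)  # Ada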
|
JimmXinu/FanFicFare
|
bc149a2deb2636320fe50a3e374af6eef8f61889
|
fanficfare/adapters/adapter_fanfictalkcom.py
|
python
|
FanfictalkComAdapter.getSiteURLPattern
|
(self)
|
return r"https?://(archive\.hp)?"+re.escape(self.getSiteDomain())+r"(/archive)?/viewstory\.php\?sid=\d+$"
|
[] |
def getSiteURLPattern(self):
return r"https?://(archive\.hp)?"+re.escape(self.getSiteDomain())+r"(/archive)?/viewstory\.php\?sid=\d+$"
|
[
"def",
"getSiteURLPattern",
"(",
"self",
")",
":",
"return",
"r\"https?://(archive\\.hp)?\"",
"+",
"re",
".",
"escape",
"(",
"self",
".",
"getSiteDomain",
"(",
")",
")",
"+",
"r\"(/archive)?/viewstory\\.php\\?sid=\\d+$\""
] |
https://github.com/JimmXinu/FanFicFare/blob/bc149a2deb2636320fe50a3e374af6eef8f61889/fanficfare/adapters/adapter_fanfictalkcom.py#L76-L77
|
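A quick check of the pattern built by the `getSiteURLPattern` record above; the domain value is a stand-in for whatever `getSiteDomain()` actually returns on this adapter:

import re

domain = "fanfictalk.com"  # hypothetical getSiteDomain() value
pattern = r"https?://(archive\.hp)?" + re.escape(domain) + r"(/archive)?/viewstory\.php\?sid=\d+$"
print(bool(re.match(pattern, "https://fanfictalk.com/viewstory.php?sid=1234")))  # True
print(bool(re.match(pattern, "https://fanfictalk.com/viewstory.php?sid=")))      # False, \d+ needs digits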
|||
PyCQA/pylint
|
3fc855f9d0fa8e6410be5a23cf954ffd5471b4eb
|
pylint/checkers/typecheck.py
|
python
|
TypeChecker._check_uninferable_call
|
(self, node)
|
Check that the given uninferable Call node does not
call an actual function.
|
Check that the given uninferable Call node does not
call an actual function.
|
[
"Check",
"that",
"the",
"given",
"uninferable",
"Call",
"node",
"does",
"not",
"call",
"an",
"actual",
"function",
"."
] |
def _check_uninferable_call(self, node):
"""Check that the given uninferable Call node does not
call an actual function.
"""
if not isinstance(node.func, nodes.Attribute):
return
# Look for properties. First, obtain
# the lhs of the Attribute node and search the attribute
# there. If that attribute is a property or a subclass of properties,
# then most likely it's not callable.
expr = node.func.expr
klass = safe_infer(expr)
if (
klass is None
or klass is astroid.Uninferable
or not isinstance(klass, astroid.Instance)
):
return
try:
attrs = klass._proxied.getattr(node.func.attrname)
except astroid.NotFoundError:
return
for attr in attrs:
if attr is astroid.Uninferable:
continue
if not isinstance(attr, nodes.FunctionDef):
continue
# Decorated, see if it is decorated with a property.
# Also, check the returns and see if they are callable.
if decorated_with_property(attr):
try:
all_returns_are_callable = all(
return_node.callable() or return_node is astroid.Uninferable
for return_node in attr.infer_call_result(node)
)
except astroid.InferenceError:
continue
if not all_returns_are_callable:
self.add_message(
"not-callable", node=node, args=node.func.as_string()
)
break
|
[
"def",
"_check_uninferable_call",
"(",
"self",
",",
"node",
")",
":",
"if",
"not",
"isinstance",
"(",
"node",
".",
"func",
",",
"nodes",
".",
"Attribute",
")",
":",
"return",
"# Look for properties. First, obtain",
"# the lhs of the Attribute node and search the attribute",
"# there. If that attribute is a property or a subclass of properties,",
"# then most likely it's not callable.",
"expr",
"=",
"node",
".",
"func",
".",
"expr",
"klass",
"=",
"safe_infer",
"(",
"expr",
")",
"if",
"(",
"klass",
"is",
"None",
"or",
"klass",
"is",
"astroid",
".",
"Uninferable",
"or",
"not",
"isinstance",
"(",
"klass",
",",
"astroid",
".",
"Instance",
")",
")",
":",
"return",
"try",
":",
"attrs",
"=",
"klass",
".",
"_proxied",
".",
"getattr",
"(",
"node",
".",
"func",
".",
"attrname",
")",
"except",
"astroid",
".",
"NotFoundError",
":",
"return",
"for",
"attr",
"in",
"attrs",
":",
"if",
"attr",
"is",
"astroid",
".",
"Uninferable",
":",
"continue",
"if",
"not",
"isinstance",
"(",
"attr",
",",
"nodes",
".",
"FunctionDef",
")",
":",
"continue",
"# Decorated, see if it is decorated with a property.",
"# Also, check the returns and see if they are callable.",
"if",
"decorated_with_property",
"(",
"attr",
")",
":",
"try",
":",
"all_returns_are_callable",
"=",
"all",
"(",
"return_node",
".",
"callable",
"(",
")",
"or",
"return_node",
"is",
"astroid",
".",
"Uninferable",
"for",
"return_node",
"in",
"attr",
".",
"infer_call_result",
"(",
"node",
")",
")",
"except",
"astroid",
".",
"InferenceError",
":",
"continue",
"if",
"not",
"all_returns_are_callable",
":",
"self",
".",
"add_message",
"(",
"\"not-callable\"",
",",
"node",
"=",
"node",
",",
"args",
"=",
"node",
".",
"func",
".",
"as_string",
"(",
")",
")",
"break"
] |
https://github.com/PyCQA/pylint/blob/3fc855f9d0fa8e6410be5a23cf954ffd5471b4eb/pylint/checkers/typecheck.py#L1213-L1261
|
||
n1nj4sec/pupy
|
a5d766ea81fdfe3bc2c38c9bdaf10e9b75af3b39
|
pupy/pupylib/PupyOffload.py
|
python
|
PupyOffloadSocket.getpeername
|
(self)
|
return self._raddr
|
[] |
def getpeername(self):
return self._raddr
|
[
"def",
"getpeername",
"(",
"self",
")",
":",
"return",
"self",
".",
"_raddr"
] |
https://github.com/n1nj4sec/pupy/blob/a5d766ea81fdfe3bc2c38c9bdaf10e9b75af3b39/pupy/pupylib/PupyOffload.py#L151-L152
|
|||
yangxue0827/FPN_Tensorflow
|
c72110d2803455e6e55020f69144d9490a3d39ad
|
libs/networks/slim_nets/resnet_v2.py
|
python
|
resnet_v2_101
|
(inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
spatial_squeeze=False,
reuse=None,
scope='resnet_v2_101')
|
return resnet_v2(inputs, blocks, num_classes, is_training=is_training,
global_pool=global_pool, output_stride=output_stride,
include_root_block=True, spatial_squeeze=spatial_squeeze,
reuse=reuse, scope=scope)
|
ResNet-101 model of [1]. See resnet_v2() for arg and return description.
|
ResNet-101 model of [1]. See resnet_v2() for arg and return description.
|
[
"ResNet",
"-",
"101",
"model",
"of",
"[",
"1",
"]",
".",
"See",
"resnet_v2",
"()",
"for",
"arg",
"and",
"return",
"description",
"."
] |
def resnet_v2_101(inputs,
num_classes=None,
is_training=True,
global_pool=True,
output_stride=None,
spatial_squeeze=False,
reuse=None,
scope='resnet_v2_101'):
"""ResNet-101 model of [1]. See resnet_v2() for arg and return description."""
blocks = [
resnet_v2_block('block1', base_depth=64, num_units=3, stride=2),
resnet_v2_block('block2', base_depth=128, num_units=4, stride=2),
resnet_v2_block('block3', base_depth=256, num_units=23, stride=2),
resnet_v2_block('block4', base_depth=512, num_units=3, stride=1),
]
return resnet_v2(inputs, blocks, num_classes, is_training=is_training,
global_pool=global_pool, output_stride=output_stride,
include_root_block=True, spatial_squeeze=spatial_squeeze,
reuse=reuse, scope=scope)
|
[
"def",
"resnet_v2_101",
"(",
"inputs",
",",
"num_classes",
"=",
"None",
",",
"is_training",
"=",
"True",
",",
"global_pool",
"=",
"True",
",",
"output_stride",
"=",
"None",
",",
"spatial_squeeze",
"=",
"False",
",",
"reuse",
"=",
"None",
",",
"scope",
"=",
"'resnet_v2_101'",
")",
":",
"blocks",
"=",
"[",
"resnet_v2_block",
"(",
"'block1'",
",",
"base_depth",
"=",
"64",
",",
"num_units",
"=",
"3",
",",
"stride",
"=",
"2",
")",
",",
"resnet_v2_block",
"(",
"'block2'",
",",
"base_depth",
"=",
"128",
",",
"num_units",
"=",
"4",
",",
"stride",
"=",
"2",
")",
",",
"resnet_v2_block",
"(",
"'block3'",
",",
"base_depth",
"=",
"256",
",",
"num_units",
"=",
"23",
",",
"stride",
"=",
"2",
")",
",",
"resnet_v2_block",
"(",
"'block4'",
",",
"base_depth",
"=",
"512",
",",
"num_units",
"=",
"3",
",",
"stride",
"=",
"1",
")",
",",
"]",
"return",
"resnet_v2",
"(",
"inputs",
",",
"blocks",
",",
"num_classes",
",",
"is_training",
"=",
"is_training",
",",
"global_pool",
"=",
"global_pool",
",",
"output_stride",
"=",
"output_stride",
",",
"include_root_block",
"=",
"True",
",",
"spatial_squeeze",
"=",
"spatial_squeeze",
",",
"reuse",
"=",
"reuse",
",",
"scope",
"=",
"scope",
")"
] |
https://github.com/yangxue0827/FPN_Tensorflow/blob/c72110d2803455e6e55020f69144d9490a3d39ad/libs/networks/slim_nets/resnet_v2.py#L270-L288
|
|
peter-u-diehl/stdp-mnist
|
d527ca3ee579d4f156d25ff160c0551a5ab82cf1
|
Diehl&Cook_spiking_MNIST.py
|
python
|
update_performance_plot
|
(im, performance, current_example_num, fig)
|
return im, performance
|
[] |
def update_performance_plot(im, performance, current_example_num, fig):
performance = get_current_performance(performance, current_example_num)
im.set_ydata(performance)
fig.canvas.draw()
return im, performance
|
[
"def",
"update_performance_plot",
"(",
"im",
",",
"performance",
",",
"current_example_num",
",",
"fig",
")",
":",
"performance",
"=",
"get_current_performance",
"(",
"performance",
",",
"current_example_num",
")",
"im",
".",
"set_ydata",
"(",
"performance",
")",
"fig",
".",
"canvas",
".",
"draw",
"(",
")",
"return",
"im",
",",
"performance"
] |
https://github.com/peter-u-diehl/stdp-mnist/blob/d527ca3ee579d4f156d25ff160c0551a5ab82cf1/Diehl&Cook_spiking_MNIST.py#L163-L167
|
|||
w3h/isf
|
6faf0a3df185465ec17369c90ccc16e2a03a1870
|
lib/thirdparty/scapy/utils.py
|
python
|
RawPcapReader.next
|
(self)
|
return pkt
|
implement the iterator protocol on a set of packets in a pcap file
|
implement the iterator protocol on a set of packets in a pcap file
|
[
"implement",
"the",
"iterator",
"protocol",
"on",
"a",
"set",
"of",
"packets",
"in",
"a",
"pcap",
"file"
] |
def next(self):
"""implement the iterator protocol on a set of packets in a pcap file"""
pkt = self.read_packet()
if pkt == None:
raise StopIteration
return pkt
|
[
"def",
"next",
"(",
"self",
")",
":",
"pkt",
"=",
"self",
".",
"read_packet",
"(",
")",
"if",
"pkt",
"==",
"None",
":",
"raise",
"StopIteration",
"return",
"pkt"
] |
https://github.com/w3h/isf/blob/6faf0a3df185465ec17369c90ccc16e2a03a1870/lib/thirdparty/scapy/utils.py#L645-L650
|
|
dpp/simply_lift
|
cf49f7dcce81c7f1557314dd0f0bb08aaedc73da
|
elyxer.py
|
python
|
ListItem.process
|
(self)
|
Set the correct type and contents.
|
Set the correct type and contents.
|
[
"Set",
"the",
"correct",
"type",
"and",
"contents",
"."
] |
def process(self):
"Set the correct type and contents."
self.type = self.header[1]
tag = TaggedText().complete(self.contents, 'li', True)
self.contents = [tag]
|
[
"def",
"process",
"(",
"self",
")",
":",
"self",
".",
"type",
"=",
"self",
".",
"header",
"[",
"1",
"]",
"tag",
"=",
"TaggedText",
"(",
")",
".",
"complete",
"(",
"self",
".",
"contents",
",",
"'li'",
",",
"True",
")",
"self",
".",
"contents",
"=",
"[",
"tag",
"]"
] |
https://github.com/dpp/simply_lift/blob/cf49f7dcce81c7f1557314dd0f0bb08aaedc73da/elyxer.py#L6553-L6557
|
||
fake-name/ReadableWebProxy
|
ed5c7abe38706acc2684a1e6cd80242a03c5f010
|
Misc/diff_match_patch/diff_match_patch.py
|
python
|
diff_match_patch.diff_commonSuffix
|
(self, text1, text2)
|
return pointermid
|
Determine the common suffix of two strings.
Args:
text1: First string.
text2: Second string.
Returns:
The number of characters common to the end of each string.
|
Determine the common suffix of two strings.
|
[
"Determine",
"the",
"common",
"suffix",
"of",
"two",
"strings",
"."
] |
def diff_commonSuffix(self, text1, text2):
"""Determine the common suffix of two strings.
Args:
text1: First string.
text2: Second string.
Returns:
The number of characters common to the end of each string.
"""
# Quick check for common null cases.
if not text1 or not text2 or text1[-1] != text2[-1]:
return 0
# Binary search.
# Performance analysis: http://neil.fraser.name/news/2007/10/09/
pointermin = 0
pointermax = min(len(text1), len(text2))
pointermid = pointermax
pointerend = 0
while pointermin < pointermid:
if (text1[-pointermid:len(text1) - pointerend] ==
text2[-pointermid:len(text2) - pointerend]):
pointermin = pointermid
pointerend = pointermin
else:
pointermax = pointermid
pointermid = (pointermax - pointermin) // 2 + pointermin
return pointermid
|
[
"def",
"diff_commonSuffix",
"(",
"self",
",",
"text1",
",",
"text2",
")",
":",
"# Quick check for common null cases.",
"if",
"not",
"text1",
"or",
"not",
"text2",
"or",
"text1",
"[",
"-",
"1",
"]",
"!=",
"text2",
"[",
"-",
"1",
"]",
":",
"return",
"0",
"# Binary search.",
"# Performance analysis: http://neil.fraser.name/news/2007/10/09/",
"pointermin",
"=",
"0",
"pointermax",
"=",
"min",
"(",
"len",
"(",
"text1",
")",
",",
"len",
"(",
"text2",
")",
")",
"pointermid",
"=",
"pointermax",
"pointerend",
"=",
"0",
"while",
"pointermin",
"<",
"pointermid",
":",
"if",
"(",
"text1",
"[",
"-",
"pointermid",
":",
"len",
"(",
"text1",
")",
"-",
"pointerend",
"]",
"==",
"text2",
"[",
"-",
"pointermid",
":",
"len",
"(",
"text2",
")",
"-",
"pointerend",
"]",
")",
":",
"pointermin",
"=",
"pointermid",
"pointerend",
"=",
"pointermin",
"else",
":",
"pointermax",
"=",
"pointermid",
"pointermid",
"=",
"(",
"pointermax",
"-",
"pointermin",
")",
"//",
"2",
"+",
"pointermin",
"return",
"pointermid"
] |
https://github.com/fake-name/ReadableWebProxy/blob/ed5c7abe38706acc2684a1e6cd80242a03c5f010/Misc/diff_match_patch/diff_match_patch.py#L480-L507
|
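A short usage sketch for the `diff_commonSuffix` record above, assuming the diff_match_patch class is importable from the module shown:

from diff_match_patch import diff_match_patch  # assumption: module on the path

dmp = diff_match_patch()
print(dmp.diff_commonSuffix("running", "jumping"))  # 3, the shared suffix is "ing"
print(dmp.diff_commonSuffix("abc", "xyz"))          # 0, the last characters differ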
|
codesociety/friartuck
|
450adae920ac64a4d3bca5258512295d3eaecea5
|
my_algo.py
|
python
|
order_for_robinhood
|
(context, security, weight, order_type=None)
|
This is a custom order method for this particular algorithm and
places orders based on:
(1) How much of each position in context.assets we currently hold
(2) How much cash we currently hold
This means that if you have existing positions (e.g. AAPL),
your positions in that security will not be taken into
account when calculating order amounts.
The portfolio value that we'll be ordering on is labeled
`valid_portfolio_value`.
If you'd like to use a Stop/Limit/Stop-Limit Order please follow the
following format:
STOP - order_type = OrderType(stop_price=y)
LIMIT - order_type = OrderType(limit_price=x)
STOPLIMIT - order_type = OrderType(limit_price=x, stop_price=y)
|
This is a custom order method for this particular algorithm and
places orders based on:
(1) How much of each position in context.assets we currently hold
(2) How much cash we currently hold
|
[
"This",
"is",
"a",
"custom",
"order",
"method",
"for",
"this",
"particular",
"algorithm",
"and",
"places",
"orders",
"based",
"on",
":",
"(",
"1",
")",
"How",
"much",
"of",
"each",
"position",
"in",
"context",
".",
"assets",
"we",
"currently",
"hold",
"(",
"2",
")",
"How",
"much",
"cash",
"we",
"currently",
"hold"
] |
def order_for_robinhood(context, security, weight, order_type=None):
"""
This is a custom order method for this particular algorithm and
places orders based on:
(1) How much of each position in context.assets we currently hold
(2) How much cash we currently hold
This means that if you have existing positions (e.g. AAPL),
your positions in that security will not be taken into
account when calculating order amounts.
The portfolio value that we'll be ordering on is labeled
`valid_portfolio_value`.
If you'd like to use a Stop/Limit/Stop-Limit Order please follow the
following format:
STOP - order_type = OrderType(stop_price=y)
LIMIT - order_type = OrderType(limit_price=x)
STOPLIMIT - order_type = OrderType(limit_price=x, stop_price=y)
"""
# We use .95 as the cash because all market orders are converted into
# limit orders with a 5% buffer. So any market order placed through
# Robinhood is submitted as a limit order with (last_traded_price * 1.05)
valid_portfolio_value = context.portfolio.cash * .95
# Calculate the percent of each security that we want to hold
percent_to_order = weight - get_percent_held(context, security, valid_portfolio_value)
# If within 1% of target weight, ignore.
if abs(percent_to_order) < .01:
log.info("Can't Make Order - Percent (%s) to order is less than 0.01 " % percent_to_order)
return
# Calculate the dollar value to order for this security
value_to_order = percent_to_order * valid_portfolio_value
if order_type:
return order_value(security, value_to_order, order_type=order_type, time_in_force='gtc')
else:
return order_value(security, value_to_order, time_in_force='gtc')
|
[
"def",
"order_for_robinhood",
"(",
"context",
",",
"security",
",",
"weight",
",",
"order_type",
"=",
"None",
")",
":",
"# We use .95 as the cash because all market orders are converted into",
"# limit orders with a 5% buffer. So any market order placed through",
"# Robinhood is submitted as a limit order with (last_traded_price * 1.05)",
"valid_portfolio_value",
"=",
"context",
".",
"portfolio",
".",
"cash",
"*",
".95",
"# Calculate the percent of each security that we want to hold",
"percent_to_order",
"=",
"weight",
"-",
"get_percent_held",
"(",
"context",
",",
"security",
",",
"valid_portfolio_value",
")",
"# If within 1% of target weight, ignore.",
"if",
"abs",
"(",
"percent_to_order",
")",
"<",
".01",
":",
"log",
".",
"info",
"(",
"\"Can't Make Order - Percent (%s) to order is less than 0.01 \"",
"%",
"percent_to_order",
")",
"return",
"# Calculate the dollar value to order for this security",
"value_to_order",
"=",
"percent_to_order",
"*",
"valid_portfolio_value",
"if",
"order_type",
":",
"return",
"order_value",
"(",
"security",
",",
"value_to_order",
",",
"order_type",
"=",
"order_type",
",",
"time_in_force",
"=",
"'gtc'",
")",
"else",
":",
"return",
"order_value",
"(",
"security",
",",
"value_to_order",
",",
"time_in_force",
"=",
"'gtc'",
")"
] |
https://github.com/codesociety/friartuck/blob/450adae920ac64a4d3bca5258512295d3eaecea5/my_algo.py#L138-L176
|
||
nadineproject/nadine
|
c41c8ef7ffe18f1853029c97eecc329039b4af6c
|
nadine/utils/payment_api.py
|
python
|
PaymentAPI.get_transactions
|
(self, year, month, day)
|
return clean_transactions
|
[] |
def get_transactions(self, year, month, day):
raw_transactions = self.entry_point.getTransactions(year, month, day)
clean_transactions = clean_transaction_list(raw_transactions)
return clean_transactions
|
[
"def",
"get_transactions",
"(",
"self",
",",
"year",
",",
"month",
",",
"day",
")",
":",
"raw_transactions",
"=",
"self",
".",
"entry_point",
".",
"getTransactions",
"(",
"year",
",",
"month",
",",
"day",
")",
"clean_transactions",
"=",
"clean_transaction_list",
"(",
"raw_transactions",
")",
"return",
"clean_transactions"
] |
https://github.com/nadineproject/nadine/blob/c41c8ef7ffe18f1853029c97eecc329039b4af6c/nadine/utils/payment_api.py#L42-L45
|
|||
usnistgov/fipy
|
6809b180b41a11de988a48655575df7e142c93b9
|
fipy/tools/dimensions/physicalField.py
|
python
|
PhysicalField.arccos
|
(self)
|
return PhysicalField(value=umath.arccos(self.inDimensionless()), unit = "rad")
|
Return the inverse cosine of the `PhysicalField` in radians
>>> print(PhysicalField(0).arccos().allclose("1.57079632679 rad"))
1
The input `PhysicalField` must be dimensionless
>>> print(numerix.round_(PhysicalField("1 m").arccos(), 6))
Traceback (most recent call last):
...
TypeError: Incompatible units
|
Return the inverse cosine of the `PhysicalField` in radians
|
[
"Return",
"the",
"inverse",
"cosine",
"of",
"the",
"PhysicalField",
"in",
"radians"
] |
def arccos(self):
"""
Return the inverse cosine of the `PhysicalField` in radians
>>> print(PhysicalField(0).arccos().allclose("1.57079632679 rad"))
1
The input `PhysicalField` must be dimensionless
>>> print(numerix.round_(PhysicalField("1 m").arccos(), 6))
Traceback (most recent call last):
...
TypeError: Incompatible units
"""
return PhysicalField(value=umath.arccos(self.inDimensionless()), unit = "rad")
|
[
"def",
"arccos",
"(",
"self",
")",
":",
"return",
"PhysicalField",
"(",
"value",
"=",
"umath",
".",
"arccos",
"(",
"self",
".",
"inDimensionless",
"(",
")",
")",
",",
"unit",
"=",
"\"rad\"",
")"
] |
https://github.com/usnistgov/fipy/blob/6809b180b41a11de988a48655575df7e142c93b9/fipy/tools/dimensions/physicalField.py#L965-L979
|
|
google-research/tensorflow_constrained_optimization
|
723d63f8567aaa988c4ce4761152beee2b462e1d
|
tensorflow_constrained_optimization/python/train/constrained_optimizer.py
|
python
|
ConstrainedOptimizerV2.num_constraints
|
(self, num_constraints)
|
Explicitly sets the number of constraints.
This function plays the same role as the (optional) num_constraints
constructor argument. Once the number of constraints has been set, the
internal state (e.g. the Lagrange multipliers) are fixed, and subsequent
calls to this method will fail if the number of constraints has changed.
If the num_constraints argument was not provided to the constructor, then
this method *must* be called before optimization can be performed.
Args:
num_constraints: int, the number of constraints in the
`ConstrainedMinimizationProblem` that will eventually be minimized.
Raises:
RuntimeError: if the internal state has already been created.
ValueError: if the number of constraints differs from its previous value.
|
Explicitly sets the number of constraints.
|
[
"Explicitly",
"sets",
"the",
"number",
"of",
"constraints",
"."
] |
def num_constraints(self, num_constraints):
"""Explicitly sets the number of constraints.
This function plays the same role as the (optional) num_constraints
constructor argument. Once the number of constraints has been set, the
internal state (e.g. the Lagrange multipliers) are fixed, and subsequent
calls to this method will fail if the number of constraints has changed.
If the num_constraints argument was not provided to the constructor, then
this method *must* be called before optimization can be performed.
Args:
num_constraints: int, the number of constraints in the
`ConstrainedMinimizationProblem` that will eventually be minimized.
Raises:
RuntimeError: if the internal state has already been created.
ValueError: if the number of constraints differs from its previous value.
"""
# Since get_loss_fn() can infer the number of constraints from a
# ConstrainedMinimizationProblem, it's possible that the state might have
# been created, even while self._num_constraints is None.
if self._formulation.is_state_created:
raise RuntimeError("num_constraints cannot be set after the internal "
"state has been created (by e.g. the variables or "
"minimize methods)")
if (self._num_constraints
is not None) and (num_constraints != self._num_constraints):
raise ValueError("num_constraints cannot be changed once it has been set")
self._num_constraints = num_constraints
|
[
"def",
"num_constraints",
"(",
"self",
",",
"num_constraints",
")",
":",
"# Since get_loss_fn() can infer the number of constraints from a",
"# ConstrainedMinimizationProblem, it's possible that the state might have",
"# been created, even while self._num_constraints is None.",
"if",
"self",
".",
"_formulation",
".",
"is_state_created",
":",
"raise",
"RuntimeError",
"(",
"\"num_constraints cannot be set after the internal \"",
"\"state has been created (by e.g. the variables or \"",
"\"minimize methods)\"",
")",
"if",
"(",
"self",
".",
"_num_constraints",
"is",
"not",
"None",
")",
"and",
"(",
"num_constraints",
"!=",
"self",
".",
"_num_constraints",
")",
":",
"raise",
"ValueError",
"(",
"\"num_constraints cannot be changed once it has been set\"",
")",
"self",
".",
"_num_constraints",
"=",
"num_constraints"
] |
https://github.com/google-research/tensorflow_constrained_optimization/blob/723d63f8567aaa988c4ce4761152beee2b462e1d/tensorflow_constrained_optimization/python/train/constrained_optimizer.py#L587-L617
|
||
robinhood/faust
|
01b4c0ad8390221db71751d80001b0fd879291e2
|
faust/sensors/prometheus.py
|
python
|
PrometheusMonitor.on_commit_completed
|
(self, consumer: ConsumerT,
state: typing.Any)
|
Call when consumer commit offset operation completed.
|
Call when consumer commit offset operation completed.
|
[
"Call",
"when",
"consumer",
"commit",
"offset",
"operation",
"completed",
"."
] |
def on_commit_completed(self, consumer: ConsumerT,
state: typing.Any) -> None:
"""Call when consumer commit offset operation completed."""
super().on_commit_completed(consumer, state)
self.consumer_commit_latency.observe(
self.ms_since(typing.cast(float, state)))
|
[
"def",
"on_commit_completed",
"(",
"self",
",",
"consumer",
":",
"ConsumerT",
",",
"state",
":",
"typing",
".",
"Any",
")",
"->",
"None",
":",
"super",
"(",
")",
".",
"on_commit_completed",
"(",
"consumer",
",",
"state",
")",
"self",
".",
"consumer_commit_latency",
".",
"observe",
"(",
"self",
".",
"ms_since",
"(",
"typing",
".",
"cast",
"(",
"float",
",",
"state",
")",
")",
")"
] |
https://github.com/robinhood/faust/blob/01b4c0ad8390221db71751d80001b0fd879291e2/faust/sensors/prometheus.py#L227-L232
|
||
larryhastings/gilectomy
|
4315ec3f1d6d4f813cc82ce27a24e7f784dbfc1a
|
Lib/tkinter/__init__.py
|
python
|
Misc._bind
|
(self, what, sequence, func, add, needcleanup=1)
|
Internal function.
|
Internal function.
|
[
"Internal",
"function",
"."
] |
def _bind(self, what, sequence, func, add, needcleanup=1):
"""Internal function."""
if isinstance(func, str):
self.tk.call(what + (sequence, func))
elif func:
funcid = self._register(func, self._substitute,
needcleanup)
cmd = ('%sif {"[%s %s]" == "break"} break\n'
%
(add and '+' or '',
funcid, self._subst_format_str))
self.tk.call(what + (sequence, cmd))
return funcid
elif sequence:
return self.tk.call(what + (sequence,))
else:
return self.tk.splitlist(self.tk.call(what))
|
[
"def",
"_bind",
"(",
"self",
",",
"what",
",",
"sequence",
",",
"func",
",",
"add",
",",
"needcleanup",
"=",
"1",
")",
":",
"if",
"isinstance",
"(",
"func",
",",
"str",
")",
":",
"self",
".",
"tk",
".",
"call",
"(",
"what",
"+",
"(",
"sequence",
",",
"func",
")",
")",
"elif",
"func",
":",
"funcid",
"=",
"self",
".",
"_register",
"(",
"func",
",",
"self",
".",
"_substitute",
",",
"needcleanup",
")",
"cmd",
"=",
"(",
"'%sif {\"[%s %s]\" == \"break\"} break\\n'",
"%",
"(",
"add",
"and",
"'+'",
"or",
"''",
",",
"funcid",
",",
"self",
".",
"_subst_format_str",
")",
")",
"self",
".",
"tk",
".",
"call",
"(",
"what",
"+",
"(",
"sequence",
",",
"cmd",
")",
")",
"return",
"funcid",
"elif",
"sequence",
":",
"return",
"self",
".",
"tk",
".",
"call",
"(",
"what",
"+",
"(",
"sequence",
",",
")",
")",
"else",
":",
"return",
"self",
".",
"tk",
".",
"splitlist",
"(",
"self",
".",
"tk",
".",
"call",
"(",
"what",
")",
")"
] |
https://github.com/larryhastings/gilectomy/blob/4315ec3f1d6d4f813cc82ce27a24e7f784dbfc1a/Lib/tkinter/__init__.py#L1036-L1052
|
||
javayhu/Gank-Alfred-Workflow
|
aca39bd0c7bc0c494eee204e10bca61dab760ab7
|
source-v1/workflow/update.py
|
python
|
install_update
|
(github_slug, current_version)
|
return True
|
If a newer release is available, download and install it
:param github_slug: ``username/repo`` for workflow's GitHub repo
:param current_version: the currently installed version of the
workflow. :ref:`Semantic versioning <semver>` is required.
:type current_version: ``unicode``
If an update is available, it will be downloaded and installed.
:returns: ``True`` if an update is installed, else ``False``
|
If a newer release is available, download and install it
|
[
"If",
"a",
"newer",
"release",
"is",
"available",
"download",
"and",
"install",
"it"
] |
def install_update(github_slug, current_version):
"""If a newer release is available, download and install it
:param github_slug: ``username/repo`` for workflow's GitHub repo
:param current_version: the currently installed version of the
workflow. :ref:`Semantic versioning <semver>` is required.
:type current_version: ``unicode``
If an update is available, it will be downloaded and installed.
:returns: ``True`` if an update is installed, else ``False``
"""
# TODO: `github_slug` and `current_version` are both unused.
update_data = wf().cached_data('__workflow_update_status', max_age=0)
if not update_data or not update_data.get('available'):
wf().logger.info('No update available')
return False
local_file = download_workflow(update_data['download_url'])
wf().logger.info('Installing updated workflow ...')
subprocess.call(['open', local_file])
update_data['available'] = False
wf().cache_data('__workflow_update_status', update_data)
return True
|
[
"def",
"install_update",
"(",
"github_slug",
",",
"current_version",
")",
":",
"# TODO: `github_slug` and `current_version` are both unusued.",
"update_data",
"=",
"wf",
"(",
")",
".",
"cached_data",
"(",
"'__workflow_update_status'",
",",
"max_age",
"=",
"0",
")",
"if",
"not",
"update_data",
"or",
"not",
"update_data",
".",
"get",
"(",
"'available'",
")",
":",
"wf",
"(",
")",
".",
"logger",
".",
"info",
"(",
"'No update available'",
")",
"return",
"False",
"local_file",
"=",
"download_workflow",
"(",
"update_data",
"[",
"'download_url'",
"]",
")",
"wf",
"(",
")",
".",
"logger",
".",
"info",
"(",
"'Installing updated workflow ...'",
")",
"subprocess",
".",
"call",
"(",
"[",
"'open'",
",",
"local_file",
"]",
")",
"update_data",
"[",
"'available'",
"]",
"=",
"False",
"wf",
"(",
")",
".",
"cache_data",
"(",
"'__workflow_update_status'",
",",
"update_data",
")",
"return",
"True"
] |
https://github.com/javayhu/Gank-Alfred-Workflow/blob/aca39bd0c7bc0c494eee204e10bca61dab760ab7/source-v1/workflow/update.py#L320-L348
|
|
khanhnamle1994/natural-language-processing
|
01d450d5ac002b0156ef4cf93a07cb508c1bcdc5
|
assignment1/.env/lib/python2.7/site-packages/pkg_resources/__init__.py
|
python
|
WorkingSet.subscribe
|
(self, callback, existing=True)
|
Invoke `callback` for all distributions
If `existing=True` (default),
call on all existing ones, as well.
|
Invoke `callback` for all distributions
|
[
"Invoke",
"callback",
"for",
"all",
"distributions"
] |
def subscribe(self, callback, existing=True):
"""Invoke `callback` for all distributions
If `existing=True` (default),
call on all existing ones, as well.
"""
if callback in self.callbacks:
return
self.callbacks.append(callback)
if not existing:
return
for dist in self:
callback(dist)
|
[
"def",
"subscribe",
"(",
"self",
",",
"callback",
",",
"existing",
"=",
"True",
")",
":",
"if",
"callback",
"in",
"self",
".",
"callbacks",
":",
"return",
"self",
".",
"callbacks",
".",
"append",
"(",
"callback",
")",
"if",
"not",
"existing",
":",
"return",
"for",
"dist",
"in",
"self",
":",
"callback",
"(",
"dist",
")"
] |
https://github.com/khanhnamle1994/natural-language-processing/blob/01d450d5ac002b0156ef4cf93a07cb508c1bcdc5/assignment1/.env/lib/python2.7/site-packages/pkg_resources/__init__.py#L991-L1003
|
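A usage sketch for the `WorkingSet.subscribe` record above; with the default `existing=True` the callback fires immediately for every distribution already on the path:

import pkg_resources  # the setuptools-vendored module shown above

seen = []
pkg_resources.working_set.subscribe(seen.append)  # callback(dist) per active distribution
print(len(seen), "distributions already active")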
||
IronLanguages/main
|
a949455434b1fda8c783289e897e78a9a0caabb5
|
External.LCA_RESTRICTED/Languages/IronPython/27/Lib/re.py
|
python
|
findall
|
(pattern, string, flags=0)
|
return _compile(pattern, flags).findall(string)
|
Return a list of all non-overlapping matches in the string.
If one or more groups are present in the pattern, return a
list of groups; this will be a list of tuples if the pattern
has more than one group.
Empty matches are included in the result.
|
Return a list of all non-overlapping matches in the string.
|
[
"Return",
"a",
"list",
"of",
"all",
"non",
"-",
"overlapping",
"matches",
"in",
"the",
"string",
"."
] |
def findall(pattern, string, flags=0):
"""Return a list of all non-overlapping matches in the string.
If one or more groups are present in the pattern, return a
list of groups; this will be a list of tuples if the pattern
has more than one group.
Empty matches are included in the result."""
return _compile(pattern, flags).findall(string)
|
[
"def",
"findall",
"(",
"pattern",
",",
"string",
",",
"flags",
"=",
"0",
")",
":",
"return",
"_compile",
"(",
"pattern",
",",
"flags",
")",
".",
"findall",
"(",
"string",
")"
] |
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/IronPython/27/Lib/re.py#L173-L181
|
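A self-contained illustration of the group behavior described in the `findall` docstring above, using the stdlib re module:

import re

print(re.findall(r"\d+", "a1 b22 c333"))  # ['1', '22', '333'] (no groups)
print(re.findall(r"(\w)(\d)", "a1 b2"))   # [('a', '1'), ('b', '2')] (two groups -> tuples)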
|
caiiiac/Machine-Learning-with-Python
|
1a26c4467da41ca4ebc3d5bd789ea942ef79422f
|
MachineLearning/venv/lib/python3.5/site-packages/setuptools/command/py36compat.py
|
python
|
sdist_add_defaults.add_defaults
|
(self)
|
Add all the default files to self.filelist:
- README or README.txt
- setup.py
- test/test*.py
- all pure Python modules mentioned in setup script
- all files pointed by package_data (build_py)
- all files defined in data_files.
- all files defined as scripts.
- all C sources listed as part of extensions or C libraries
in the setup script (doesn't catch C headers!)
Warns if (README or README.txt) or setup.py are missing; everything
else is optional.
|
Add all the default files to self.filelist:
- README or README.txt
- setup.py
- test/test*.py
- all pure Python modules mentioned in setup script
- all files pointed by package_data (build_py)
- all files defined in data_files.
- all files defined as scripts.
- all C sources listed as part of extensions or C libraries
in the setup script (doesn't catch C headers!)
Warns if (README or README.txt) or setup.py are missing; everything
else is optional.
|
[
"Add",
"all",
"the",
"default",
"files",
"to",
"self",
".",
"filelist",
":",
"-",
"README",
"or",
"README",
".",
"txt",
"-",
"setup",
".",
"py",
"-",
"test",
"/",
"test",
"*",
".",
"py",
"-",
"all",
"pure",
"Python",
"modules",
"mentioned",
"in",
"setup",
"script",
"-",
"all",
"files",
"pointed",
"by",
"package_data",
"(",
"build_py",
")",
"-",
"all",
"files",
"defined",
"in",
"data_files",
".",
"-",
"all",
"files",
"defined",
"as",
"scripts",
".",
"-",
"all",
"C",
"sources",
"listed",
"as",
"part",
"of",
"extensions",
"or",
"C",
"libraries",
"in",
"the",
"setup",
"script",
"(",
"doesn",
"t",
"catch",
"C",
"headers!",
")",
"Warns",
"if",
"(",
"README",
"or",
"README",
".",
"txt",
")",
"or",
"setup",
".",
"py",
"are",
"missing",
";",
"everything",
"else",
"is",
"optional",
"."
] |
def add_defaults(self):
"""Add all the default files to self.filelist:
- README or README.txt
- setup.py
- test/test*.py
- all pure Python modules mentioned in setup script
- all files pointed by package_data (build_py)
- all files defined in data_files.
- all files defined as scripts.
- all C sources listed as part of extensions or C libraries
in the setup script (doesn't catch C headers!)
Warns if (README or README.txt) or setup.py are missing; everything
else is optional.
"""
self._add_defaults_standards()
self._add_defaults_optional()
self._add_defaults_python()
self._add_defaults_data_files()
self._add_defaults_ext()
self._add_defaults_c_libs()
self._add_defaults_scripts()
|
[
"def",
"add_defaults",
"(",
"self",
")",
":",
"self",
".",
"_add_defaults_standards",
"(",
")",
"self",
".",
"_add_defaults_optional",
"(",
")",
"self",
".",
"_add_defaults_python",
"(",
")",
"self",
".",
"_add_defaults_data_files",
"(",
")",
"self",
".",
"_add_defaults_ext",
"(",
")",
"self",
".",
"_add_defaults_c_libs",
"(",
")",
"self",
".",
"_add_defaults_scripts",
"(",
")"
] |
https://github.com/caiiiac/Machine-Learning-with-Python/blob/1a26c4467da41ca4ebc3d5bd789ea942ef79422f/MachineLearning/venv/lib/python3.5/site-packages/setuptools/command/py36compat.py#L18-L38
|
||
spartan-array/spartan
|
fdcf059ce7e48688648d793d632dc5961f4e72b5
|
spartan/array/distarray.py
|
python
|
_tile_mapper
|
(tile_id, blob, array=None, user_fn=None, **kw)
|
return user_fn(ex, **kw)
|
Invoke ``user_fn`` on ``blob``, and construct tiles from the results.
|
Invoke ``user_fn`` on ``blob``, and construct tiles from the results.
|
[
"Invoke",
"user_fn",
"on",
"blob",
"and",
"construct",
"tiles",
"from",
"the",
"results",
"."
] |
def _tile_mapper(tile_id, blob, array=None, user_fn=None, **kw):
'''Invoke ``user_fn`` on ``blob``, and construct tiles from the results.'''
ex = array.extent_for_blob(tile_id)
return user_fn(ex, **kw)
|
[
"def",
"_tile_mapper",
"(",
"tile_id",
",",
"blob",
",",
"array",
"=",
"None",
",",
"user_fn",
"=",
"None",
",",
"*",
"*",
"kw",
")",
":",
"ex",
"=",
"array",
".",
"extent_for_blob",
"(",
"tile_id",
")",
"return",
"user_fn",
"(",
"ex",
",",
"*",
"*",
"kw",
")"
] |
https://github.com/spartan-array/spartan/blob/fdcf059ce7e48688648d793d632dc5961f4e72b5/spartan/array/distarray.py#L113-L116
|