| repo (string, 7–54 chars) | path (string, 4–192 chars) | url (string, 87–284 chars) | code (string, 78–104k chars) | code_tokens (list) | docstring (string, 1–46.9k chars) | docstring_tokens (list) | language (string, 1 distinct value: python) | partition (string, 3 distinct values: train/valid/test) |
---|---|---|---|---|---|---|---|---|
KimiNewt/pyshark
|
src/pyshark/packet/fields.py
|
https://github.com/KimiNewt/pyshark/blob/089ea6208c4321f03bc548f491e00a053285918f/src/pyshark/packet/fields.py#L56-L64
|
def binary_value(self):
"""
Converts this field to binary (assuming it's a binary string)
"""
str_raw_value = str(self.raw_value)
if len(str_raw_value) % 2 == 1:
str_raw_value = '0' + str_raw_value
return binascii.unhexlify(str_raw_value)
|
[
"def",
"binary_value",
"(",
"self",
")",
":",
"str_raw_value",
"=",
"str",
"(",
"self",
".",
"raw_value",
")",
"if",
"len",
"(",
"str_raw_value",
")",
"%",
"2",
"==",
"1",
":",
"str_raw_value",
"=",
"'0'",
"+",
"str_raw_value",
"return",
"binascii",
".",
"unhexlify",
"(",
"str_raw_value",
")"
] |
Converts this field to binary (assuming it's a binary string)
|
[
"Converts",
"this",
"field",
"to",
"binary",
"(",
"assuming",
"it",
"s",
"a",
"binary",
"string",
")"
] |
python
|
train
|
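The `binary_value` helper above is easy to exercise outside pyshark. A minimal standalone sketch (plain `binascii`, with a made-up raw value) showing why the odd-length padding step matters:

```python
import binascii

raw_value = "a2f"  # hypothetical raw field value: a hex string of odd length

# Pad to an even number of hex digits, exactly as binary_value() does above.
str_raw_value = str(raw_value)
if len(str_raw_value) % 2 == 1:
    str_raw_value = '0' + str_raw_value

print(binascii.unhexlify(str_raw_value))  # b'\n/', i.e. bytes 0x0a 0x2f
```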
ZELLMECHANIK-DRESDEN/dclab
|
dclab/features/bright.py
|
https://github.com/ZELLMECHANIK-DRESDEN/dclab/blob/79002c4356e7020c2ba73ab0a3819c9abd4affec/dclab/features/bright.py#L12-L85
|
def get_bright(mask, image, ret_data="avg,sd"):
"""Compute avg and/or std of the event brightness
The event brightness is defined by the gray-scale values of the
image data within the event mask area.
Parameters
----------
mask: ndarray or list of ndarrays of shape (M,N) and dtype bool
The mask values, True where the event is located in `image`.
image: ndarray or list of ndarrays of shape (M,N)
A 2D array that holds the image in form of grayscale values
of an event.
ret_data: str
A comma-separated list of metrics to compute
- "avg": compute the average
- "sd": compute the standard deviation
Selected metrics are returned in alphabetical order.
Returns
-------
bright_avg: float or ndarray of size N
Average image data within the contour
bright_std: float or ndarray of size N
Standard deviation of image data within the contour
"""
# This method is based on a pull request by Maik Herbig.
ret_avg = "avg" in ret_data
ret_std = "sd" in ret_data
if ret_avg + ret_std == 0:
raise ValueError("No valid metrices selected!")
if isinstance(mask, np.ndarray) and len(mask.shape) == 2:
# We have a single image
image = [image]
mask = [mask]
ret_list = False
else:
ret_list = True
length = min(len(mask), len(image))
# Results are stored in a separate array initialized with nans
if ret_avg:
avg = np.zeros(length, dtype=float) * np.nan
if ret_std:
std = np.zeros(length, dtype=float) * np.nan
for ii in range(length):
imgi = image[ii]
mski = mask[ii]
# Assign results
if ret_avg:
avg[ii] = np.mean(imgi[mski])
if ret_std:
std[ii] = np.std(imgi[mski])
results = []
# Keep alphabetical order
if ret_avg:
results.append(avg)
if ret_std:
results.append(std)
if not ret_list:
# Only return scalars
results = [r[0] for r in results]
if ret_avg+ret_std == 1:
# Only return one column
return results[0]
return results
|
[
"def",
"get_bright",
"(",
"mask",
",",
"image",
",",
"ret_data",
"=",
"\"avg,sd\"",
")",
":",
"# This method is based on a pull request by Maik Herbig.",
"ret_avg",
"=",
"\"avg\"",
"in",
"ret_data",
"ret_std",
"=",
"\"sd\"",
"in",
"ret_data",
"if",
"ret_avg",
"+",
"ret_std",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"\"No valid metrices selected!\"",
")",
"if",
"isinstance",
"(",
"mask",
",",
"np",
".",
"ndarray",
")",
"and",
"len",
"(",
"mask",
".",
"shape",
")",
"==",
"2",
":",
"# We have a single image",
"image",
"=",
"[",
"image",
"]",
"mask",
"=",
"[",
"mask",
"]",
"ret_list",
"=",
"False",
"else",
":",
"ret_list",
"=",
"True",
"length",
"=",
"min",
"(",
"len",
"(",
"mask",
")",
",",
"len",
"(",
"image",
")",
")",
"# Results are stored in a separate array initialized with nans",
"if",
"ret_avg",
":",
"avg",
"=",
"np",
".",
"zeros",
"(",
"length",
",",
"dtype",
"=",
"float",
")",
"*",
"np",
".",
"nan",
"if",
"ret_std",
":",
"std",
"=",
"np",
".",
"zeros",
"(",
"length",
",",
"dtype",
"=",
"float",
")",
"*",
"np",
".",
"nan",
"for",
"ii",
"in",
"range",
"(",
"length",
")",
":",
"imgi",
"=",
"image",
"[",
"ii",
"]",
"mski",
"=",
"mask",
"[",
"ii",
"]",
"# Assign results",
"if",
"ret_avg",
":",
"avg",
"[",
"ii",
"]",
"=",
"np",
".",
"mean",
"(",
"imgi",
"[",
"mski",
"]",
")",
"if",
"ret_std",
":",
"std",
"[",
"ii",
"]",
"=",
"np",
".",
"std",
"(",
"imgi",
"[",
"mski",
"]",
")",
"results",
"=",
"[",
"]",
"# Keep alphabetical order",
"if",
"ret_avg",
":",
"results",
".",
"append",
"(",
"avg",
")",
"if",
"ret_std",
":",
"results",
".",
"append",
"(",
"std",
")",
"if",
"not",
"ret_list",
":",
"# Only return scalars",
"results",
"=",
"[",
"r",
"[",
"0",
"]",
"for",
"r",
"in",
"results",
"]",
"if",
"ret_avg",
"+",
"ret_std",
"==",
"1",
":",
"# Only return one column",
"return",
"results",
"[",
"0",
"]",
"return",
"results"
] |
Compute avg and/or std of the event brightness
The event brightness is defined by the gray-scale values of the
image data within the event mask area.
Parameters
----------
mask: ndarray or list of ndarrays of shape (M,N) and dtype bool
The mask values, True where the event is located in `image`.
image: ndarray or list of ndarrays of shape (M,N)
A 2D array that holds the image in form of grayscale values
of an event.
ret_data: str
A comma-separated list of metrics to compute
- "avg": compute the average
- "sd": compute the standard deviation
Selected metrics are returned in alphabetical order.
Returns
-------
bright_avg: float or ndarray of size N
Average image data within the contour
bright_std: float or ndarray of size N
Standard deviation of image data within the contour
|
[
"Compute",
"avg",
"and",
"/",
"or",
"std",
"of",
"the",
"event",
"brightness"
] |
python
|
train
|
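For a single event, the core of `get_bright` reduces to a masked mean and standard deviation. A plain-numpy sketch with synthetic data (the array sizes are illustrative, not from dclab):

```python
import numpy as np

rng = np.random.default_rng(0)
image = rng.integers(0, 255, size=(32, 32)).astype(float)  # grayscale event image
mask = np.zeros((32, 32), dtype=bool)
mask[10:20, 10:20] = True  # event located here

# Equivalent of get_bright(mask, image, ret_data="avg,sd") for one event:
bright_avg = np.mean(image[mask])
bright_std = np.std(image[mask])
print(bright_avg, bright_std)
```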
tensorflow/probability
|
tensorflow_probability/python/math/root_search.py
|
https://github.com/tensorflow/probability/blob/e87fe34111d68c35db0f9eeb4935f1ece9e1a8f5/tensorflow_probability/python/math/root_search.py#L44-L321
|
def secant_root(objective_fn,
initial_position,
next_position=None,
value_at_position=None,
position_tolerance=1e-8,
value_tolerance=1e-8,
max_iterations=50,
stopping_policy_fn=tf.reduce_all,
validate_args=False,
name=None):
r"""Finds root(s) of a function of single variable using the secant method.
The [secant method](https://en.wikipedia.org/wiki/Secant_method) is a
root-finding algorithm that uses a succession of roots of secant lines to
better approximate a root of a function. The secant method can be thought of
as a finite-difference approximation of Newton's method.
Args:
objective_fn: Python callable for which roots are searched. It must be a
callable of a single variable. `objective_fn` must return a `Tensor` of
the same shape and dtype as `initial_position`.
initial_position: `Tensor` or Python float representing the starting
position. The function will search for roots in the neighborhood of each
point. The shape of `initial_position` should match that of the input to
`objective_fn`.
next_position: Optional `Tensor` representing the next position in the
search. If specified, this argument must broadcast with the shape of
`initial_position` and have the same dtype. It will be used to compute the
first step to take when searching for roots. If not specified, a default
value will be used instead.
Default value: `initial_position * (1 + 1e-4) + sign(initial_position) *
1e-4`.
value_at_position: Optional `Tensor` or Python float representing the value
of `objective_fn` at `initial_position`. If specified, this argument must
have the same shape and dtype as `initial_position`. If not specified, the
value will be evaluated during the search.
Default value: None.
position_tolerance: Optional `Tensor` representing the tolerance for the
estimated roots. If specified, this argument must broadcast with the shape
of `initial_position` and have the same dtype.
Default value: `1e-8`.
value_tolerance: Optional `Tensor` representing the tolerance used to check
for roots. If the absolute value of `objective_fn` is smaller than
`value_tolerance` at a given position, then that position is considered a
root for the function. If specified, this argument must broadcast with the
shape of `initial_position` and have the same dtype.
Default value: `1e-8`.
max_iterations: Optional `Tensor` or Python integer specifying the maximum
number of steps to perform for each initial position. Must broadcast with
the shape of `initial_position`.
Default value: `50`.
stopping_policy_fn: Python `callable` controlling the algorithm termination.
It must be a callable accepting a `Tensor` of booleans with the shape of
`initial_position` (each denoting whether the search is finished for each
starting point), and returning a scalar boolean `Tensor` (indicating
whether the overall search should stop). Typical values are
`tf.reduce_all` (which returns only when the search is finished for all
points), and `tf.reduce_any` (which returns as soon as the search is
finished for any point).
Default value: `tf.reduce_all` (returns only when the search is finished
for all points).
validate_args: Python `bool` indicating whether to validate arguments such
as `position_tolerance`, `value_tolerance`, and `max_iterations`.
Default value: `False`.
name: Python `str` name prefixed to ops created by this function.
Returns:
root_search_results: A Python `namedtuple` containing the following items:
estimated_root: `Tensor` containing the last position explored. If the
search was successful within the specified tolerance, this position is
a root of the objective function.
objective_at_estimated_root: `Tensor` containing the value of the
objective function at `position`. If the search was successful within
the specified tolerance, then this is close to 0.
num_iterations: The number of iterations performed.
Raises:
ValueError: if a non-callable `stopping_policy_fn` is passed.
#### Examples
```python
import tensorflow as tf
import tensorflow_probability as tfp
tf.enable_eager_execution()
# Example 1: Roots of a single function from two different starting points.
f = lambda x: (63 * x**5 - 70 * x**3 + 15 * x) / 8.
x = tf.constant([-1, 10], dtype=tf.float64)
tfp.math.secant_root(objective_fn=f, initial_position=x)
# ==> RootSearchResults(
estimated_root=array([-0.90617985, 0.90617985]),
objective_at_estimated_root=array([-4.81727769e-10, 7.44957651e-10]),
num_iterations=array([ 7, 24], dtype=int32))
tfp.math.secant_root(objective_fn=f,
initial_position=x,
stopping_policy_fn=tf.reduce_any)
# ==> RootSearchResults(
estimated_root=array([-0.90617985, 3.27379206]),
objective_at_estimated_root=array([-4.81727769e-10, 2.66058312e+03]),
num_iterations=array([7, 8], dtype=int32))
# Example 2: Roots of a multiplex function from a single starting point.
def f(x):
return tf.constant([0., 63. / 8], dtype=tf.float64) * x**5 \
+ tf.constant([5. / 2, -70. / 8], dtype=tf.float64) * x**3 \
+ tf.constant([-3. / 2, 15. / 8], dtype=tf.float64) * x
x = tf.constant([-1, -1], dtype=tf.float64)
tfp.math.secant_root(objective_fn=f, initial_position=x)
# ==> RootSearchResults(
estimated_root=array([-0.77459667, -0.90617985]),
objective_at_estimated_root=array([-7.81339438e-11, -4.81727769e-10]),
num_iterations=array([7, 7], dtype=int32))
# Example 3: Roots of a multiplex function from two starting points.
def f(x):
return tf.constant([0., 63. / 8], dtype=tf.float64) * x**5 \
+ tf.constant([5. / 2, -70. / 8], dtype=tf.float64) * x**3 \
+ tf.constant([-3. / 2, 15. / 8], dtype=tf.float64) * x
x = tf.constant([[-1, -1], [10, 10]], dtype=tf.float64)
tfp.math.secant_root(objective_fn=f, initial_position=x)
# ==> RootSearchResults(
estimated_root=array([
[-0.77459667, -0.90617985],
[ 0.77459667, 0.90617985]]),
objective_at_estimated_root=array([
[-7.81339438e-11, -4.81727769e-10],
[6.66025013e-11, 7.44957651e-10]]),
num_iterations=array([
[7, 7],
[16, 24]], dtype=int32))
```
"""
if not callable(stopping_policy_fn):
raise ValueError('stopping_policy_fn must be callable')
position = tf.convert_to_tensor(
value=initial_position,
name='position',
)
value_at_position = tf.convert_to_tensor(
value=value_at_position or objective_fn(position),
name='value_at_position',
dtype=position.dtype.base_dtype)
zero = tf.zeros_like(position)
position_tolerance = tf.convert_to_tensor(
value=position_tolerance, name='position_tolerance', dtype=position.dtype)
value_tolerance = tf.convert_to_tensor(
value=value_tolerance, name='value_tolerance', dtype=position.dtype)
num_iterations = tf.zeros_like(position, dtype=tf.int32)
max_iterations = tf.convert_to_tensor(value=max_iterations, dtype=tf.int32)
max_iterations = tf.broadcast_to(
max_iterations, name='max_iterations', shape=position.shape)
# Compute the step from `next_position` if present. This covers the case where
# a user has two starting points, which bound the root or has a specific step
# size in mind.
if next_position is None:
epsilon = tf.constant(1e-4, dtype=position.dtype, shape=position.shape)
step = position * epsilon + tf.sign(position) * epsilon
else:
step = next_position - initial_position
finished = tf.constant(False, shape=position.shape)
# Negate `stopping_condition` to determine if the search should continue.
# This means, in particular, that tf.reduce_*all* will return only when the
# search is finished for *all* starting points.
def _should_continue(position, value_at_position, num_iterations, step,
finished):
"""Indicates whether the overall search should continue.
Args:
position: `Tensor` containing the current root estimates.
value_at_position: `Tensor` containing the value of `objective_fn` at
`position`.
num_iterations: `Tensor` containing the current iteration index for each
point.
step: `Tensor` containing the size of the step to take for each point.
finished: `Tensor` indicating for which points the search is finished.
Returns:
A boolean value indicating whether the overall search should continue.
"""
del position, value_at_position, num_iterations, step # Unused
return ~tf.convert_to_tensor(
value=stopping_policy_fn(finished), name='should_stop', dtype=tf.bool)
# For each point in `position`, the search is stopped if either:
# (1) A root has been found
# (2) f(position) == f(position + step)
# (3) The maximum number of iterations has been reached
# In case (2), the search may be stopped both before the desired tolerance is
# achieved (or even a root is found), and the maximum number of iterations is
# reached.
def _body(position, value_at_position, num_iterations, step, finished):
"""Performs one iteration of the secant root-finding algorithm.
Args:
position: `Tensor` containing the current root estimates.
value_at_position: `Tensor` containing the value of `objective_fn` at
`position`.
num_iterations: `Tensor` containing the current iteration index for each
point.
step: `Tensor` containing the size of the step to take for each point.
finished: `Tensor` indicating for which points the search is finished.
Returns:
The `Tensor`s to use for the next iteration of the algorithm.
"""
# True if the search was already finished, or (1) or (3) just became true.
was_finished = finished | (num_iterations >= max_iterations) | (
tf.abs(step) < position_tolerance) | (
tf.abs(value_at_position) < value_tolerance)
# Compute the next position and the value at that point.
next_position = tf.where(was_finished, position, position + step)
value_at_next_position = tf.where(was_finished, value_at_position,
objective_fn(next_position))
# True if the search was already finished, or (2) just became true.
is_finished = tf.equal(value_at_position, value_at_next_position)
# Use the mid-point between the last two positions if (2) just became true.
next_position = tf.where(is_finished & ~was_finished,
(position + next_position) * 0.5, next_position)
# Once finished, stop updating the iteration index and set the step to zero.
num_iterations = tf.where(is_finished, num_iterations, num_iterations + 1)
next_step = tf.where(
is_finished, zero, step * value_at_next_position /
(value_at_position - value_at_next_position))
return (next_position, value_at_next_position, num_iterations, next_step,
is_finished)
with tf.compat.v1.name_scope(
name, 'secant_root',
[position, next_position, value_at_position, max_iterations]):
assertions = []
if validate_args:
assertions += [
tf.Assert(
tf.reduce_all(input_tensor=position_tolerance > zero),
[position_tolerance]),
tf.Assert(
tf.reduce_all(input_tensor=value_tolerance > zero),
[value_tolerance]),
tf.Assert(
tf.reduce_all(input_tensor=max_iterations >= num_iterations),
[max_iterations]),
]
with tf.control_dependencies(assertions):
root, value_at_root, num_iterations, _, _ = tf.while_loop(
cond=_should_continue,
body=_body,
loop_vars=[
position, value_at_position, num_iterations, step, finished
])
return RootSearchResults(
estimated_root=root,
objective_at_estimated_root=value_at_root,
num_iterations=num_iterations)
|
[
"def",
"secant_root",
"(",
"objective_fn",
",",
"initial_position",
",",
"next_position",
"=",
"None",
",",
"value_at_position",
"=",
"None",
",",
"position_tolerance",
"=",
"1e-8",
",",
"value_tolerance",
"=",
"1e-8",
",",
"max_iterations",
"=",
"50",
",",
"stopping_policy_fn",
"=",
"tf",
".",
"reduce_all",
",",
"validate_args",
"=",
"False",
",",
"name",
"=",
"None",
")",
":",
"if",
"not",
"callable",
"(",
"stopping_policy_fn",
")",
":",
"raise",
"ValueError",
"(",
"'stopping_policy_fn must be callable'",
")",
"position",
"=",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"initial_position",
",",
"name",
"=",
"'position'",
",",
")",
"value_at_position",
"=",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"value_at_position",
"or",
"objective_fn",
"(",
"position",
")",
",",
"name",
"=",
"'value_at_position'",
",",
"dtype",
"=",
"position",
".",
"dtype",
".",
"base_dtype",
")",
"zero",
"=",
"tf",
".",
"zeros_like",
"(",
"position",
")",
"position_tolerance",
"=",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"position_tolerance",
",",
"name",
"=",
"'position_tolerance'",
",",
"dtype",
"=",
"position",
".",
"dtype",
")",
"value_tolerance",
"=",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"value_tolerance",
",",
"name",
"=",
"'value_tolerance'",
",",
"dtype",
"=",
"position",
".",
"dtype",
")",
"num_iterations",
"=",
"tf",
".",
"zeros_like",
"(",
"position",
",",
"dtype",
"=",
"tf",
".",
"int32",
")",
"max_iterations",
"=",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"max_iterations",
",",
"dtype",
"=",
"tf",
".",
"int32",
")",
"max_iterations",
"=",
"tf",
".",
"broadcast_to",
"(",
"max_iterations",
",",
"name",
"=",
"'max_iterations'",
",",
"shape",
"=",
"position",
".",
"shape",
")",
"# Compute the step from `next_position` if present. This covers the case where",
"# a user has two starting points, which bound the root or has a specific step",
"# size in mind.",
"if",
"next_position",
"is",
"None",
":",
"epsilon",
"=",
"tf",
".",
"constant",
"(",
"1e-4",
",",
"dtype",
"=",
"position",
".",
"dtype",
",",
"shape",
"=",
"position",
".",
"shape",
")",
"step",
"=",
"position",
"*",
"epsilon",
"+",
"tf",
".",
"sign",
"(",
"position",
")",
"*",
"epsilon",
"else",
":",
"step",
"=",
"next_position",
"-",
"initial_position",
"finished",
"=",
"tf",
".",
"constant",
"(",
"False",
",",
"shape",
"=",
"position",
".",
"shape",
")",
"# Negate `stopping_condition` to determine if the search should continue.",
"# This means, in particular, that tf.reduce_*all* will return only when the",
"# search is finished for *all* starting points.",
"def",
"_should_continue",
"(",
"position",
",",
"value_at_position",
",",
"num_iterations",
",",
"step",
",",
"finished",
")",
":",
"\"\"\"Indicates whether the overall search should continue.\n\n Args:\n position: `Tensor` containing the current root estimates.\n value_at_position: `Tensor` containing the value of `objective_fn` at\n `position`.\n num_iterations: `Tensor` containing the current iteration index for each\n point.\n step: `Tensor` containing the size of the step to take for each point.\n finished: `Tensor` indicating for which points the search is finished.\n\n Returns:\n A boolean value indicating whether the overall search should continue.\n \"\"\"",
"del",
"position",
",",
"value_at_position",
",",
"num_iterations",
",",
"step",
"# Unused",
"return",
"~",
"tf",
".",
"convert_to_tensor",
"(",
"value",
"=",
"stopping_policy_fn",
"(",
"finished",
")",
",",
"name",
"=",
"'should_stop'",
",",
"dtype",
"=",
"tf",
".",
"bool",
")",
"# For each point in `position`, the search is stopped if either:",
"# (1) A root has been found",
"# (2) f(position) == f(position + step)",
"# (3) The maximum number of iterations has been reached",
"# In case (2), the search may be stopped both before the desired tolerance is",
"# achieved (or even a root is found), and the maximum number of iterations is",
"# reached.",
"def",
"_body",
"(",
"position",
",",
"value_at_position",
",",
"num_iterations",
",",
"step",
",",
"finished",
")",
":",
"\"\"\"Performs one iteration of the secant root-finding algorithm.\n\n Args:\n position: `Tensor` containing the current root estimates.\n value_at_position: `Tensor` containing the value of `objective_fn` at\n `position`.\n num_iterations: `Tensor` containing the current iteration index for each\n point.\n step: `Tensor` containing the size of the step to take for each point.\n finished: `Tensor` indicating for which points the search is finished.\n\n Returns:\n The `Tensor`s to use for the next iteration of the algorithm.\n \"\"\"",
"# True if the search was already finished, or (1) or (3) just became true.",
"was_finished",
"=",
"finished",
"|",
"(",
"num_iterations",
">=",
"max_iterations",
")",
"|",
"(",
"tf",
".",
"abs",
"(",
"step",
")",
"<",
"position_tolerance",
")",
"|",
"(",
"tf",
".",
"abs",
"(",
"value_at_position",
")",
"<",
"value_tolerance",
")",
"# Compute the next position and the value at that point.",
"next_position",
"=",
"tf",
".",
"where",
"(",
"was_finished",
",",
"position",
",",
"position",
"+",
"step",
")",
"value_at_next_position",
"=",
"tf",
".",
"where",
"(",
"was_finished",
",",
"value_at_position",
",",
"objective_fn",
"(",
"next_position",
")",
")",
"# True if the search was already finished, or (2) just became true.",
"is_finished",
"=",
"tf",
".",
"equal",
"(",
"value_at_position",
",",
"value_at_next_position",
")",
"# Use the mid-point between the last two positions if (2) just became true.",
"next_position",
"=",
"tf",
".",
"where",
"(",
"is_finished",
"&",
"~",
"was_finished",
",",
"(",
"position",
"+",
"next_position",
")",
"*",
"0.5",
",",
"next_position",
")",
"# Once finished, stop updating the iteration index and set the step to zero.",
"num_iterations",
"=",
"tf",
".",
"where",
"(",
"is_finished",
",",
"num_iterations",
",",
"num_iterations",
"+",
"1",
")",
"next_step",
"=",
"tf",
".",
"where",
"(",
"is_finished",
",",
"zero",
",",
"step",
"*",
"value_at_next_position",
"/",
"(",
"value_at_position",
"-",
"value_at_next_position",
")",
")",
"return",
"(",
"next_position",
",",
"value_at_next_position",
",",
"num_iterations",
",",
"next_step",
",",
"is_finished",
")",
"with",
"tf",
".",
"compat",
".",
"v1",
".",
"name_scope",
"(",
"name",
",",
"'secant_root'",
",",
"[",
"position",
",",
"next_position",
",",
"value_at_position",
",",
"max_iterations",
"]",
")",
":",
"assertions",
"=",
"[",
"]",
"if",
"validate_args",
":",
"assertions",
"+=",
"[",
"tf",
".",
"Assert",
"(",
"tf",
".",
"reduce_all",
"(",
"input_tensor",
"=",
"position_tolerance",
">",
"zero",
")",
",",
"[",
"position_tolerance",
"]",
")",
",",
"tf",
".",
"Assert",
"(",
"tf",
".",
"reduce_all",
"(",
"input_tensor",
"=",
"value_tolerance",
">",
"zero",
")",
",",
"[",
"value_tolerance",
"]",
")",
",",
"tf",
".",
"Assert",
"(",
"tf",
".",
"reduce_all",
"(",
"input_tensor",
"=",
"max_iterations",
">=",
"num_iterations",
")",
",",
"[",
"max_iterations",
"]",
")",
",",
"]",
"with",
"tf",
".",
"control_dependencies",
"(",
"assertions",
")",
":",
"root",
",",
"value_at_root",
",",
"num_iterations",
",",
"_",
",",
"_",
"=",
"tf",
".",
"while_loop",
"(",
"cond",
"=",
"_should_continue",
",",
"body",
"=",
"_body",
",",
"loop_vars",
"=",
"[",
"position",
",",
"value_at_position",
",",
"num_iterations",
",",
"step",
",",
"finished",
"]",
")",
"return",
"RootSearchResults",
"(",
"estimated_root",
"=",
"root",
",",
"objective_at_estimated_root",
"=",
"value_at_root",
",",
"num_iterations",
"=",
"num_iterations",
")"
] |
r"""Finds root(s) of a function of single variable using the secant method.
The [secant method](https://en.wikipedia.org/wiki/Secant_method) is a
root-finding algorithm that uses a succession of roots of secant lines to
better approximate a root of a function. The secant method can be thought of
as a finite-difference approximation of Newton's method.
Args:
objective_fn: Python callable for which roots are searched. It must be a
callable of a single variable. `objective_fn` must return a `Tensor` of
the same shape and dtype as `initial_position`.
initial_position: `Tensor` or Python float representing the starting
position. The function will search for roots in the neighborhood of each
point. The shape of `initial_position` should match that of the input to
`objective_fn`.
next_position: Optional `Tensor` representing the next position in the
search. If specified, this argument must broadcast with the shape of
`initial_position` and have the same dtype. It will be used to compute the
first step to take when searching for roots. If not specified, a default
value will be used instead.
Default value: `initial_position * (1 + 1e-4) + sign(initial_position) *
1e-4`.
value_at_position: Optional `Tensor` or Python float representing the value
of `objective_fn` at `initial_position`. If specified, this argument must
have the same shape and dtype as `initial_position`. If not specified, the
value will be evaluated during the search.
Default value: None.
position_tolerance: Optional `Tensor` representing the tolerance for the
estimated roots. If specified, this argument must broadcast with the shape
of `initial_position` and have the same dtype.
Default value: `1e-8`.
value_tolerance: Optional `Tensor` representing the tolerance used to check
for roots. If the absolute value of `objective_fn` is smaller than
`value_tolerance` at a given position, then that position is considered a
root for the function. If specified, this argument must broadcast with the
shape of `initial_position` and have the same dtype.
Default value: `1e-8`.
max_iterations: Optional `Tensor` or Python integer specifying the maximum
number of steps to perform for each initial position. Must broadcast with
the shape of `initial_position`.
Default value: `50`.
stopping_policy_fn: Python `callable` controlling the algorithm termination.
It must be a callable accepting a `Tensor` of booleans with the shape of
`initial_position` (each denoting whether the search is finished for each
starting point), and returning a scalar boolean `Tensor` (indicating
whether the overall search should stop). Typical values are
`tf.reduce_all` (which returns only when the search is finished for all
points), and `tf.reduce_any` (which returns as soon as the search is
finished for any point).
Default value: `tf.reduce_all` (returns only when the search is finished
for all points).
validate_args: Python `bool` indicating whether to validate arguments such
as `position_tolerance`, `value_tolerance`, and `max_iterations`.
Default value: `False`.
name: Python `str` name prefixed to ops created by this function.
Returns:
root_search_results: A Python `namedtuple` containing the following items:
estimated_root: `Tensor` containing the last position explored. If the
search was successful within the specified tolerance, this position is
a root of the objective function.
objective_at_estimated_root: `Tensor` containing the value of the
objective function at `position`. If the search was successful within
the specified tolerance, then this is close to 0.
num_iterations: The number of iterations performed.
Raises:
ValueError: if a non-callable `stopping_policy_fn` is passed.
#### Examples
```python
import tensorflow as tf
import tensorflow_probability as tfp
tf.enable_eager_execution()
# Example 1: Roots of a single function from two different starting points.
f = lambda x: (63 * x**5 - 70 * x**3 + 15 * x) / 8.
x = tf.constant([-1, 10], dtype=tf.float64)
tfp.math.secant_root(objective_fn=f, initial_position=x)
# ==> RootSearchResults(
estimated_root=array([-0.90617985, 0.90617985]),
objective_at_estimated_root=array([-4.81727769e-10, 7.44957651e-10]),
num_iterations=array([ 7, 24], dtype=int32))
tfp.math.secant_root(objective_fn=f,
initial_position=x,
stopping_policy_fn=tf.reduce_any)
# ==> RootSearchResults(
estimated_root=array([-0.90617985, 3.27379206]),
objective_at_estimated_root=array([-4.81727769e-10, 2.66058312e+03]),
num_iterations=array([7, 8], dtype=int32))
# Example 2: Roots of a multiplex function from a single starting point.
def f(x):
return tf.constant([0., 63. / 8], dtype=tf.float64) * x**5 \
+ tf.constant([5. / 2, -70. / 8], dtype=tf.float64) * x**3 \
+ tf.constant([-3. / 2, 15. / 8], dtype=tf.float64) * x
x = tf.constant([-1, -1], dtype=tf.float64)
tfp.math.secant_root(objective_fn=f, initial_position=x)
# ==> RootSearchResults(
estimated_root=array([-0.77459667, -0.90617985]),
objective_at_estimated_root=array([-7.81339438e-11, -4.81727769e-10]),
num_iterations=array([7, 7], dtype=int32))
# Example 3: Roots of a multiplex function from two starting points.
def f(x):
return tf.constant([0., 63. / 8], dtype=tf.float64) * x**5 \
+ tf.constant([5. / 2, -70. / 8], dtype=tf.float64) * x**3 \
+ tf.constant([-3. / 2, 15. / 8], dtype=tf.float64) * x
x = tf.constant([[-1, -1], [10, 10]], dtype=tf.float64)
tfp.math.secant_root(objective_fn=f, initial_position=x)
# ==> RootSearchResults(
estimated_root=array([
[-0.77459667, -0.90617985],
[ 0.77459667, 0.90617985]]),
objective_at_estimated_root=array([
[-7.81339438e-11, -4.81727769e-10],
[6.66025013e-11, 7.44957651e-10]]),
num_iterations=array([
[7, 7],
[16, 24]], dtype=int32))
```
|
[
"r",
"Finds",
"root",
"(",
"s",
")",
"of",
"a",
"function",
"of",
"single",
"variable",
"using",
"the",
"secant",
"method",
"."
] |
python
|
test
|
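Stripped of the TensorFlow batching and control flow, the update computed in `_body` is the classic secant step. A pure-Python sketch (not the TFP API; the starting points roughly mirror Example 1):

```python
def secant_sketch(f, x0, x1, tol=1e-8, max_iter=50):
    # Mirrors the update in _body: x_next = x1 - f(x1) * (x1 - x0) / (f(x1) - f(x0))
    for _ in range(max_iter):
        f0, f1 = f(x0), f(x1)
        if abs(f1) < tol or f0 == f1:  # value tolerance hit, or case (2) above
            break
        x0, x1 = x1, x1 - f1 * (x1 - x0) / (f1 - f0)
    return x1

f = lambda x: (63 * x**5 - 70 * x**3 + 15 * x) / 8.
print(secant_sketch(f, -1.0, -1.0001))  # approx. -0.90617985, cf. Example 1
```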
marrow/WebCore
|
web/server/eventlet_.py
|
https://github.com/marrow/WebCore/blob/38d50f8022ca62976a1e5ff23f7714bd647b6532/web/server/eventlet_.py#L15-L22
|
def serve(application, host='127.0.0.1', port=8080):
"""Eventlet-based WSGI-HTTP server.
For a more fully-featured Eventlet-capable interface, see also [Spawning](http://pypi.python.org/pypi/Spawning/).
"""
# Instantiate the server with a bound port and with our application.
server(listen(host, int(port)), application)
|
[
"def",
"serve",
"(",
"application",
",",
"host",
"=",
"'127.0.0.1'",
",",
"port",
"=",
"8080",
")",
":",
"# Instantiate the server with a bound port and with our application.",
"server",
"(",
"listen",
"(",
"host",
",",
"int",
"(",
"port",
")",
")",
",",
"application",
")"
] |
Eventlet-based WSGI-HTTP server.
For a more fully-featured Eventlet-capable interface, see also [Spawning](http://pypi.python.org/pypi/Spawning/).
|
[
"Eventlet",
"-",
"based",
"WSGI",
"-",
"HTTP",
"server",
".",
"For",
"a",
"more",
"fully",
"-",
"featured",
"Eventlet",
"-",
"capable",
"interface",
"see",
"also",
"[",
"Spawning",
"]",
"(",
"http",
":",
"//",
"pypi",
".",
"python",
".",
"org",
"/",
"pypi",
"/",
"Spawning",
"/",
")",
"."
] |
python
|
train
|
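For comparison, a self-contained sketch using eventlet's documented API directly (note that `eventlet.listen` takes an `(address, port)` tuple); the trivial WSGI app here is purely illustrative:

```python
import eventlet
from eventlet import wsgi

def application(environ, start_response):
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'hello\n']

# Blocks and serves until interrupted.
wsgi.server(eventlet.listen(('127.0.0.1', 8080)), application)
```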
nccgroup/opinel
|
opinel/utils/aws.py
|
https://github.com/nccgroup/opinel/blob/2d4f5b96e0a1f9cb0356629f4f87e4ed99ce2606/opinel/utils/aws.py#L108-L139
|
def handle_truncated_response(callback, params, entities):
"""
Handle truncated responses
:param callback:
:param params:
:param entities:
:return:
"""
results = {}
for entity in entities:
results[entity] = []
while True:
try:
marker_found = False
response = callback(**params)
for entity in entities:
if entity in response:
results[entity] = results[entity] + response[entity]
for marker_name in ['NextToken', 'Marker', 'PaginationToken']:
if marker_name in response and response[marker_name]:
params[marker_name] = response[marker_name]
marker_found = True
if not marker_found:
break
except Exception as e:
if is_throttled(e):
time.sleep(1)
else:
raise e
return results
|
[
"def",
"handle_truncated_response",
"(",
"callback",
",",
"params",
",",
"entities",
")",
":",
"results",
"=",
"{",
"}",
"for",
"entity",
"in",
"entities",
":",
"results",
"[",
"entity",
"]",
"=",
"[",
"]",
"while",
"True",
":",
"try",
":",
"marker_found",
"=",
"False",
"response",
"=",
"callback",
"(",
"*",
"*",
"params",
")",
"for",
"entity",
"in",
"entities",
":",
"if",
"entity",
"in",
"response",
":",
"results",
"[",
"entity",
"]",
"=",
"results",
"[",
"entity",
"]",
"+",
"response",
"[",
"entity",
"]",
"for",
"marker_name",
"in",
"[",
"'NextToken'",
",",
"'Marker'",
",",
"'PaginationToken'",
"]",
":",
"if",
"marker_name",
"in",
"response",
"and",
"response",
"[",
"marker_name",
"]",
":",
"params",
"[",
"marker_name",
"]",
"=",
"response",
"[",
"marker_name",
"]",
"marker_found",
"=",
"True",
"if",
"not",
"marker_found",
":",
"break",
"except",
"Exception",
"as",
"e",
":",
"if",
"is_throttled",
"(",
"e",
")",
":",
"time",
".",
"sleep",
"(",
"1",
")",
"else",
":",
"raise",
"e",
"return",
"results"
] |
Handle truncated responses
:param callback:
:param params:
:param entities:
:return:
|
[
"Handle",
"truncated",
"responses"
] |
python
|
train
|
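A hypothetical usage sketch of `handle_truncated_response` with boto3 (assumes the function above is importable and AWS credentials are configured; IAM's `list_users` paginates via `Marker` and returns results under the `Users` key):

```python
import boto3
from opinel.utils.aws import handle_truncated_response

iam = boto3.client('iam')
# Collect every page of IAM users into one dict: {'Users': [...]}
users = handle_truncated_response(iam.list_users, {}, ['Users'])['Users']
print('%d users' % len(users))
```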
fkarb/xltable
|
xltable/workbook.py
|
https://github.com/fkarb/xltable/blob/7a592642d27ad5ee90d2aa8c26338abaa9d84bea/xltable/workbook.py#L62-L80
|
def to_xlsx(self, **kwargs):
"""
Write workbook to a .xlsx file using xlsxwriter.
Return a xlsxwriter.workbook.Workbook.
:param kwargs: Extra arguments passed to the xlsxwriter.Workbook
constructor.
"""
from xlsxwriter.workbook import Workbook as _Workbook
self.workbook_obj = _Workbook(**kwargs)
self.workbook_obj.set_calc_mode(self.calc_mode)
for worksheet in self.itersheets():
worksheet.to_xlsx(workbook=self)
self.workbook_obj.filename = self.filename
if self.filename:
self.workbook_obj.close()
return self.workbook_obj
|
[
"def",
"to_xlsx",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"xlsxwriter",
".",
"workbook",
"import",
"Workbook",
"as",
"_Workbook",
"self",
".",
"workbook_obj",
"=",
"_Workbook",
"(",
"*",
"*",
"kwargs",
")",
"self",
".",
"workbook_obj",
".",
"set_calc_mode",
"(",
"self",
".",
"calc_mode",
")",
"for",
"worksheet",
"in",
"self",
".",
"itersheets",
"(",
")",
":",
"worksheet",
".",
"to_xlsx",
"(",
"workbook",
"=",
"self",
")",
"self",
".",
"workbook_obj",
".",
"filename",
"=",
"self",
".",
"filename",
"if",
"self",
".",
"filename",
":",
"self",
".",
"workbook_obj",
".",
"close",
"(",
")",
"return",
"self",
".",
"workbook_obj"
] |
Write workbook to a .xlsx file using xlsxwriter.
Return a xlsxwriter.workbook.Workbook.
:param kwargs: Extra arguments passed to the xlsxwriter.Workbook
constructor.
|
[
"Write",
"workbook",
"to",
"a",
".",
"xlsx",
"file",
"using",
"xlsxwriter",
".",
"Return",
"a",
"xlsxwriter",
".",
"workbook",
".",
"Workbook",
"."
] |
python
|
train
|
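The method above delegates to xlsxwriter; for reference, a minimal sketch of the underlying xlsxwriter pattern it wraps (this is xlsxwriter's own API, not xltable's):

```python
from xlsxwriter.workbook import Workbook

wb = Workbook('example.xlsx')  # what _Workbook(**kwargs) constructs above
ws = wb.add_worksheet('Sheet1')
ws.write(0, 0, 'hello')
wb.close()                     # to_xlsx() closes only when self.filename is set
```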
quantmind/dynts
|
dynts/lib/fallback/maths.py
|
https://github.com/quantmind/dynts/blob/21ac57c648bfec402fa6b1fe569496cf098fb5e8/dynts/lib/fallback/maths.py#L3-L33
|
def bindata(data, maxbins = 30, reduction = 0.1):
'''
data must be numeric list with a len above 20
This function counts the number of data points in a reduced array
'''
tole = 0.01
N = len(data)
assert N > 20
vmin = min(data)
vmax = max(data)
DV = vmax - vmin
tol = tole*DV
vmax += tol
if vmin >= 0:
vmin -= tol
vmin = max(0.0,vmin)
else:
vmin -= tol
n = min(maxbins,max(2,int(round(reduction*N))))
DV = vmax - vmin
bbin = npy.linspace(vmin,vmax,n+1)
sso = npy.searchsorted(bbin,npy.sort(data))
x = []
y = []
for i in range(0,n):
x.append(0.5*(bbin[i+1]+bbin[i]))
y.append(0.0)
dy = 1.0/N
for i in sso:
y[i-1] += dy/(bbin[i]-bbin[i-1])
return (x,y)
|
[
"def",
"bindata",
"(",
"data",
",",
"maxbins",
"=",
"30",
",",
"reduction",
"=",
"0.1",
")",
":",
"tole",
"=",
"0.01",
"N",
"=",
"len",
"(",
"data",
")",
"assert",
"N",
">",
"20",
"vmin",
"=",
"min",
"(",
"data",
")",
"vmax",
"=",
"max",
"(",
"data",
")",
"DV",
"=",
"vmax",
"-",
"vmin",
"tol",
"=",
"tole",
"*",
"DV",
"vmax",
"+=",
"tol",
"if",
"vmin",
">=",
"0",
":",
"vmin",
"-=",
"tol",
"vmin",
"=",
"max",
"(",
"0.0",
",",
"vmin",
")",
"else",
":",
"vmin",
"-=",
"tol",
"n",
"=",
"min",
"(",
"maxbins",
",",
"max",
"(",
"2",
",",
"int",
"(",
"round",
"(",
"reduction",
"*",
"N",
")",
")",
")",
")",
"DV",
"=",
"vmax",
"-",
"vmin",
"bbin",
"=",
"npy",
".",
"linspace",
"(",
"vmin",
",",
"vmax",
",",
"n",
"+",
"1",
")",
"sso",
"=",
"npy",
".",
"searchsorted",
"(",
"bbin",
",",
"npy",
".",
"sort",
"(",
"data",
")",
")",
"x",
"=",
"[",
"]",
"y",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"n",
")",
":",
"x",
".",
"append",
"(",
"0.5",
"*",
"(",
"bbin",
"[",
"i",
"+",
"1",
"]",
"+",
"bbin",
"[",
"i",
"]",
")",
")",
"y",
".",
"append",
"(",
"0.0",
")",
"dy",
"=",
"1.0",
"/",
"N",
"for",
"i",
"in",
"sso",
":",
"y",
"[",
"i",
"-",
"1",
"]",
"+=",
"dy",
"/",
"(",
"bbin",
"[",
"i",
"]",
"-",
"bbin",
"[",
"i",
"-",
"1",
"]",
")",
"return",
"(",
"x",
",",
"y",
")"
] |
data must be numeric list with a len above 20
This function counts the number of data points in a reduced array
|
[
"data",
"must",
"be",
"numeric",
"list",
"with",
"a",
"len",
"above",
"20",
"This",
"function",
"counts",
"the",
"number",
"of",
"data",
"points",
"in",
"a",
"reduced",
"array"
] |
python
|
train
|
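Apart from the 1% tolerance padding of the range, `bindata` computes a density-normalized histogram over roughly `reduction * len(data)` bins. A numpy sketch of the equivalent (assuming `npy` above is numpy):

```python
import numpy as np

data = np.random.default_rng(1).normal(size=200).tolist()
n = min(30, max(2, int(round(0.1 * len(data)))))      # same bin-count rule as bindata
y, edges = np.histogram(data, bins=n, density=True)   # density == dy / bin width, summed
x = 0.5 * (edges[1:] + edges[:-1])                    # bin mid-points, as in bindata
print(list(zip(x, y))[:3])
```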
biocore/burrito-fillings
|
bfillings/mafft.py
|
https://github.com/biocore/burrito-fillings/blob/02ab71a46119b40793bd56a4ae00ca15f6dc3329/bfillings/mafft.py#L411-L470
|
def align_two_alignments(aln1, aln2, moltype, params=None):
"""Returns an Alignment object from two existing Alignments.
aln1, aln2: cogent.core.alignment.Alignment objects, or data that can be
used to build them.
- Mafft profile alignment only works with aligned sequences. Alignment
object used to handle unaligned sequences.
params: dict of parameters to pass in to the Mafft app controller.
"""
#create SequenceCollection object from seqs
aln1 = Alignment(aln1,MolType=moltype)
#Create mapping between abbreviated IDs and full IDs
aln1_int_map, aln1_int_keys = aln1.getIntMap()
#Create SequenceCollection from int_map.
aln1_int_map = Alignment(aln1_int_map,MolType=moltype)
#create Alignment object from aln
aln2 = Alignment(aln2,MolType=moltype)
#Create mapping between abbreviated IDs and full IDs
aln2_int_map, aln2_int_keys = aln2.getIntMap(prefix='seqn_')
#Create SequenceCollection from int_map.
aln2_int_map = Alignment(aln2_int_map,MolType=moltype)
#Update aln1_int_keys with aln2_int_keys
aln1_int_keys.update(aln2_int_keys)
#Create Mafft app.
app = Mafft(InputHandler='_input_as_paths',\
params=params,
SuppressStderr=False)
app._command = 'mafft-profile'
aln1_path = app._tempfile_as_multiline_string(aln1_int_map.toFasta())
aln2_path = app._tempfile_as_multiline_string(aln2_int_map.toFasta())
filepaths = [aln1_path,aln2_path]
#Get results using int_map as input to app
res = app(filepaths)
#Get alignment as dict out of results
alignment = dict(parse_fasta(res['StdOut']))
#Make new dict mapping original IDs
new_alignment = {}
for k,v in alignment.items():
key = k.replace('_seed_','')
new_alignment[aln1_int_keys[key]]=v
#Create an Alignment object from alignment dict
new_alignment = Alignment(new_alignment,MolType=moltype)
#Clean up
res.cleanUp()
remove(aln1_path)
remove(aln2_path)
remove('pre')
remove('trace')
del(aln1,aln1_int_map,aln1_int_keys,\
aln2,aln2_int_map,aln2_int_keys,app,res,alignment)
return new_alignment
|
[
"def",
"align_two_alignments",
"(",
"aln1",
",",
"aln2",
",",
"moltype",
",",
"params",
"=",
"None",
")",
":",
"#create SequenceCollection object from seqs",
"aln1",
"=",
"Alignment",
"(",
"aln1",
",",
"MolType",
"=",
"moltype",
")",
"#Create mapping between abbreviated IDs and full IDs",
"aln1_int_map",
",",
"aln1_int_keys",
"=",
"aln1",
".",
"getIntMap",
"(",
")",
"#Create SequenceCollection from int_map.",
"aln1_int_map",
"=",
"Alignment",
"(",
"aln1_int_map",
",",
"MolType",
"=",
"moltype",
")",
"#create Alignment object from aln",
"aln2",
"=",
"Alignment",
"(",
"aln2",
",",
"MolType",
"=",
"moltype",
")",
"#Create mapping between abbreviated IDs and full IDs",
"aln2_int_map",
",",
"aln2_int_keys",
"=",
"aln2",
".",
"getIntMap",
"(",
"prefix",
"=",
"'seqn_'",
")",
"#Create SequenceCollection from int_map.",
"aln2_int_map",
"=",
"Alignment",
"(",
"aln2_int_map",
",",
"MolType",
"=",
"moltype",
")",
"#Update aln1_int_keys with aln2_int_keys",
"aln1_int_keys",
".",
"update",
"(",
"aln2_int_keys",
")",
"#Create Mafft app.",
"app",
"=",
"Mafft",
"(",
"InputHandler",
"=",
"'_input_as_paths'",
",",
"params",
"=",
"params",
",",
"SuppressStderr",
"=",
"False",
")",
"app",
".",
"_command",
"=",
"'mafft-profile'",
"aln1_path",
"=",
"app",
".",
"_tempfile_as_multiline_string",
"(",
"aln1_int_map",
".",
"toFasta",
"(",
")",
")",
"aln2_path",
"=",
"app",
".",
"_tempfile_as_multiline_string",
"(",
"aln2_int_map",
".",
"toFasta",
"(",
")",
")",
"filepaths",
"=",
"[",
"aln1_path",
",",
"aln2_path",
"]",
"#Get results using int_map as input to app",
"res",
"=",
"app",
"(",
"filepaths",
")",
"#Get alignment as dict out of results",
"alignment",
"=",
"dict",
"(",
"parse_fasta",
"(",
"res",
"[",
"'StdOut'",
"]",
")",
")",
"#Make new dict mapping original IDs",
"new_alignment",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"alignment",
".",
"items",
"(",
")",
":",
"key",
"=",
"k",
".",
"replace",
"(",
"'_seed_'",
",",
"''",
")",
"new_alignment",
"[",
"aln1_int_keys",
"[",
"key",
"]",
"]",
"=",
"v",
"#Create an Alignment object from alignment dict",
"new_alignment",
"=",
"Alignment",
"(",
"new_alignment",
",",
"MolType",
"=",
"moltype",
")",
"#Clean up",
"res",
".",
"cleanUp",
"(",
")",
"remove",
"(",
"aln1_path",
")",
"remove",
"(",
"aln2_path",
")",
"remove",
"(",
"'pre'",
")",
"remove",
"(",
"'trace'",
")",
"del",
"(",
"aln1",
",",
"aln1_int_map",
",",
"aln1_int_keys",
",",
"aln2",
",",
"aln2_int_map",
",",
"aln2_int_keys",
",",
"app",
",",
"res",
",",
"alignment",
")",
"return",
"new_alignment"
] |
Returns an Alignment object from two existing Alignments.
aln1, aln2: cogent.core.alignment.Alignment objects, or data that can be
used to build them.
- Mafft profile alignment only works with aligned sequences. Alignment
object used to handle unaligned sequences.
params: dict of parameters to pass in to the Mafft app controller.
|
[
"Returns",
"an",
"Alignment",
"object",
"from",
"two",
"existing",
"Alignments",
"."
] |
python
|
train
|
plivo/plivohelper-python
|
plivohelper.py
|
https://github.com/plivo/plivohelper-python/blob/a2f706d69e2138fbb973f792041341f662072d26/plivohelper.py#L181-L186
|
def hangup_all_calls(self):
"""REST Hangup All Live Calls Helper
"""
path = '/' + self.api_version + '/HangupAllCalls/'
method = 'POST'
return self.request(path, method)
|
[
"def",
"hangup_all_calls",
"(",
"self",
")",
":",
"path",
"=",
"'/'",
"+",
"self",
".",
"api_version",
"+",
"'/HangupAllCalls/'",
"method",
"=",
"'POST'",
"return",
"self",
".",
"request",
"(",
"path",
",",
"method",
")"
] |
REST Hangup All Live Calls Helper
|
[
"REST",
"Hangup",
"All",
"Live",
"Calls",
"Helper"
] |
python
|
valid
|
opencobra/cobrapy
|
cobra/io/sbml.py
|
https://github.com/opencobra/cobrapy/blob/9d1987cdb3a395cf4125a3439c3b002ff2be2009/cobra/io/sbml.py#L100-L102
|
def _clip(sid, prefix):
"""Clips a prefix from the beginning of a string if it exists."""
return sid[len(prefix):] if sid.startswith(prefix) else sid
|
[
"def",
"_clip",
"(",
"sid",
",",
"prefix",
")",
":",
"return",
"sid",
"[",
"len",
"(",
"prefix",
")",
":",
"]",
"if",
"sid",
".",
"startswith",
"(",
"prefix",
")",
"else",
"sid"
] |
Clips a prefix from the beginning of a string if it exists.
|
[
"Clips",
"a",
"prefix",
"from",
"the",
"beginning",
"of",
"a",
"string",
"if",
"it",
"exists",
"."
] |
python
|
valid
|
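`_clip` is small enough to verify inline; a self-contained doctest-style check of both branches:

```python
def _clip(sid, prefix):
    """Clips a prefix from the beginning of a string if it exists."""
    return sid[len(prefix):] if sid.startswith(prefix) else sid

assert _clip('R_ATPM', 'R_') == 'ATPM'  # prefix present: stripped
assert _clip('ATPM', 'R_') == 'ATPM'    # prefix absent: unchanged
```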
Qiskit/qiskit-terra
|
qiskit/pulse/samplers/decorators.py
|
https://github.com/Qiskit/qiskit-terra/blob/d4f58d903bc96341b816f7c35df936d6421267d1/qiskit/pulse/samplers/decorators.py#L184-L227
|
def sampler(sample_function: Callable) -> Callable:
"""Sampler decorator base method.
Samplers are used for converting a continuous function to a discretized pulse.
They operate on a function with the signature:
`def f(times: np.ndarray, *args, **kwargs) -> np.ndarray`
Where `times` is a numpy array of floats with length n_times and the output array
is a complex numpy array with length n_times. The output of the decorator is an
instance of `FunctionalPulse` with signature:
`def g(duration: int, *args, **kwargs) -> SamplePulse`
Note if your continuous pulse function outputs a `complex` scalar rather than a
`np.ndarray`, you should first vectorize it before applying a sampler.
This function implements the boilerplate shared by all samplers.
Args:
sample_function: A sampler function to be decorated.
"""
def generate_sampler(continuous_pulse: Callable) -> Callable:
"""Return a decorated sampler function."""
@functools.wraps(continuous_pulse)
def call_sampler(duration: int, *args, **kwargs) -> commands.SamplePulse:
"""Replace the call to the continuous function with a call to the sampler applied
to the analytic pulse function."""
sampled_pulse = sample_function(continuous_pulse, duration, *args, **kwargs)
return np.asarray(sampled_pulse, dtype=np.complex_)
# Update type annotations for wrapped continuous function to be discrete
call_sampler = _update_annotations(call_sampler)
# Update docstring with that of the sampler and include sampled function documentation.
call_sampler = _update_docstring(call_sampler, sample_function)
# Unset wrapped to return base sampler signature
# but still get rest of benefits of wraps
# such as __name__, __qualname__
call_sampler.__dict__.pop('__wrapped__')
# wrap with functional pulse
return commands.functional_pulse(call_sampler)
return generate_sampler
|
[
"def",
"sampler",
"(",
"sample_function",
":",
"Callable",
")",
"->",
"Callable",
":",
"def",
"generate_sampler",
"(",
"continuous_pulse",
":",
"Callable",
")",
"->",
"Callable",
":",
"\"\"\"Return a decorated sampler function.\"\"\"",
"@",
"functools",
".",
"wraps",
"(",
"continuous_pulse",
")",
"def",
"call_sampler",
"(",
"duration",
":",
"int",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"->",
"commands",
".",
"SamplePulse",
":",
"\"\"\"Replace the call to the continuous function with a call to the sampler applied\n to the anlytic pulse function.\"\"\"",
"sampled_pulse",
"=",
"sample_function",
"(",
"continuous_pulse",
",",
"duration",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"np",
".",
"asarray",
"(",
"sampled_pulse",
",",
"dtype",
"=",
"np",
".",
"complex_",
")",
"# Update type annotations for wrapped continuous function to be discrete",
"call_sampler",
"=",
"_update_annotations",
"(",
"call_sampler",
")",
"# Update docstring with that of the sampler and include sampled function documentation.",
"call_sampler",
"=",
"_update_docstring",
"(",
"call_sampler",
",",
"sample_function",
")",
"# Unset wrapped to return base sampler signature",
"# but still get rest of benefits of wraps",
"# such as __name__, __qualname__",
"call_sampler",
".",
"__dict__",
".",
"pop",
"(",
"'__wrapped__'",
")",
"# wrap with functional pulse",
"return",
"commands",
".",
"functional_pulse",
"(",
"call_sampler",
")",
"return",
"generate_sampler"
] |
Sampler decorator base method.
Samplers are used for converting a continuous function to a discretized pulse.
They operate on a function with the signature:
`def f(times: np.ndarray, *args, **kwargs) -> np.ndarray`
Where `times` is a numpy array of floats with length n_times and the output array
is a complex numpy array with length n_times. The output of the decorator is an
instance of `FunctionalPulse` with signature:
`def g(duration: int, *args, **kwargs) -> SamplePulse`
Note if your continuous pulse function outputs a `complex` scalar rather than a
`np.ndarray`, you should first vectorize it before applying a sampler.
This function implements the boilerplate shared by all samplers.
Args:
sample_function: A sampler function to be decorated.
|
[
"Sampler",
"decorator",
"base",
"method",
"."
] |
python
|
test
|
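To make the decorator contract concrete, here is a simplified stand-in for a sampler function (left-point sampling at integer times); the names and the Gaussian are illustrative, not Qiskit's implementation:

```python
import numpy as np

def left(continuous_pulse, duration, *args, **kwargs):
    """Toy sampler: evaluate the continuous function at t = 0, 1, ..., duration-1."""
    times = np.arange(duration, dtype=float)
    return continuous_pulse(times, *args, **kwargs)

def gaussian(times, amp, center, sigma):
    return amp * np.exp(-(times - center) ** 2 / (2 * sigma ** 2))

samples = np.asarray(left(gaussian, 10, amp=1.0, center=5.0, sigma=2.0), dtype=complex)
print(samples)  # complex array of length 10, as call_sampler returns above
```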
python-xlib/python-xlib
|
Xlib/display.py
|
https://github.com/python-xlib/python-xlib/blob/8901e831737e79fe5645f48089d70e1d1046d2f2/Xlib/display.py#L637-L643
|
def list_fonts(self, pattern, max_names):
"""Return a list of font names matching pattern. No more than
max_names will be returned."""
r = request.ListFonts(display = self.display,
max_names = max_names,
pattern = pattern)
return r.fonts
|
[
"def",
"list_fonts",
"(",
"self",
",",
"pattern",
",",
"max_names",
")",
":",
"r",
"=",
"request",
".",
"ListFonts",
"(",
"display",
"=",
"self",
".",
"display",
",",
"max_names",
"=",
"max_names",
",",
"pattern",
"=",
"pattern",
")",
"return",
"r",
".",
"fonts"
] |
Return a list of font names matching pattern. No more than
max_names will be returned.
|
[
"Return",
"a",
"list",
"of",
"font",
"names",
"matching",
"pattern",
".",
"No",
"more",
"than",
"max_names",
"will",
"be",
"returned",
"."
] |
python
|
train
|
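A short usage sketch of `list_fonts` (requires a running X server; the pattern follows X logical font description wildcards):

```python
from Xlib import display

d = display.Display()
for name in d.list_fonts(pattern='-*-fixed-*', max_names=5):
    print(name)
```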
phoebe-project/phoebe2
|
phoebe/utils/__init__.py
|
https://github.com/phoebe-project/phoebe2/blob/e64b8be683977064e2d55dd1b3ac400f64c3e379/phoebe/utils/__init__.py#L3-L97
|
def get_basic_logger(clevel='WARNING',flevel='DEBUG',
style="default",filename=None,filemode='w'):
"""
Return a basic logger via a log file and/or terminal.
Example 1: log only to the console, accepting levels "INFO" and above
>>> logger = utils.get_basic_logger()
Example 2: log only to the console, accepting levels "DEBUG" and above
>>> logger = utils.get_basic_logger(clevel='DEBUG')
Example 3: log only to a file, accepting levels "DEBUG" and above
>>> logger = utils.get_basic_logger(clevel=None,filename='mylog.log')
Example 4: log only to a file, accepting levels "INFO" and above
>>> logger = utils.get_basic_logger(clevel=None,flevel='INFO',filename='mylog.log')
Example 5: log to the terminal (INFO and above) and file (DEBUG and above)
>>> logger = utils.get_basic_logger(filename='mylog.log')
The different logging styles are:
C{style='default'}::
Wed, 13 Feb 2013 08:47 root INFO Some information
C{style='grandpa'}::
# INFO Some information
C{style='minimal'}::
Some information
@param style: logger style
@type style: str, one of 'default','grandpa','minimal'
"""
name = ""
#-- define formats
if style=='default':
format = '%(asctime)s %(name)-12s %(levelname)-7s %(message)s'
datefmt = '%a, %d %b %Y %H:%M'
elif style=='grandpa':
format = '# %(levelname)-7s %(message)s'
datefmt = '%a, %d %b %Y %H:%M'
elif style=='minimal':
format = ''
datefmt = '%a, %d %b %Y %H:%M'
if style=='trace':
formatter = MyFormatter()
else:
formatter = logging.Formatter(fmt=format,datefmt=datefmt)
if clevel: clevel = logging.__dict__[clevel.upper()]
if flevel: flevel = logging.__dict__[flevel.upper()]
#-- set up basic configuration.
# The basicConfig sets up one default logger. If you give a filename, it's
# a FileHandler, otherwise a StreamHandler.
#-- If we want console and filename, first set up a basic FileHandler, then
# add terminal StreamHandler
if filename is not None:
if flevel is None:
level = clevel
else:
level = flevel
logging.basicConfig(level=level,
format=format,datefmt=datefmt,
filename=filename,filemode=filemode)
fh = logging.FileHandler(filename)
fh.setLevel(flevel)
fh.setFormatter(formatter)
logging.getLogger(name).addHandler(fh)
if filename is not None and clevel:
# define a Handler which writes INFO messages or higher to the sys.stderr
ch = logging.StreamHandler()
ch.setLevel(clevel)
# tell the handler to use this format
ch.setFormatter(formatter)
logging.getLogger(name).addHandler(ch)
#-- If we only want a console:
else:
logging.basicConfig(level=clevel,format=format,datefmt=datefmt,
filename=filename,filemode=filemode)
#-- fix filename logging
if filename is not None:
logging.getLogger(name).handlers[0].level = flevel
return logging.getLogger(name)
|
[
"def",
"get_basic_logger",
"(",
"clevel",
"=",
"'WARNING'",
",",
"flevel",
"=",
"'DEBUG'",
",",
"style",
"=",
"\"default\"",
",",
"filename",
"=",
"None",
",",
"filemode",
"=",
"'w'",
")",
":",
"name",
"=",
"\"\"",
"#-- define formats",
"if",
"style",
"==",
"'default'",
":",
"format",
"=",
"'%(asctime)s %(name)-12s %(levelname)-7s %(message)s'",
"datefmt",
"=",
"'%a, %d %b %Y %H:%M'",
"elif",
"style",
"==",
"'grandpa'",
":",
"format",
"=",
"'# %(levelname)-7s %(message)s'",
"datefmt",
"=",
"'%a, %d %b %Y %H:%M'",
"elif",
"style",
"==",
"'minimal'",
":",
"format",
"=",
"''",
"datefmt",
"=",
"'%a, %d %b %Y %H:%M'",
"if",
"style",
"==",
"'trace'",
":",
"formatter",
"=",
"MyFormatter",
"(",
")",
"else",
":",
"formatter",
"=",
"logging",
".",
"Formatter",
"(",
"fmt",
"=",
"format",
",",
"datefmt",
"=",
"datefmt",
")",
"if",
"clevel",
":",
"clevel",
"=",
"logging",
".",
"__dict__",
"[",
"clevel",
".",
"upper",
"(",
")",
"]",
"if",
"flevel",
":",
"flevel",
"=",
"logging",
".",
"__dict__",
"[",
"flevel",
".",
"upper",
"(",
")",
"]",
"#-- set up basic configuration.",
"# The basicConfig sets up one default logger. If you give a filename, it's",
"# a FileHandler, otherwise a StreamHandler.",
"#-- If we want console and filename, first set up a basic FileHandler, then",
"# add terminal StreamHandler",
"if",
"filename",
"is",
"not",
"None",
":",
"if",
"flevel",
"is",
"None",
":",
"level",
"=",
"clevel",
"else",
":",
"level",
"=",
"flevel",
"logging",
".",
"basicConfig",
"(",
"level",
"=",
"level",
",",
"format",
"=",
"format",
",",
"datefmt",
"=",
"datefmt",
",",
"filename",
"=",
"filename",
",",
"filemode",
"=",
"filemode",
")",
"fh",
"=",
"logging",
".",
"FileHandler",
"(",
"filename",
")",
"fh",
".",
"setLevel",
"(",
"flevel",
")",
"fh",
".",
"setFormatter",
"(",
"formatter",
")",
"logging",
".",
"getLogger",
"(",
"name",
")",
".",
"addHandler",
"(",
"fh",
")",
"if",
"filename",
"is",
"not",
"None",
"and",
"clevel",
":",
"# define a Handler which writes INFO messages or higher to the sys.stderr",
"ch",
"=",
"logging",
".",
"StreamHandler",
"(",
")",
"ch",
".",
"setLevel",
"(",
"clevel",
")",
"# tell the handler to use this format",
"ch",
".",
"setFormatter",
"(",
"formatter",
")",
"logging",
".",
"getLogger",
"(",
"name",
")",
".",
"addHandler",
"(",
"ch",
")",
"#-- If we only want a console:",
"else",
":",
"logging",
".",
"basicConfig",
"(",
"level",
"=",
"clevel",
",",
"format",
"=",
"format",
",",
"datefmt",
"=",
"datefmt",
",",
"filename",
"=",
"filename",
",",
"filemode",
"=",
"filemode",
")",
"#-- fix filename logging",
"if",
"filename",
"is",
"not",
"None",
":",
"logging",
".",
"getLogger",
"(",
"name",
")",
".",
"handlers",
"[",
"0",
"]",
".",
"level",
"=",
"flevel",
"return",
"logging",
".",
"getLogger",
"(",
"name",
")"
] |
Return a basic logger via a log file and/or terminal.
Example 1: log only to the console, accepting levels "INFO" and above
>>> logger = utils.get_basic_logger()
Example 2: log only to the console, accepting levels "DEBUG" and above
>>> logger = utils.get_basic_logger(clevel='DEBUG')
Example 3: log only to a file, accepting levels "DEBUG" and above
>>> logger = utils.get_basic_logger(clevel=None,filename='mylog.log')
Example 4: log only to a file, accepting levels "INFO" and above
>>> logger = utils.get_basic_logger(clevel=None,flevel='INFO',filename='mylog.log')
Example 5: log to the terminal (INFO and above) and file (DEBUG and above)
>>> logger = utils.get_basic_logger(filename='mylog.log')
The different logging styles are:
C{style='default'}::
Wed, 13 Feb 2013 08:47 root INFO Some information
C{style='grandpa'}::
# INFO Some information
C{style='minimal'}::
Some information
@param style: logger style
@type style: str, one of 'default','grandpa','minimal'
|
[
"Return",
"a",
"basic",
"logger",
"via",
"a",
"log",
"file",
"and",
"/",
"or",
"terminal",
"."
] |
python
|
train
|
icgood/pymap
|
pymap/flags.py
|
https://github.com/icgood/pymap/blob/e77d9a54d760e3cbe044a548883bb4299ed61dc2/pymap/flags.py#L149-L158
|
def remove(self, uids: Iterable[int]) -> None:
"""Remove any session flags for the given message.
Args:
uids: The message UID values.
"""
for uid in uids:
self._recent.discard(uid)
self._flags.pop(uid, None)
|
[
"def",
"remove",
"(",
"self",
",",
"uids",
":",
"Iterable",
"[",
"int",
"]",
")",
"->",
"None",
":",
"for",
"uid",
"in",
"uids",
":",
"self",
".",
"_recent",
".",
"discard",
"(",
"uid",
")",
"self",
".",
"_flags",
".",
"pop",
"(",
"uid",
",",
"None",
")"
] |
Remove any session flags for the given message.
Args:
uids: The message UID values.
|
[
"Remove",
"any",
"session",
"flags",
"for",
"the",
"given",
"message",
"."
] |
python
|
train
|
fitnr/convertdate
|
convertdate/gregorian.py
|
https://github.com/fitnr/convertdate/blob/e920f168a87f99183b0aa7290d6c3af222582d43/convertdate/gregorian.py#L80-L118
|
def from_jd(jd):
'''Return Gregorian date in a (Y, M, D) tuple'''
wjd = floor(jd - 0.5) + 0.5
depoch = wjd - EPOCH
quadricent = floor(depoch / INTERCALATION_CYCLE_DAYS)
dqc = depoch % INTERCALATION_CYCLE_DAYS
cent = floor(dqc / LEAP_SUPPRESSION_DAYS)
dcent = dqc % LEAP_SUPPRESSION_DAYS
quad = floor(dcent / LEAP_CYCLE_DAYS)
dquad = dcent % LEAP_CYCLE_DAYS
yindex = floor(dquad / YEAR_DAYS)
year = (
quadricent * INTERCALATION_CYCLE_YEARS +
cent * LEAP_SUPPRESSION_YEARS +
quad * LEAP_CYCLE_YEARS + yindex
)
if not (cent == 4 or yindex == 4):
year += 1
yearday = wjd - to_jd(year, 1, 1)
leap = isleap(year)
if yearday < 58 + leap:
leap_adj = 0
elif leap:
leap_adj = 1
else:
leap_adj = 2
month = floor((((yearday + leap_adj) * 12) + 373) / 367)
day = int(wjd - to_jd(year, month, 1)) + 1
return (year, month, day)
|
[
"def",
"from_jd",
"(",
"jd",
")",
":",
"wjd",
"=",
"floor",
"(",
"jd",
"-",
"0.5",
")",
"+",
"0.5",
"depoch",
"=",
"wjd",
"-",
"EPOCH",
"quadricent",
"=",
"floor",
"(",
"depoch",
"/",
"INTERCALATION_CYCLE_DAYS",
")",
"dqc",
"=",
"depoch",
"%",
"INTERCALATION_CYCLE_DAYS",
"cent",
"=",
"floor",
"(",
"dqc",
"/",
"LEAP_SUPPRESSION_DAYS",
")",
"dcent",
"=",
"dqc",
"%",
"LEAP_SUPPRESSION_DAYS",
"quad",
"=",
"floor",
"(",
"dcent",
"/",
"LEAP_CYCLE_DAYS",
")",
"dquad",
"=",
"dcent",
"%",
"LEAP_CYCLE_DAYS",
"yindex",
"=",
"floor",
"(",
"dquad",
"/",
"YEAR_DAYS",
")",
"year",
"=",
"(",
"quadricent",
"*",
"INTERCALATION_CYCLE_YEARS",
"+",
"cent",
"*",
"LEAP_SUPPRESSION_YEARS",
"+",
"quad",
"*",
"LEAP_CYCLE_YEARS",
"+",
"yindex",
")",
"if",
"not",
"(",
"cent",
"==",
"4",
"or",
"yindex",
"==",
"4",
")",
":",
"year",
"+=",
"1",
"yearday",
"=",
"wjd",
"-",
"to_jd",
"(",
"year",
",",
"1",
",",
"1",
")",
"leap",
"=",
"isleap",
"(",
"year",
")",
"if",
"yearday",
"<",
"58",
"+",
"leap",
":",
"leap_adj",
"=",
"0",
"elif",
"leap",
":",
"leap_adj",
"=",
"1",
"else",
":",
"leap_adj",
"=",
"2",
"month",
"=",
"floor",
"(",
"(",
"(",
"(",
"yearday",
"+",
"leap_adj",
")",
"*",
"12",
")",
"+",
"373",
")",
"/",
"367",
")",
"day",
"=",
"int",
"(",
"wjd",
"-",
"to_jd",
"(",
"year",
",",
"month",
",",
"1",
")",
")",
"+",
"1",
"return",
"(",
"year",
",",
"month",
",",
"day",
")"
] |
Return Gregorian date in a (Y, M, D) tuple
|
[
"Return",
"Gregorian",
"date",
"in",
"a",
"(",
"Y",
"M",
"D",
")",
"tuple"
] |
python
|
train
|
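A quick round-trip check for the convertdate record above (assumes the convertdate package is installed; to_jd lives in the same module as from_jd):

from convertdate import gregorian

jd = gregorian.to_jd(2019, 12, 31)
assert gregorian.from_jd(jd) == (2019, 12, 31)

# Julian day numbers roll over at noon UT, which is why from_jd starts by
# normalizing with floor(jd - 0.5) + 0.5.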
Chilipp/psy-simple
|
psy_simple/plotters.py
|
https://github.com/Chilipp/psy-simple/blob/7d916406a6d3c3c27c0b7102f98fef07a4da0a61/psy_simple/plotters.py#L3350-L3352
|
def ycoord(self):
"""The y coordinate :class:`xarray.Variable`"""
return self.decoder.get_y(self.data, coords=self.data.coords)
|
[
"def",
"ycoord",
"(",
"self",
")",
":",
"return",
"self",
".",
"decoder",
".",
"get_y",
"(",
"self",
".",
"data",
",",
"coords",
"=",
"self",
".",
"data",
".",
"coords",
")"
] |
The y coordinate :class:`xarray.Variable`
|
[
"The",
"y",
"coordinate",
":",
"class",
":",
"xarray",
".",
"Variable"
] |
python
|
train
|
pgmpy/pgmpy
|
pgmpy/factors/discrete/DiscreteFactor.py
|
https://github.com/pgmpy/pgmpy/blob/9381a66aba3c3871d3ccd00672b148d17d63239e/pgmpy/factors/discrete/DiscreteFactor.py#L682-L711
|
def copy(self):
"""
Returns a copy of the factor.
Returns
-------
DiscreteFactor: copy of the factor
Examples
--------
>>> import numpy as np
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> phi = DiscreteFactor(['x1', 'x2', 'x3'], [2, 3, 3], np.arange(18))
>>> phi_copy = phi.copy()
>>> phi_copy.variables
['x1', 'x2', 'x3']
>>> phi_copy.cardinality
array([2, 3, 3])
>>> phi_copy.values
array([[[ 0, 1, 2],
[ 3, 4, 5],
[ 6, 7, 8]],
[[ 9, 10, 11],
[12, 13, 14],
[15, 16, 17]]])
"""
# not creating a new copy of self.values and self.cardinality
# because the __init__ method does that.
return DiscreteFactor(self.scope(), self.cardinality, self.values, state_names=self.state_names)
|
[
"def",
"copy",
"(",
"self",
")",
":",
"# not creating a new copy of self.values and self.cardinality",
"# because __init__ methods does that.",
"return",
"DiscreteFactor",
"(",
"self",
".",
"scope",
"(",
")",
",",
"self",
".",
"cardinality",
",",
"self",
".",
"values",
",",
"state_names",
"=",
"self",
".",
"state_names",
")"
] |
Returns a copy of the factor.
Returns
-------
DiscreteFactor: copy of the factor
Examples
--------
>>> import numpy as np
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> phi = DiscreteFactor(['x1', 'x2', 'x3'], [2, 3, 3], np.arange(18))
>>> phi_copy = phi.copy()
>>> phi_copy.variables
['x1', 'x2', 'x3']
>>> phi_copy.cardinality
array([2, 3, 3])
>>> phi_copy.values
array([[[ 0, 1, 2],
[ 3, 4, 5],
[ 6, 7, 8]],
[[ 9, 10, 11],
[12, 13, 14],
[15, 16, 17]]])
|
[
"Returns",
"a",
"copy",
"of",
"the",
"factor",
"."
] |
python
|
train
|
nccgroup/Scout2
|
AWSScout2/services/emr.py
|
https://github.com/nccgroup/Scout2/blob/5d86d46d7ed91a92000496189e9cfa6b98243937/AWSScout2/services/emr.py#L19-L33
|
def parse_cluster(self, global_params, region, cluster):
"""
Parse a single EMR cluster
:param global_params: Parameters shared for all regions
:param region: Name of the AWS region
:param cluster: EMR cluster
"""
cluster_id = cluster['Id']
cluster = api_clients[region].describe_cluster(ClusterId = cluster_id)['Cluster']
cluster['id'] = cluster.pop('Id')
cluster['name'] = cluster.pop('Name')
vpc_id = 'TODO' # The EMR API won't disclose the VPC ID, so wait until all configs have been fetched and look up the VPC based on the subnet ID
manage_dictionary(self.vpcs, vpc_id, VPCConfig(self.vpc_resource_types))
self.vpcs[vpc_id].clusters[cluster_id] = cluster
|
[
"def",
"parse_cluster",
"(",
"self",
",",
"global_params",
",",
"region",
",",
"cluster",
")",
":",
"cluster_id",
"=",
"cluster",
"[",
"'Id'",
"]",
"cluster",
"=",
"api_clients",
"[",
"region",
"]",
".",
"describe_cluster",
"(",
"ClusterId",
"=",
"cluster_id",
")",
"[",
"'Cluster'",
"]",
"cluster",
"[",
"'id'",
"]",
"=",
"cluster",
".",
"pop",
"(",
"'Id'",
")",
"cluster",
"[",
"'name'",
"]",
"=",
"cluster",
".",
"pop",
"(",
"'Name'",
")",
"vpc_id",
"=",
"'TODO'",
"# The EMR API won't disclose the VPC ID, so wait until all configs have been fetch and look up the VPC based on the subnet ID",
"manage_dictionary",
"(",
"self",
".",
"vpcs",
",",
"vpc_id",
",",
"VPCConfig",
"(",
"self",
".",
"vpc_resource_types",
")",
")",
"self",
".",
"vpcs",
"[",
"vpc_id",
"]",
".",
"clusters",
"[",
"cluster_id",
"]",
"=",
"cluster"
] |
Parse a single EMR cluster
:param global_params: Parameters shared for all regions
:param region: Name of the AWS region
:param cluster: EMR cluster
|
[
"Parse",
"a",
"single",
"EMR",
"cluster"
] |
python
|
train
|
tornadoweb/tornado
|
tornado/http1connection.py
|
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/http1connection.py#L376-L465
|
def write_headers(
self,
start_line: Union[httputil.RequestStartLine, httputil.ResponseStartLine],
headers: httputil.HTTPHeaders,
chunk: bytes = None,
) -> "Future[None]":
"""Implements `.HTTPConnection.write_headers`."""
lines = []
if self.is_client:
assert isinstance(start_line, httputil.RequestStartLine)
self._request_start_line = start_line
lines.append(utf8("%s %s HTTP/1.1" % (start_line[0], start_line[1])))
# Client requests with a non-empty body must have either a
# Content-Length or a Transfer-Encoding.
self._chunking_output = (
start_line.method in ("POST", "PUT", "PATCH")
and "Content-Length" not in headers
and (
"Transfer-Encoding" not in headers
or headers["Transfer-Encoding"] == "chunked"
)
)
else:
assert isinstance(start_line, httputil.ResponseStartLine)
assert self._request_start_line is not None
assert self._request_headers is not None
self._response_start_line = start_line
lines.append(utf8("HTTP/1.1 %d %s" % (start_line[1], start_line[2])))
self._chunking_output = (
# TODO: should this use
# self._request_start_line.version or
# start_line.version?
self._request_start_line.version == "HTTP/1.1"
# 1xx, 204 and 304 responses have no body (not even a zero-length
# body), and so should not have either Content-Length or
# Transfer-Encoding headers.
and start_line.code not in (204, 304)
and (start_line.code < 100 or start_line.code >= 200)
# No need to chunk the output if a Content-Length is specified.
and "Content-Length" not in headers
# Applications are discouraged from touching Transfer-Encoding,
# but if they do, leave it alone.
and "Transfer-Encoding" not in headers
)
# If connection to a 1.1 client will be closed, inform client
if (
self._request_start_line.version == "HTTP/1.1"
and self._disconnect_on_finish
):
headers["Connection"] = "close"
# If a 1.0 client asked for keep-alive, add the header.
if (
self._request_start_line.version == "HTTP/1.0"
and self._request_headers.get("Connection", "").lower() == "keep-alive"
):
headers["Connection"] = "Keep-Alive"
if self._chunking_output:
headers["Transfer-Encoding"] = "chunked"
if not self.is_client and (
self._request_start_line.method == "HEAD"
or cast(httputil.ResponseStartLine, start_line).code == 304
):
self._expected_content_remaining = 0
elif "Content-Length" in headers:
self._expected_content_remaining = int(headers["Content-Length"])
else:
self._expected_content_remaining = None
# TODO: headers are supposed to be of type str, but we still have some
# cases that let bytes slip through. Remove these native_str calls when those
# are fixed.
header_lines = (
native_str(n) + ": " + native_str(v) for n, v in headers.get_all()
)
lines.extend(l.encode("latin1") for l in header_lines)
for line in lines:
if b"\n" in line:
raise ValueError("Newline in header: " + repr(line))
future = None
if self.stream.closed():
future = self._write_future = Future()
future.set_exception(iostream.StreamClosedError())
future.exception()
else:
future = self._write_future = Future()
data = b"\r\n".join(lines) + b"\r\n\r\n"
if chunk:
data += self._format_chunk(chunk)
self._pending_write = self.stream.write(data)
future_add_done_callback(self._pending_write, self._on_write_complete)
return future
|
[
"def",
"write_headers",
"(",
"self",
",",
"start_line",
":",
"Union",
"[",
"httputil",
".",
"RequestStartLine",
",",
"httputil",
".",
"ResponseStartLine",
"]",
",",
"headers",
":",
"httputil",
".",
"HTTPHeaders",
",",
"chunk",
":",
"bytes",
"=",
"None",
",",
")",
"->",
"\"Future[None]\"",
":",
"lines",
"=",
"[",
"]",
"if",
"self",
".",
"is_client",
":",
"assert",
"isinstance",
"(",
"start_line",
",",
"httputil",
".",
"RequestStartLine",
")",
"self",
".",
"_request_start_line",
"=",
"start_line",
"lines",
".",
"append",
"(",
"utf8",
"(",
"\"%s %s HTTP/1.1\"",
"%",
"(",
"start_line",
"[",
"0",
"]",
",",
"start_line",
"[",
"1",
"]",
")",
")",
")",
"# Client requests with a non-empty body must have either a",
"# Content-Length or a Transfer-Encoding.",
"self",
".",
"_chunking_output",
"=",
"(",
"start_line",
".",
"method",
"in",
"(",
"\"POST\"",
",",
"\"PUT\"",
",",
"\"PATCH\"",
")",
"and",
"\"Content-Length\"",
"not",
"in",
"headers",
"and",
"(",
"\"Transfer-Encoding\"",
"not",
"in",
"headers",
"or",
"headers",
"[",
"\"Transfer-Encoding\"",
"]",
"==",
"\"chunked\"",
")",
")",
"else",
":",
"assert",
"isinstance",
"(",
"start_line",
",",
"httputil",
".",
"ResponseStartLine",
")",
"assert",
"self",
".",
"_request_start_line",
"is",
"not",
"None",
"assert",
"self",
".",
"_request_headers",
"is",
"not",
"None",
"self",
".",
"_response_start_line",
"=",
"start_line",
"lines",
".",
"append",
"(",
"utf8",
"(",
"\"HTTP/1.1 %d %s\"",
"%",
"(",
"start_line",
"[",
"1",
"]",
",",
"start_line",
"[",
"2",
"]",
")",
")",
")",
"self",
".",
"_chunking_output",
"=",
"(",
"# TODO: should this use",
"# self._request_start_line.version or",
"# start_line.version?",
"self",
".",
"_request_start_line",
".",
"version",
"==",
"\"HTTP/1.1\"",
"# 1xx, 204 and 304 responses have no body (not even a zero-length",
"# body), and so should not have either Content-Length or",
"# Transfer-Encoding headers.",
"and",
"start_line",
".",
"code",
"not",
"in",
"(",
"204",
",",
"304",
")",
"and",
"(",
"start_line",
".",
"code",
"<",
"100",
"or",
"start_line",
".",
"code",
">=",
"200",
")",
"# No need to chunk the output if a Content-Length is specified.",
"and",
"\"Content-Length\"",
"not",
"in",
"headers",
"# Applications are discouraged from touching Transfer-Encoding,",
"# but if they do, leave it alone.",
"and",
"\"Transfer-Encoding\"",
"not",
"in",
"headers",
")",
"# If connection to a 1.1 client will be closed, inform client",
"if",
"(",
"self",
".",
"_request_start_line",
".",
"version",
"==",
"\"HTTP/1.1\"",
"and",
"self",
".",
"_disconnect_on_finish",
")",
":",
"headers",
"[",
"\"Connection\"",
"]",
"=",
"\"close\"",
"# If a 1.0 client asked for keep-alive, add the header.",
"if",
"(",
"self",
".",
"_request_start_line",
".",
"version",
"==",
"\"HTTP/1.0\"",
"and",
"self",
".",
"_request_headers",
".",
"get",
"(",
"\"Connection\"",
",",
"\"\"",
")",
".",
"lower",
"(",
")",
"==",
"\"keep-alive\"",
")",
":",
"headers",
"[",
"\"Connection\"",
"]",
"=",
"\"Keep-Alive\"",
"if",
"self",
".",
"_chunking_output",
":",
"headers",
"[",
"\"Transfer-Encoding\"",
"]",
"=",
"\"chunked\"",
"if",
"not",
"self",
".",
"is_client",
"and",
"(",
"self",
".",
"_request_start_line",
".",
"method",
"==",
"\"HEAD\"",
"or",
"cast",
"(",
"httputil",
".",
"ResponseStartLine",
",",
"start_line",
")",
".",
"code",
"==",
"304",
")",
":",
"self",
".",
"_expected_content_remaining",
"=",
"0",
"elif",
"\"Content-Length\"",
"in",
"headers",
":",
"self",
".",
"_expected_content_remaining",
"=",
"int",
"(",
"headers",
"[",
"\"Content-Length\"",
"]",
")",
"else",
":",
"self",
".",
"_expected_content_remaining",
"=",
"None",
"# TODO: headers are supposed to be of type str, but we still have some",
"# cases that let bytes slip through. Remove these native_str calls when those",
"# are fixed.",
"header_lines",
"=",
"(",
"native_str",
"(",
"n",
")",
"+",
"\": \"",
"+",
"native_str",
"(",
"v",
")",
"for",
"n",
",",
"v",
"in",
"headers",
".",
"get_all",
"(",
")",
")",
"lines",
".",
"extend",
"(",
"l",
".",
"encode",
"(",
"\"latin1\"",
")",
"for",
"l",
"in",
"header_lines",
")",
"for",
"line",
"in",
"lines",
":",
"if",
"b\"\\n\"",
"in",
"line",
":",
"raise",
"ValueError",
"(",
"\"Newline in header: \"",
"+",
"repr",
"(",
"line",
")",
")",
"future",
"=",
"None",
"if",
"self",
".",
"stream",
".",
"closed",
"(",
")",
":",
"future",
"=",
"self",
".",
"_write_future",
"=",
"Future",
"(",
")",
"future",
".",
"set_exception",
"(",
"iostream",
".",
"StreamClosedError",
"(",
")",
")",
"future",
".",
"exception",
"(",
")",
"else",
":",
"future",
"=",
"self",
".",
"_write_future",
"=",
"Future",
"(",
")",
"data",
"=",
"b\"\\r\\n\"",
".",
"join",
"(",
"lines",
")",
"+",
"b\"\\r\\n\\r\\n\"",
"if",
"chunk",
":",
"data",
"+=",
"self",
".",
"_format_chunk",
"(",
"chunk",
")",
"self",
".",
"_pending_write",
"=",
"self",
".",
"stream",
".",
"write",
"(",
"data",
")",
"future_add_done_callback",
"(",
"self",
".",
"_pending_write",
",",
"self",
".",
"_on_write_complete",
")",
"return",
"future"
] |
Implements `.HTTPConnection.write_headers`.
|
[
"Implements",
".",
"HTTPConnection",
".",
"write_headers",
"."
] |
python
|
train
|
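The heart of the Tornado record above is the decision of whether to use chunked transfer encoding for the response. A standalone sketch of that predicate (simplified; plain dict instead of HTTPHeaders, and not Tornado's actual API):

def should_chunk_response(request_version, status_code, headers):
    # Chunk only when: the client speaks HTTP/1.1, the status permits a body
    # (not 1xx/204/304), and the application set neither Content-Length nor
    # Transfer-Encoding itself.
    return (request_version == "HTTP/1.1"
            and status_code not in (204, 304)
            and (status_code < 100 or status_code >= 200)
            and "Content-Length" not in headers
            and "Transfer-Encoding" not in headers)

assert should_chunk_response("HTTP/1.1", 200, {})
assert not should_chunk_response("HTTP/1.0", 200, {})                      # old client
assert not should_chunk_response("HTTP/1.1", 204, {})                      # no body allowed
assert not should_chunk_response("HTTP/1.1", 200, {"Content-Length": "42"})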
tcalmant/ipopo
|
pelix/ipopo/decorators.py
|
https://github.com/tcalmant/ipopo/blob/2f9ae0c44cd9c34ef1a9d50837b3254e75678eb1/pelix/ipopo/decorators.py#L260-L317
|
def _ipopo_setup_field_callback(cls, context):
# type: (type, FactoryContext) -> None
"""
Sets up the class _field_callback dictionary
:param cls: The class to handle
:param context: The factory class context
"""
assert inspect.isclass(cls)
assert isinstance(context, FactoryContext)
if context.field_callbacks is not None:
callbacks = context.field_callbacks.copy()
else:
callbacks = {}
functions = inspect.getmembers(cls, inspect.isroutine)
for name, func in functions:
if not hasattr(func, constants.IPOPO_METHOD_FIELD_CALLBACKS):
# No attribute, get the next member
continue
method_callbacks = getattr(func, constants.IPOPO_METHOD_FIELD_CALLBACKS)
if not isinstance(method_callbacks, list):
# Invalid content
_logger.warning(
"Invalid attribute %s in %s",
constants.IPOPO_METHOD_FIELD_CALLBACKS,
name,
)
continue
# Keeping it allows inheritance : by removing it, only the first
# child will see the attribute -> Don't remove it
# Store the call backs
for kind, field, if_valid in method_callbacks:
fields_cbs = callbacks.setdefault(field, {})
if kind in fields_cbs and not is_from_parent(
cls, fields_cbs[kind][0].__name__
):
_logger.warning(
"Redefining the callback %s in '%s'. "
"Previous callback : '%s' (%s). "
"New callback : %s",
kind,
name,
fields_cbs[kind][0].__name__,
fields_cbs[kind][0],
func,
)
fields_cbs[kind] = (func, if_valid)
# Update the factory context
context.field_callbacks.clear()
context.field_callbacks.update(callbacks)
|
[
"def",
"_ipopo_setup_field_callback",
"(",
"cls",
",",
"context",
")",
":",
"# type: (type, FactoryContext) -> None",
"assert",
"inspect",
".",
"isclass",
"(",
"cls",
")",
"assert",
"isinstance",
"(",
"context",
",",
"FactoryContext",
")",
"if",
"context",
".",
"field_callbacks",
"is",
"not",
"None",
":",
"callbacks",
"=",
"context",
".",
"field_callbacks",
".",
"copy",
"(",
")",
"else",
":",
"callbacks",
"=",
"{",
"}",
"functions",
"=",
"inspect",
".",
"getmembers",
"(",
"cls",
",",
"inspect",
".",
"isroutine",
")",
"for",
"name",
",",
"func",
"in",
"functions",
":",
"if",
"not",
"hasattr",
"(",
"func",
",",
"constants",
".",
"IPOPO_METHOD_FIELD_CALLBACKS",
")",
":",
"# No attribute, get the next member",
"continue",
"method_callbacks",
"=",
"getattr",
"(",
"func",
",",
"constants",
".",
"IPOPO_METHOD_FIELD_CALLBACKS",
")",
"if",
"not",
"isinstance",
"(",
"method_callbacks",
",",
"list",
")",
":",
"# Invalid content",
"_logger",
".",
"warning",
"(",
"\"Invalid attribute %s in %s\"",
",",
"constants",
".",
"IPOPO_METHOD_FIELD_CALLBACKS",
",",
"name",
",",
")",
"continue",
"# Keeping it allows inheritance : by removing it, only the first",
"# child will see the attribute -> Don't remove it",
"# Store the call backs",
"for",
"kind",
",",
"field",
",",
"if_valid",
"in",
"method_callbacks",
":",
"fields_cbs",
"=",
"callbacks",
".",
"setdefault",
"(",
"field",
",",
"{",
"}",
")",
"if",
"kind",
"in",
"fields_cbs",
"and",
"not",
"is_from_parent",
"(",
"cls",
",",
"fields_cbs",
"[",
"kind",
"]",
"[",
"0",
"]",
".",
"__name__",
")",
":",
"_logger",
".",
"warning",
"(",
"\"Redefining the callback %s in '%s'. \"",
"\"Previous callback : '%s' (%s). \"",
"\"New callback : %s\"",
",",
"kind",
",",
"name",
",",
"fields_cbs",
"[",
"kind",
"]",
"[",
"0",
"]",
".",
"__name__",
",",
"fields_cbs",
"[",
"kind",
"]",
"[",
"0",
"]",
",",
"func",
",",
")",
"fields_cbs",
"[",
"kind",
"]",
"=",
"(",
"func",
",",
"if_valid",
")",
"# Update the factory context",
"context",
".",
"field_callbacks",
".",
"clear",
"(",
")",
"context",
".",
"field_callbacks",
".",
"update",
"(",
"callbacks",
")"
] |
Sets up the class _field_callback dictionary
:param cls: The class to handle
:param context: The factory class context
|
[
"Sets",
"up",
"the",
"class",
"_field_callback",
"dictionary"
] |
python
|
train
|
ARMmbed/mbed-cloud-sdk-python
|
src/mbed_cloud/_backends/device_directory/apis/default_api.py
|
https://github.com/ARMmbed/mbed-cloud-sdk-python/blob/c0af86fb2cdd4dc7ed26f236139241067d293509/src/mbed_cloud/_backends/device_directory/apis/default_api.py#L36-L56
|
def device_create(self, device, **kwargs): # noqa: E501
"""Create a device # noqa: E501
Create a new device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.device_create(device, asynchronous=True)
>>> result = thread.get()
:param asynchronous bool
:param DeviceDataPostRequest device: (required)
:return: DeviceData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('asynchronous'):
return self.device_create_with_http_info(device, **kwargs) # noqa: E501
else:
(data) = self.device_create_with_http_info(device, **kwargs) # noqa: E501
return data
|
[
"def",
"device_create",
"(",
"self",
",",
"device",
",",
"*",
"*",
"kwargs",
")",
":",
"# noqa: E501",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'asynchronous'",
")",
":",
"return",
"self",
".",
"device_create_with_http_info",
"(",
"device",
",",
"*",
"*",
"kwargs",
")",
"# noqa: E501",
"else",
":",
"(",
"data",
")",
"=",
"self",
".",
"device_create_with_http_info",
"(",
"device",
",",
"*",
"*",
"kwargs",
")",
"# noqa: E501",
"return",
"data"
] |
Create a device # noqa: E501
Create a new device. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.device_create(device, asynchronous=True)
>>> result = thread.get()
:param asynchronous bool
:param DeviceDataPostRequest device: (required)
:return: DeviceData
If the method is called asynchronously,
returns the request thread.
|
[
"Create",
"a",
"device",
"#",
"noqa",
":",
"E501"
] |
python
|
train
|
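The generated SDK method above follows a sync-by-default pattern where asynchronous=True returns a thread-like handle instead of the result. A generic sketch of that shape using concurrent.futures (not the mbed SDK's implementation; .result() stands in for the SDK thread's .get()):

from concurrent.futures import ThreadPoolExecutor

_pool = ThreadPoolExecutor(max_workers=4)

def call(fn, *args, asynchronous=False, **kwargs):
    # Synchronous by default; asynchronous=True hands back a Future.
    if asynchronous:
        return _pool.submit(fn, *args, **kwargs)
    return fn(*args, **kwargs)

future = call(pow, 2, 10, asynchronous=True)
assert future.result() == 1024      # like thread.get() in the docstring
assert call(pow, 2, 10) == 1024     # plain synchronous call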
cloudbase/python-hnvclient
|
hnv/client.py
|
https://github.com/cloudbase/python-hnvclient/blob/b019452af01db22629809b8930357a2ebf6494be/hnv/client.py#L2023-L2045
|
def process_raw_data(cls, raw_data):
"""Create a new model using raw API response."""
properties = raw_data.get("properties", {})
backend_ip_configurations = []
for raw_content in properties.get("backendIPConfigurations", []):
resource = Resource.from_raw_data(raw_content)
backend_ip_configurations.append(resource)
properties["backendIPConfigurations"] = backend_ip_configurations
load_balancing_rules = []
for raw_content in properties.get("loadBalancingRules", []):
resource = Resource.from_raw_data(raw_content)
load_balancing_rules.append(resource)
properties["loadBalancingRules"] = load_balancing_rules
outbound_nat_rules = []
for raw_content in properties.get("outboundNatRules", []):
resource = Resource.from_raw_data(raw_content)
outbound_nat_rules.append(resource)
properties["outboundNatRules"] = outbound_nat_rules
return super(BackendAddressPools, cls).process_raw_data(raw_data)
|
[
"def",
"process_raw_data",
"(",
"cls",
",",
"raw_data",
")",
":",
"properties",
"=",
"raw_data",
".",
"get",
"(",
"\"properties\"",
",",
"{",
"}",
")",
"backend_ip_configurations",
"=",
"[",
"]",
"for",
"raw_content",
"in",
"properties",
".",
"get",
"(",
"\"backendIPConfigurations\"",
",",
"[",
"]",
")",
":",
"resource",
"=",
"Resource",
".",
"from_raw_data",
"(",
"raw_content",
")",
"backend_ip_configurations",
".",
"append",
"(",
"resource",
")",
"properties",
"[",
"\"backendIPConfigurations\"",
"]",
"=",
"backend_ip_configurations",
"load_balancing_rules",
"=",
"[",
"]",
"for",
"raw_content",
"in",
"properties",
".",
"get",
"(",
"\"loadBalancingRules\"",
",",
"[",
"]",
")",
":",
"resource",
"=",
"Resource",
".",
"from_raw_data",
"(",
"raw_content",
")",
"load_balancing_rules",
".",
"append",
"(",
"resource",
")",
"properties",
"[",
"\"loadBalancingRules\"",
"]",
"=",
"load_balancing_rules",
"outbound_nat_rules",
"=",
"[",
"]",
"for",
"raw_content",
"in",
"properties",
".",
"get",
"(",
"\"outboundNatRules\"",
",",
"[",
"]",
")",
":",
"resource",
"=",
"Resource",
".",
"from_raw_data",
"(",
"raw_content",
")",
"outbound_nat_rules",
".",
"append",
"(",
"resource",
")",
"properties",
"[",
"\"outboundNatRules\"",
"]",
"=",
"outbound_nat_rules",
"return",
"super",
"(",
"BackendAddressPools",
",",
"cls",
")",
".",
"process_raw_data",
"(",
"raw_data",
")"
] |
Create a new model using raw API response.
|
[
"Create",
"a",
"new",
"model",
"using",
"raw",
"API",
"response",
"."
] |
python
|
train
|
iotile/coretools
|
iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/sgilink.py
|
https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/sgilink.py#L42-L53
|
def generate(env):
"""Add Builders and construction variables for MIPSPro to an Environment."""
link.generate(env)
env['LINK'] = env.Detect(linkers) or 'cc'
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
# __RPATH is set to $_RPATH in the platform specification if that
# platform supports it.
env['RPATHPREFIX'] = '-rpath '
env['RPATHSUFFIX'] = ''
env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'
|
[
"def",
"generate",
"(",
"env",
")",
":",
"link",
".",
"generate",
"(",
"env",
")",
"env",
"[",
"'LINK'",
"]",
"=",
"env",
".",
"Detect",
"(",
"linkers",
")",
"or",
"'cc'",
"env",
"[",
"'SHLINKFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'$LINKFLAGS -shared'",
")",
"# __RPATH is set to $_RPATH in the platform specification if that",
"# platform supports it.",
"env",
"[",
"'RPATHPREFIX'",
"]",
"=",
"'-rpath '",
"env",
"[",
"'RPATHSUFFIX'",
"]",
"=",
"''",
"env",
"[",
"'_RPATH'",
"]",
"=",
"'${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}'"
] |
Add Builders and construction variables for MIPSPro to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"MIPSPro",
"to",
"an",
"Environment",
"."
] |
python
|
train
|
tango-controls/pytango
|
tango/utils.py
|
https://github.com/tango-controls/pytango/blob/9cf78c517c9cdc1081ff6d080a9646a740cc1d36/tango/utils.py#L1578-L1596
|
def _get_value(self, evt):
"""Internal usage only"""
if evt.err:
e = evt.errors[0]
return "[%s] %s" % (e.reason, e.desc)
if isinstance(evt, EventData):
return "[%s] %s" % (
evt.attr_value.quality, str(evt.attr_value.value))
elif isinstance(evt, AttrConfEventData):
cfg = evt.attr_conf
return "label='%s'; unit='%s'" % (cfg.label, cfg.unit)
elif isinstance(evt, DataReadyEventData):
return ""
elif isinstance(evt, PipeEventData):
return evt.pipe_value
elif isinstance(evt, DevIntrChangeEventData):
print("utils::_get_value()")
return
|
[
"def",
"_get_value",
"(",
"self",
",",
"evt",
")",
":",
"if",
"evt",
".",
"err",
":",
"e",
"=",
"evt",
".",
"errors",
"[",
"0",
"]",
"return",
"\"[%s] %s\"",
"%",
"(",
"e",
".",
"reason",
",",
"e",
".",
"desc",
")",
"if",
"isinstance",
"(",
"evt",
",",
"EventData",
")",
":",
"return",
"\"[%s] %s\"",
"%",
"(",
"evt",
".",
"attr_value",
".",
"quality",
",",
"str",
"(",
"evt",
".",
"attr_value",
".",
"value",
")",
")",
"elif",
"isinstance",
"(",
"evt",
",",
"AttrConfEventData",
")",
":",
"cfg",
"=",
"evt",
".",
"attr_conf",
"return",
"\"label='%s'; unit='%s'\"",
"%",
"(",
"cfg",
".",
"label",
",",
"cfg",
".",
"unit",
")",
"elif",
"isinstance",
"(",
"evt",
",",
"DataReadyEventData",
")",
":",
"return",
"\"\"",
"elif",
"isinstance",
"(",
"evt",
",",
"PipeEventData",
")",
":",
"return",
"evt",
".",
"pipe_value",
"elif",
"isinstance",
"(",
"evt",
",",
"DevIntrChangeEventData",
")",
":",
"print",
"(",
"\"utils::_get_value()\"",
")",
"return"
] |
Internal usage only
|
[
"Internal",
"usage",
"only"
] |
python
|
train
|
horazont/aioxmpp
|
aioxmpp/service.py
|
https://github.com/horazont/aioxmpp/blob/22a68e5e1d23f2a4dee470092adbd4672f9ef061/aioxmpp/service.py#L1073-L1080
|
def presence_handler(type_, from_):
"""
Deprecated alias of :func:`.dispatcher.presence_handler`.
.. deprecated:: 0.9
"""
import aioxmpp.dispatcher
return aioxmpp.dispatcher.presence_handler(type_, from_)
|
[
"def",
"presence_handler",
"(",
"type_",
",",
"from_",
")",
":",
"import",
"aioxmpp",
".",
"dispatcher",
"return",
"aioxmpp",
".",
"dispatcher",
".",
"presence_handler",
"(",
"type_",
",",
"from_",
")"
] |
Deprecated alias of :func:`.dispatcher.presence_handler`.
.. deprecated:: 0.9
|
[
"Deprecated",
"alias",
"of",
":",
"func",
":",
".",
"dispatcher",
".",
"presence_handler",
"."
] |
python
|
train
|
GPflow/GPflow
|
gpflow/expectations.py
|
https://github.com/GPflow/GPflow/blob/549394f0b1b0696c7b521a065e49bdae6e7acf27/gpflow/expectations.py#L237-L245
|
def _expectation(p, kern, none1, none2, none3, nghp=None):
"""
Compute the expectation:
<diag(K_{X, X})>_p(X)
- K_{.,.} :: RBF kernel
:return: N
"""
return kern.Kdiag(p.mu)
|
[
"def",
"_expectation",
"(",
"p",
",",
"kern",
",",
"none1",
",",
"none2",
",",
"none3",
",",
"nghp",
"=",
"None",
")",
":",
"return",
"kern",
".",
"Kdiag",
"(",
"p",
".",
"mu",
")"
] |
Compute the expectation:
<diag(K_{X, X})>_p(X)
- K_{.,.} :: RBF kernel
:return: N
|
[
"Compute",
"the",
"expectation",
":",
"<diag",
"(",
"K_",
"{",
"X",
"X",
"}",
")",
">",
"_p",
"(",
"X",
")",
"-",
"K_",
"{",
".",
".",
"}",
"::",
"RBF",
"kernel"
] |
python
|
train
|
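Why kern.Kdiag(p.mu) suffices in the GPflow record above: an RBF kernel is stationary, so k(x, x) equals the kernel variance for every x, and the expectation of diag(K_{X,X}) under any density p(X) is that constant. A numpy sketch of the resulting shape-N output (toy function, not GPflow's API):

import numpy as np

def rbf_kdiag_expectation(mu, variance=1.0):
    # k(x, x) = variance for a stationary RBF kernel, independent of p(X),
    # so the expectation is just `variance` repeated once per data point.
    return variance * np.ones(len(mu))

print(rbf_kdiag_expectation(np.zeros((5, 2)), variance=0.7))  # five 0.7 entries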
mieubrisse/wunderpy2
|
wunderpy2/wunderclient.py
|
https://github.com/mieubrisse/wunderpy2/blob/7106b6c13ca45ef4d56f805753c93258d5b822c2/wunderpy2/wunderclient.py#L79-L85
|
def update_task(self, task_id, revision, title=None, assignee_id=None, completed=None, recurrence_type=None, recurrence_count=None, due_date=None, starred=None, remove=None):
'''
Updates the task with the given ID to have the given information
NOTE: The 'remove' parameter is an optional list of parameters to remove from the given task, e.g. ['due_date']
'''
return tasks_endpoint.update_task(self, task_id, revision, title=title, assignee_id=assignee_id, completed=completed, recurrence_type=recurrence_type, recurrence_count=recurrence_count, due_date=due_date, starred=starred, remove=remove)
|
[
"def",
"update_task",
"(",
"self",
",",
"task_id",
",",
"revision",
",",
"title",
"=",
"None",
",",
"assignee_id",
"=",
"None",
",",
"completed",
"=",
"None",
",",
"recurrence_type",
"=",
"None",
",",
"recurrence_count",
"=",
"None",
",",
"due_date",
"=",
"None",
",",
"starred",
"=",
"None",
",",
"remove",
"=",
"None",
")",
":",
"return",
"tasks_endpoint",
".",
"update_task",
"(",
"self",
",",
"task_id",
",",
"revision",
",",
"title",
"=",
"title",
",",
"assignee_id",
"=",
"assignee_id",
",",
"completed",
"=",
"completed",
",",
"recurrence_type",
"=",
"recurrence_type",
",",
"recurrence_count",
"=",
"recurrence_count",
",",
"due_date",
"=",
"due_date",
",",
"starred",
"=",
"starred",
",",
"remove",
"=",
"remove",
")"
] |
Updates the task with the given ID to have the given information
NOTE: The 'remove' parameter is an optional list of parameters to remove from the given task, e.g. ['due_date']
|
[
"Updates",
"the",
"task",
"with",
"the",
"given",
"ID",
"to",
"have",
"the",
"given",
"information",
"NOTE",
":",
"The",
"remove",
"parameter",
"is",
"an",
"optional",
"list",
"of",
"parameters",
"to",
"remove",
"from",
"the",
"given",
"task",
"e",
".",
"g",
".",
"[",
"due_date",
"]"
] |
python
|
train
|
CloudGenix/sdk-python
|
cloudgenix/interactive.py
|
https://github.com/CloudGenix/sdk-python/blob/1b2f92582b6a19769134914793bfd00e4caa074b/cloudgenix/interactive.py#L634-L666
|
def quick_confirm(prompt, default_value):
"""
Function to display a quick confirmation for user input
**Parameters:**
- **prompt:** Text to display before confirm
- **default_value:** Default value for no entry
**Returns:** 'y', 'n', or Default value.
"""
valid = False
value = default_value.lower()
while not valid:
input_val = compat_input(prompt + "[{0}]: ".format(default_value))
if input_val == "":
value = default_value.lower()
valid = True
else:
try:
if input_val.lower() in ['y', 'n']:
value = input_val.lower()
valid = True
else:
print("ERROR: enter 'Y' or 'N'.")
valid = False
except ValueError:
print("ERROR: enter 'Y' or 'N'.")
valid = False
return value
|
[
"def",
"quick_confirm",
"(",
"prompt",
",",
"default_value",
")",
":",
"valid",
"=",
"False",
"value",
"=",
"default_value",
".",
"lower",
"(",
")",
"while",
"not",
"valid",
":",
"input_val",
"=",
"compat_input",
"(",
"prompt",
"+",
"\"[{0}]: \"",
".",
"format",
"(",
"default_value",
")",
")",
"if",
"input_val",
"==",
"\"\"",
":",
"value",
"=",
"default_value",
".",
"lower",
"(",
")",
"valid",
"=",
"True",
"else",
":",
"try",
":",
"if",
"input_val",
".",
"lower",
"(",
")",
"in",
"[",
"'y'",
",",
"'n'",
"]",
":",
"value",
"=",
"input_val",
".",
"lower",
"(",
")",
"valid",
"=",
"True",
"else",
":",
"print",
"(",
"\"ERROR: enter 'Y' or 'N'.\"",
")",
"valid",
"=",
"False",
"except",
"ValueError",
":",
"print",
"(",
"\"ERROR: enter 'Y' or 'N'.\"",
")",
"valid",
"=",
"False",
"return",
"value"
] |
Function to display a quick confirmation for user input
**Parameters:**
- **prompt:** Text to display before confirm
- **default_value:** Default value for no entry
**Returns:** 'y', 'n', or Default value.
|
[
"Function",
"to",
"display",
"a",
"quick",
"confirmation",
"for",
"user",
"input"
] |
python
|
train
|
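A scripted rendition of the validation loop in the CloudGenix record above, feeding canned answers instead of reading from a terminal (standalone reimplementation for testing, not the library's function):

def quick_confirm_scripted(default_value, answers):
    answers = iter(answers)
    while True:
        raw = next(answers)
        if raw == "":
            return default_value.lower()     # Enter keeps the default
        if raw.lower() in ("y", "n"):
            return raw.lower()
        print("ERROR: enter 'Y' or 'N'.")    # invalid input, ask again

assert quick_confirm_scripted("N", [""]) == "n"
assert quick_confirm_scripted("N", ["Y"]) == "y"
assert quick_confirm_scripted("N", ["maybe", "n"]) == "n"  # re-prompts once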
eng-tools/sfsimodels
|
sfsimodels/loader.py
|
https://github.com/eng-tools/sfsimodels/blob/65a690ca440d61307f5a9b8478e4704f203a5925/sfsimodels/loader.py#L114-L138
|
def load_frame_building_sample_data():
"""
Sample data for the BuildingFrame object
:return:
"""
number_of_storeys = 6
interstorey_height = 3.4 # m
masses = 40.0e3 # kg
n_bays = 3
fb = models.BuildingFrame(number_of_storeys, n_bays)
fb.interstorey_heights = interstorey_height * np.ones(number_of_storeys)
fb.floor_length = 18.0 # m
fb.floor_width = 16.0 # m
fb.storey_masses = masses * np.ones(number_of_storeys) # kg
fb.bay_lengths = [6., 6.0, 6.0]
fb.set_beam_prop("depth", [0.5, 0.5, 0.5], repeat="up")
fb.set_beam_prop("width", [0.4, 0.4, 0.4], repeat="up")
fb.set_column_prop("width", [0.5, 0.5, 0.5, 0.5], repeat="up")
fb.set_column_prop("depth", [0.5, 0.5, 0.5, 0.5], repeat="up")
fb.n_seismic_frames = 3
fb.n_gravity_frames = 0
return fb
|
[
"def",
"load_frame_building_sample_data",
"(",
")",
":",
"number_of_storeys",
"=",
"6",
"interstorey_height",
"=",
"3.4",
"# m",
"masses",
"=",
"40.0e3",
"# kg",
"n_bays",
"=",
"3",
"fb",
"=",
"models",
".",
"BuildingFrame",
"(",
"number_of_storeys",
",",
"n_bays",
")",
"fb",
".",
"interstorey_heights",
"=",
"interstorey_height",
"*",
"np",
".",
"ones",
"(",
"number_of_storeys",
")",
"fb",
".",
"floor_length",
"=",
"18.0",
"# m",
"fb",
".",
"floor_width",
"=",
"16.0",
"# m",
"fb",
".",
"storey_masses",
"=",
"masses",
"*",
"np",
".",
"ones",
"(",
"number_of_storeys",
")",
"# kg",
"fb",
".",
"bay_lengths",
"=",
"[",
"6.",
",",
"6.0",
",",
"6.0",
"]",
"fb",
".",
"set_beam_prop",
"(",
"\"depth\"",
",",
"[",
"0.5",
",",
"0.5",
",",
"0.5",
"]",
",",
"repeat",
"=",
"\"up\"",
")",
"fb",
".",
"set_beam_prop",
"(",
"\"width\"",
",",
"[",
"0.4",
",",
"0.4",
",",
"0.4",
"]",
",",
"repeat",
"=",
"\"up\"",
")",
"fb",
".",
"set_column_prop",
"(",
"\"width\"",
",",
"[",
"0.5",
",",
"0.5",
",",
"0.5",
",",
"0.5",
"]",
",",
"repeat",
"=",
"\"up\"",
")",
"fb",
".",
"set_column_prop",
"(",
"\"depth\"",
",",
"[",
"0.5",
",",
"0.5",
",",
"0.5",
",",
"0.5",
"]",
",",
"repeat",
"=",
"\"up\"",
")",
"fb",
".",
"n_seismic_frames",
"=",
"3",
"fb",
".",
"n_gravity_frames",
"=",
"0",
"return",
"fb"
] |
Sample data for the BuildingFrame object
:return:
|
[
"Sample",
"data",
"for",
"the",
"BuildingFrame",
"object"
] |
python
|
train
|
quantumlib/Cirq
|
cirq/_compat.py
|
https://github.com/quantumlib/Cirq/blob/0827da80dd7880e5b923eb69407e980ed9bc0bd2/cirq/_compat.py#L22-L40
|
def proper_repr(value: Any) -> str:
"""Overrides sympy and numpy returning repr strings that don't parse."""
if isinstance(value, sympy.Basic):
result = sympy.srepr(value)
# HACK: work around https://github.com/sympy/sympy/issues/16074
# (only handles a few cases)
fixed_tokens = [
'Symbol', 'pi', 'Mul', 'Add', 'Mod', 'Integer', 'Float', 'Rational'
]
for token in fixed_tokens:
result = result.replace(token, 'sympy.' + token)
return result
if isinstance(value, np.ndarray):
return 'np.array({!r})'.format(value.tolist())
return repr(value)
|
[
"def",
"proper_repr",
"(",
"value",
":",
"Any",
")",
"->",
"str",
":",
"if",
"isinstance",
"(",
"value",
",",
"sympy",
".",
"Basic",
")",
":",
"result",
"=",
"sympy",
".",
"srepr",
"(",
"value",
")",
"# HACK: work around https://github.com/sympy/sympy/issues/16074",
"# (only handles a few cases)",
"fixed_tokens",
"=",
"[",
"'Symbol'",
",",
"'pi'",
",",
"'Mul'",
",",
"'Add'",
",",
"'Mod'",
",",
"'Integer'",
",",
"'Float'",
",",
"'Rational'",
"]",
"for",
"token",
"in",
"fixed_tokens",
":",
"result",
"=",
"result",
".",
"replace",
"(",
"token",
",",
"'sympy.'",
"+",
"token",
")",
"return",
"result",
"if",
"isinstance",
"(",
"value",
",",
"np",
".",
"ndarray",
")",
":",
"return",
"'np.array({!r})'",
".",
"format",
"(",
"value",
".",
"tolist",
"(",
")",
")",
"return",
"repr",
"(",
"value",
")"
] |
Overrides sympy and numpy returning repr strings that don't parse.
|
[
"Overrides",
"sympy",
"and",
"numpy",
"returning",
"repr",
"strings",
"that",
"don",
"t",
"parse",
"."
] |
python
|
train
|
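A short demonstration of the problem the Cirq record above works around: plain repr of sympy expressions does not evaluate back without the sympy names in scope. Assumes cirq, sympy, and numpy are installed; the import path is taken from the record's url:

import numpy as np
import sympy

from cirq._compat import proper_repr

t = sympy.Symbol('t') * 2
print(repr(t))         # 2*t  -- eval() of this fails without t defined
print(proper_repr(t))  # sympy.Mul(sympy.Integer(2), sympy.Symbol('t'))

a = np.array([[1, 2], [3, 4]])
print(proper_repr(a))  # np.array([[1, 2], [3, 4]]) -- parseable with np in scope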
Midnighter/dependency-info
|
src/depinfo/info.py
|
https://github.com/Midnighter/dependency-info/blob/15bcada0a1d6c047cbe10b844d5bd909ea8cc752/src/depinfo/info.py#L47-L65
|
def get_pkg_info(
package_name, additional=("pip", "flit", "pbr", "setuptools", "wheel")
):
"""Return build and package dependencies as a dict."""
dist_index = build_dist_index(pkg_resources.working_set)
root = dist_index[package_name]
tree = construct_tree(dist_index)
dependencies = {pkg.name: pkg.installed_version for pkg in tree[root]}
# Add the initial package itself.
root = root.as_requirement()
dependencies[root.name] = root.installed_version
# Retrieve information on additional packages such as build tools.
for name in additional:
try:
pkg = dist_index[name].as_requirement()
dependencies[pkg.name] = pkg.installed_version
except KeyError:
continue
return dependencies
|
[
"def",
"get_pkg_info",
"(",
"package_name",
",",
"additional",
"=",
"(",
"\"pip\"",
",",
"\"flit\"",
",",
"\"pbr\"",
",",
"\"setuptools\"",
",",
"\"wheel\"",
")",
")",
":",
"dist_index",
"=",
"build_dist_index",
"(",
"pkg_resources",
".",
"working_set",
")",
"root",
"=",
"dist_index",
"[",
"package_name",
"]",
"tree",
"=",
"construct_tree",
"(",
"dist_index",
")",
"dependencies",
"=",
"{",
"pkg",
".",
"name",
":",
"pkg",
".",
"installed_version",
"for",
"pkg",
"in",
"tree",
"[",
"root",
"]",
"}",
"# Add the initial package itself.",
"root",
"=",
"root",
".",
"as_requirement",
"(",
")",
"dependencies",
"[",
"root",
".",
"name",
"]",
"=",
"root",
".",
"installed_version",
"# Retrieve information on additional packages such as build tools.",
"for",
"name",
"in",
"additional",
":",
"try",
":",
"pkg",
"=",
"dist_index",
"[",
"name",
"]",
".",
"as_requirement",
"(",
")",
"dependencies",
"[",
"pkg",
".",
"name",
"]",
"=",
"pkg",
".",
"installed_version",
"except",
"KeyError",
":",
"continue",
"return",
"dependencies"
] |
Return build and package dependencies as a dict.
|
[
"Return",
"build",
"and",
"package",
"dependencies",
"as",
"a",
"dict",
"."
] |
python
|
train
|
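On Python 3.8+ roughly the same version lookup can be done without pkg_resources via importlib.metadata; this sketch does only the flat lookup, not the dependency-tree walk the record's function performs:

from importlib.metadata import PackageNotFoundError, version

def get_versions(names):
    found = {}
    for name in names:
        try:
            found[name] = version(name)
        except PackageNotFoundError:
            continue            # mirrors the KeyError skip above
    return found

print(get_versions(["pip", "setuptools", "wheel", "no-such-package"]))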
dcramer/mock-django
|
mock_django/query.py
|
https://github.com/dcramer/mock-django/blob/1168d3255e0d67fbf74a9c71feaccbdafef59d21/mock_django/query.py#L21-L109
|
def QuerySetMock(model, *return_value):
"""
Get a SharedMock that returns self for most attributes and a new copy of
itself for any method that ordinarily generates QuerySets.
Set the results to two items:
>>> class Post(object): pass
>>> objects = QuerySetMock(Post, 'return', 'values')
>>> assert list(objects.filter()) == list(objects.all())
Force an exception:
>>> objects = QuerySetMock(Post, Exception())
Chain calls:
>>> objects.all().filter(filter_arg='dummy')
"""
def make_get(self, model):
def _get(*a, **k):
results = list(self)
if len(results) > 1:
raise model.MultipleObjectsReturned
try:
return results[0]
except IndexError:
raise model.DoesNotExist
return _get
def make_qs_returning_method(self):
def _qs_returning_method(*a, **k):
return copy.deepcopy(self)
return _qs_returning_method
def make_getitem(self):
def _getitem(k):
if isinstance(k, slice):
self.__start = k.start
self.__stop = k.stop
else:
return list(self)[k]
return self
return _getitem
def make_iterator(self):
def _iterator(*a, **k):
if len(return_value) == 1 and isinstance(return_value[0], Exception):
raise return_value[0]
start = getattr(self, '__start', None)
stop = getattr(self, '__stop', None)
for x in return_value[start:stop]:
yield x
return _iterator
actual_model = model
if actual_model:
model = mock.MagicMock(spec=actual_model())
else:
model = mock.MagicMock()
m = SharedMock(reserved=['count', 'exists'] + QUERYSET_RETURNING_METHODS)
m.__start = None
m.__stop = None
m.__iter__.side_effect = lambda: iter(m.iterator())
m.__getitem__.side_effect = make_getitem(m)
if hasattr(m, "__nonzero__"):
# Python 2
m.__nonzero__.side_effect = lambda: bool(return_value)
m.exists.side_effect = m.__nonzero__
else:
# Python 3
m.__bool__.side_effect = lambda: bool(return_value)
m.exists.side_effect = m.__bool__
m.__len__.side_effect = lambda: len(return_value)
m.count.side_effect = m.__len__
m.model = model
m.get = make_get(m, actual_model)
for method_name in QUERYSET_RETURNING_METHODS:
setattr(m, method_name, make_qs_returning_method(m))
# Note since this is a SharedMock, *all* auto-generated child
# attributes will have the same side_effect ... might not make
# sense for some like count().
m.iterator.side_effect = make_iterator(m)
return m
|
[
"def",
"QuerySetMock",
"(",
"model",
",",
"*",
"return_value",
")",
":",
"def",
"make_get",
"(",
"self",
",",
"model",
")",
":",
"def",
"_get",
"(",
"*",
"a",
",",
"*",
"*",
"k",
")",
":",
"results",
"=",
"list",
"(",
"self",
")",
"if",
"len",
"(",
"results",
")",
">",
"1",
":",
"raise",
"model",
".",
"MultipleObjectsReturned",
"try",
":",
"return",
"results",
"[",
"0",
"]",
"except",
"IndexError",
":",
"raise",
"model",
".",
"DoesNotExist",
"return",
"_get",
"def",
"make_qs_returning_method",
"(",
"self",
")",
":",
"def",
"_qs_returning_method",
"(",
"*",
"a",
",",
"*",
"*",
"k",
")",
":",
"return",
"copy",
".",
"deepcopy",
"(",
"self",
")",
"return",
"_qs_returning_method",
"def",
"make_getitem",
"(",
"self",
")",
":",
"def",
"_getitem",
"(",
"k",
")",
":",
"if",
"isinstance",
"(",
"k",
",",
"slice",
")",
":",
"self",
".",
"__start",
"=",
"k",
".",
"start",
"self",
".",
"__stop",
"=",
"k",
".",
"stop",
"else",
":",
"return",
"list",
"(",
"self",
")",
"[",
"k",
"]",
"return",
"self",
"return",
"_getitem",
"def",
"make_iterator",
"(",
"self",
")",
":",
"def",
"_iterator",
"(",
"*",
"a",
",",
"*",
"*",
"k",
")",
":",
"if",
"len",
"(",
"return_value",
")",
"==",
"1",
"and",
"isinstance",
"(",
"return_value",
"[",
"0",
"]",
",",
"Exception",
")",
":",
"raise",
"return_value",
"[",
"0",
"]",
"start",
"=",
"getattr",
"(",
"self",
",",
"'__start'",
",",
"None",
")",
"stop",
"=",
"getattr",
"(",
"self",
",",
"'__stop'",
",",
"None",
")",
"for",
"x",
"in",
"return_value",
"[",
"start",
":",
"stop",
"]",
":",
"yield",
"x",
"return",
"_iterator",
"actual_model",
"=",
"model",
"if",
"actual_model",
":",
"model",
"=",
"mock",
".",
"MagicMock",
"(",
"spec",
"=",
"actual_model",
"(",
")",
")",
"else",
":",
"model",
"=",
"mock",
".",
"MagicMock",
"(",
")",
"m",
"=",
"SharedMock",
"(",
"reserved",
"=",
"[",
"'count'",
",",
"'exists'",
"]",
"+",
"QUERYSET_RETURNING_METHODS",
")",
"m",
".",
"__start",
"=",
"None",
"m",
".",
"__stop",
"=",
"None",
"m",
".",
"__iter__",
".",
"side_effect",
"=",
"lambda",
":",
"iter",
"(",
"m",
".",
"iterator",
"(",
")",
")",
"m",
".",
"__getitem__",
".",
"side_effect",
"=",
"make_getitem",
"(",
"m",
")",
"if",
"hasattr",
"(",
"m",
",",
"\"__nonzero__\"",
")",
":",
"# Python 2",
"m",
".",
"__nonzero__",
".",
"side_effect",
"=",
"lambda",
":",
"bool",
"(",
"return_value",
")",
"m",
".",
"exists",
".",
"side_effect",
"=",
"m",
".",
"__nonzero__",
"else",
":",
"# Python 3",
"m",
".",
"__bool__",
".",
"side_effect",
"=",
"lambda",
":",
"bool",
"(",
"return_value",
")",
"m",
".",
"exists",
".",
"side_effect",
"=",
"m",
".",
"__bool__",
"m",
".",
"__len__",
".",
"side_effect",
"=",
"lambda",
":",
"len",
"(",
"return_value",
")",
"m",
".",
"count",
".",
"side_effect",
"=",
"m",
".",
"__len__",
"m",
".",
"model",
"=",
"model",
"m",
".",
"get",
"=",
"make_get",
"(",
"m",
",",
"actual_model",
")",
"for",
"method_name",
"in",
"QUERYSET_RETURNING_METHODS",
":",
"setattr",
"(",
"m",
",",
"method_name",
",",
"make_qs_returning_method",
"(",
"m",
")",
")",
"# Note since this is a SharedMock, *all* auto-generated child",
"# attributes will have the same side_effect ... might not make",
"# sense for some like count().",
"m",
".",
"iterator",
".",
"side_effect",
"=",
"make_iterator",
"(",
"m",
")",
"return",
"m"
] |
Get a SharedMock that returns self for most attributes and a new copy of
itself for any method that ordinarily generates QuerySets.
Set the results to two items:
>>> class Post(object): pass
>>> objects = QuerySetMock(Post, 'return', 'values')
>>> assert list(objects.filter()) == list(objects.all())
Force an exception:
>>> objects = QuerySetMock(Post, Exception())
Chain calls:
>>> objects.all().filter(filter_arg='dummy')
|
[
"Get",
"a",
"SharedMock",
"that",
"returns",
"self",
"for",
"most",
"attributes",
"and",
"a",
"new",
"copy",
"of",
"itself",
"for",
"any",
"method",
"that",
"ordinarily",
"generates",
"QuerySets",
"."
] |
python
|
train
|
miyakogi/wdom
|
wdom/element.py
|
https://github.com/miyakogi/wdom/blob/a21bcd23e94baceee71161829f6897bee3fd39c1/wdom/element.py#L212-L218
|
def html(self) -> str:
"""Return html representation."""
if isinstance(self.value, bool):
val = 'true' if self.value else 'false'
else:
val = str(self.value)
return 'draggable="{}"'.format(val)
|
[
"def",
"html",
"(",
"self",
")",
"->",
"str",
":",
"if",
"isinstance",
"(",
"self",
".",
"value",
",",
"bool",
")",
":",
"val",
"=",
"'true'",
"if",
"self",
".",
"value",
"else",
"'false'",
"else",
":",
"val",
"=",
"str",
"(",
"self",
".",
"value",
")",
"return",
"'draggable=\"{}\"'",
".",
"format",
"(",
"val",
")"
] |
Return html representation.
|
[
"Return",
"html",
"representation",
"."
] |
python
|
train
|
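The bool branch in the wdom record above exists because str(True) is 'True' while HTML attribute values are conventionally lowercase. A self-contained check of the same serialization (free function instead of the class property):

def draggable_attr(value):
    # Lowercase booleans for HTML; everything else stringified as-is.
    val = ('true' if value else 'false') if isinstance(value, bool) else str(value)
    return 'draggable="{}"'.format(val)

assert draggable_attr(True) == 'draggable="true"'
assert draggable_attr('auto') == 'draggable="auto"'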
edelbluth/blackred
|
src/blackred/blackred.py
|
https://github.com/edelbluth/blackred/blob/57a655e4d4eca60ce16e7b338079355049a87b49/src/blackred/blackred.py#L289-L299
|
def get_watchlist_ttl(self, item: str) -> int:
"""
Get the amount of time a specific item will remain on the watchlist.
:param str item: The item to get the TTL for on the watchlist
:return: Time in seconds. Returns None for a non-existing element
:rtype: int
"""
assert item is not None
item = self._encode_item(item)
return self.__get_ttl(self.__redis_conf['watchlist_template'].format(item))
|
[
"def",
"get_watchlist_ttl",
"(",
"self",
",",
"item",
":",
"str",
")",
"->",
"int",
":",
"assert",
"item",
"is",
"not",
"None",
"item",
"=",
"self",
".",
"_encode_item",
"(",
"item",
")",
"return",
"self",
".",
"__get_ttl",
"(",
"self",
".",
"__redis_conf",
"[",
"'watchlist_template'",
"]",
".",
"format",
"(",
"item",
")",
")"
] |
Get the amount of time a specific item will remain on the watchlist.
:param str item: The item to get the TTL for on the watchlist
:return: Time in seconds. Returns None for a non-existing element
:rtype: int
|
[
"Get",
"the",
"amount",
"of",
"time",
"a",
"specific",
"item",
"will",
"remain",
"on",
"the",
"watchlist",
"."
] |
python
|
train
|
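Under the hood the BlackRed record above is a Redis TTL query. A minimal redis-py illustration (assumes a local Redis server; the key name is invented for the example, not BlackRed's real template):

import redis

r = redis.Redis()
r.setex("watchlist:203.0.113.7", 3600, 1)   # key expires in one hour
print(r.ttl("watchlist:203.0.113.7"))       # ~3600 seconds remaining
print(r.ttl("watchlist:unknown"))           # -2: key does not exist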
atztogo/phonopy
|
phonopy/harmonic/dynmat_to_fc.py
|
https://github.com/atztogo/phonopy/blob/869cc2ba9e7d495d5f4cf6942415ab3fc9e2a10f/phonopy/harmonic/dynmat_to_fc.py#L42-L62
|
def get_commensurate_points(supercell_matrix): # wrt primitive cell
"""Commensurate q-points are returned.
Parameters
----------
supercell_matrix : array_like
Supercell matrix with respect to primitive cell basis vectors.
shape=(3, 3)
dtype=intc
"""
smat = np.array(supercell_matrix, dtype=int)
rec_primitive = PhonopyAtoms(numbers=[1],
scaled_positions=[[0, 0, 0]],
cell=np.diag([1, 1, 1]),
pbc=True)
rec_supercell = get_supercell(rec_primitive, smat.T)
q_pos = rec_supercell.get_scaled_positions()
return np.array(np.where(q_pos > 1 - 1e-15, q_pos - 1, q_pos),
dtype='double', order='C')
|
[
"def",
"get_commensurate_points",
"(",
"supercell_matrix",
")",
":",
"# wrt primitive cell",
"smat",
"=",
"np",
".",
"array",
"(",
"supercell_matrix",
",",
"dtype",
"=",
"int",
")",
"rec_primitive",
"=",
"PhonopyAtoms",
"(",
"numbers",
"=",
"[",
"1",
"]",
",",
"scaled_positions",
"=",
"[",
"[",
"0",
",",
"0",
",",
"0",
"]",
"]",
",",
"cell",
"=",
"np",
".",
"diag",
"(",
"[",
"1",
",",
"1",
",",
"1",
"]",
")",
",",
"pbc",
"=",
"True",
")",
"rec_supercell",
"=",
"get_supercell",
"(",
"rec_primitive",
",",
"smat",
".",
"T",
")",
"q_pos",
"=",
"rec_supercell",
".",
"get_scaled_positions",
"(",
")",
"return",
"np",
".",
"array",
"(",
"np",
".",
"where",
"(",
"q_pos",
">",
"1",
"-",
"1e-15",
",",
"q_pos",
"-",
"1",
",",
"q_pos",
")",
",",
"dtype",
"=",
"'double'",
",",
"order",
"=",
"'C'",
")"
] |
Commensurate q-points are returned.
Parameters
----------
supercell_matrix : array_like
Supercell matrix with respect to primitive cell basis vectors.
shape=(3, 3)
dtype=intc
|
[
"Commensurate",
"q",
"-",
"points",
"are",
"returned",
"."
] |
python
|
train
|
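For the special case of a diagonal supercell matrix, the commensurate q-points computed by the phonopy record above reduce to the fractional grid i/n in [0, 1). A numpy sketch of that case (ordering may differ from phonopy's get_scaled_positions):

import itertools
import numpy as np

def commensurate_points_diagonal(n1, n2, n3):
    # Commensurate q-points of a diag(n1, n2, n3) supercell: every
    # fractional point (i/n1, j/n2, k/n3) with 0 <= i < n1, etc.
    pts = [np.array(ijk, dtype=float) / (n1, n2, n3)
           for ijk in itertools.product(range(n1), range(n2), range(n3))]
    return np.array(pts)

print(commensurate_points_diagonal(2, 2, 1))
# [[0. 0. 0.], [0. 0.5 0.], [0.5 0. 0.], [0.5 0.5 0.]]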
spyder-ide/spyder
|
spyder/plugins/variableexplorer/widgets/dataframeeditor.py
|
https://github.com/spyder-ide/spyder/blob/f76836ce1b924bcc4efd3f74f2960d26a4e528e0/spyder/plugins/variableexplorer/widgets/dataframeeditor.py#L655-L663
|
def columnCount(self, index=QModelIndex()):
"""DataFrame column number"""
if self.axis == 0:
if self.total_cols <= self.cols_loaded:
return self.total_cols
else:
return self.cols_loaded
else:
return max(1, self._shape[1])
|
[
"def",
"columnCount",
"(",
"self",
",",
"index",
"=",
"QModelIndex",
"(",
")",
")",
":",
"if",
"self",
".",
"axis",
"==",
"0",
":",
"if",
"self",
".",
"total_cols",
"<=",
"self",
".",
"cols_loaded",
":",
"return",
"self",
".",
"total_cols",
"else",
":",
"return",
"self",
".",
"cols_loaded",
"else",
":",
"return",
"max",
"(",
"1",
",",
"self",
".",
"_shape",
"[",
"1",
"]",
")"
] |
DataFrame column number
|
[
"DataFrame",
"column",
"number"
] |
python
|
train
|
OpenGov/python_data_wrap
|
datawrap/tableloader.py
|
https://github.com/OpenGov/python_data_wrap/blob/7de38bb30d7a500adc336a4a7999528d753e5600/datawrap/tableloader.py#L16-L51
|
def read(file_name, file_contents=None, on_demand=False):
'''
Loads an arbitrary file type (xlsx, xls, or csv like) and returns
a list of 2D tables. For csv files this will be a list of one table,
but excel formats can have many tables/worksheets.
TODO:
Add wrapper which can be closed/exited on each file type which cleans
up the file handler.
Args:
file_name: The name of the local file, or the holder for the
extension type when the file_contents are supplied.
file_contents: The file-like object holding contents of file_name.
If left as None, then file_name is directly loaded.
on_demand: Requests that a yielder be used in place of a full data
copy.
'''
try:
if re.search(XML_EXT_REGEX, file_name):
return get_data_excel_xml(file_name, file_contents=file_contents, on_demand=on_demand)
if re.search(XLSX_EXT_REGEX, file_name):
return get_data_xlsx(file_name, file_contents=file_contents, on_demand=on_demand)
elif re.search(XLS_EXT_REGEX, file_name):
return get_data_xls(file_name, file_contents=file_contents, on_demand=on_demand)
elif re.search(CSV_EXT_REGEX, file_name):
return get_data_csv(file_name, file_contents=file_contents, on_demand=on_demand)
else:
try:
return get_data_csv(file_name, file_contents=file_contents, on_demand=on_demand)
except:
raise ValueError("Unable to load file '{}' as csv".format(file_name))
except xlrd.XLRDError as e:
if "<?xml" in str(e):
return get_data_excel_xml(file_name, file_contents=file_contents, on_demand=on_demand)
raise
|
[
"def",
"read",
"(",
"file_name",
",",
"file_contents",
"=",
"None",
",",
"on_demand",
"=",
"False",
")",
":",
"try",
":",
"if",
"re",
".",
"search",
"(",
"XML_EXT_REGEX",
",",
"file_name",
")",
":",
"return",
"get_data_excel_xml",
"(",
"file_name",
",",
"file_contents",
"=",
"file_contents",
",",
"on_demand",
"=",
"on_demand",
")",
"if",
"re",
".",
"search",
"(",
"XLSX_EXT_REGEX",
",",
"file_name",
")",
":",
"return",
"get_data_xlsx",
"(",
"file_name",
",",
"file_contents",
"=",
"file_contents",
",",
"on_demand",
"=",
"on_demand",
")",
"elif",
"re",
".",
"search",
"(",
"XLS_EXT_REGEX",
",",
"file_name",
")",
":",
"return",
"get_data_xls",
"(",
"file_name",
",",
"file_contents",
"=",
"file_contents",
",",
"on_demand",
"=",
"on_demand",
")",
"elif",
"re",
".",
"search",
"(",
"CSV_EXT_REGEX",
",",
"file_name",
")",
":",
"return",
"get_data_csv",
"(",
"file_name",
",",
"file_contents",
"=",
"file_contents",
",",
"on_demand",
"=",
"on_demand",
")",
"else",
":",
"try",
":",
"return",
"get_data_csv",
"(",
"file_name",
",",
"file_contents",
"=",
"file_contents",
",",
"on_demand",
"=",
"on_demand",
")",
"except",
":",
"raise",
"ValueError",
"(",
"\"Unable to load file '{}' as csv\"",
".",
"format",
"(",
"file_name",
")",
")",
"except",
"xlrd",
".",
"XLRDError",
"as",
"e",
":",
"if",
"\"<?xml\"",
"in",
"str",
"(",
"e",
")",
":",
"return",
"get_data_excel_xml",
"(",
"file_name",
",",
"file_contents",
"=",
"file_contents",
",",
"on_demand",
"=",
"on_demand",
")",
"raise"
] |
Loads an arbitrary file type (xlsx, xls, or csv like) and returns
a list of 2D tables. For csv files this will be a list of one table,
but excel formats can have many tables/worksheets.
TODO:
Add wrapper which can be closed/exited on each file type which cleans
up the file handler.
Args:
file_name: The name of the local file, or the holder for the
extension type when the file_contents are supplied.
file_contents: The file-like object holding contents of file_name.
If left as None, then file_name is directly loaded.
on_demand: Requests that a yielder be used in place of a full data
copy.
|
[
"Loads",
"an",
"arbitrary",
"file",
"type",
"(",
"xlsx",
"xls",
"or",
"csv",
"like",
")",
"and",
"returns",
"a",
"list",
"of",
"2D",
"tables",
".",
"For",
"csv",
"files",
"this",
"will",
"be",
"a",
"list",
"of",
"one",
"table",
"but",
"excel",
"formats",
"can",
"have",
"many",
"tables",
"/",
"worksheets",
"."
] |
python
|
train
|
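The loader in the python_data_wrap record above dispatches on filename regexes and falls back to csv. A standalone sketch of the same dispatch idea (the patterns here are illustrative, not the module's actual XML/XLSX/XLS/CSV regexes):

import re

LOADERS = [
    (re.compile(r'\.xlsx$', re.I), 'xlsx'),
    (re.compile(r'\.xls$', re.I), 'xls'),
    (re.compile(r'\.(csv|txt)$', re.I), 'csv'),
]

def pick_loader(file_name):
    for pattern, kind in LOADERS:
        if pattern.search(file_name):
            return kind
    return 'csv'   # fall through: try csv, complain only if that also fails

assert pick_loader('report.XLSX') == 'xlsx'
assert pick_loader('data.unknown') == 'csv'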
fastai/fastai
|
fastai/callbacks/tensorboard.py
|
https://github.com/fastai/fastai/blob/9fb84a5cdefe5a766cdb792b8f5d8971737b7e67/fastai/callbacks/tensorboard.py#L120-L124
|
def _write_gen_model_stats(self, iteration:int)->None:
"Writes gradient statistics for generator to Tensorboard."
generator = self.learn.gan_trainer.generator
self.stats_writer.write(model=generator, iteration=iteration, tbwriter=self.tbwriter, name='gen_model_stats')
self.gen_stats_updated = True
|
[
"def",
"_write_gen_model_stats",
"(",
"self",
",",
"iteration",
":",
"int",
")",
"->",
"None",
":",
"generator",
"=",
"self",
".",
"learn",
".",
"gan_trainer",
".",
"generator",
"self",
".",
"stats_writer",
".",
"write",
"(",
"model",
"=",
"generator",
",",
"iteration",
"=",
"iteration",
",",
"tbwriter",
"=",
"self",
".",
"tbwriter",
",",
"name",
"=",
"'gen_model_stats'",
")",
"self",
".",
"gen_stats_updated",
"=",
"True"
] |
Writes gradient statistics for generator to Tensorboard.
|
[
"Writes",
"gradient",
"statistics",
"for",
"generator",
"to",
"Tensorboard",
"."
] |
python
|
train
|
UDST/orca
|
orca/orca.py
|
https://github.com/UDST/orca/blob/07b34aeef13cc87c966b2e30cbe7e76cc9d3622c/orca/orca.py#L1908-L2007
|
def run(steps, iter_vars=None, data_out=None, out_interval=1,
out_base_tables=None, out_run_tables=None, compress=False,
out_base_local=True, out_run_local=True):
"""
Run steps in series, optionally repeatedly over some sequence.
The current iteration variable is set as a global injectable
called ``iter_var``.
Parameters
----------
steps : list of str
List of steps to run identified by their name.
iter_vars : iterable, optional
The values of `iter_vars` will be made available as an injectable
called ``iter_var`` when repeatedly running `steps`.
data_out : str, optional
An optional filename to which all tables injected into any step
in `steps` will be saved every `out_interval` iterations.
File will be a pandas HDF data store.
out_interval : int, optional
Iteration interval on which to save data to `data_out`. For example,
2 will save out every 2 iterations, 5 every 5 iterations.
Default is every iteration.
The results of the first and last iterations are always included.
The input (base) tables are also included and prefixed with `base/`,
these represent the state of the system before any steps have been
executed.
The interval is defined relative to the first iteration. For example,
a run beginning in 2015 with an out_interval of 2 will write out
results for 2015, 2017, etc.
out_base_tables: list of str, optional, default None
List of base tables to write. If not provided, tables injected
into 'steps' will be written.
out_run_tables: list of str, optional, default None
List of run tables to write. If not provided, tables injected
into 'steps' will be written.
compress: boolean, optional, default False
Whether to compress output file using standard HDF5 zlib compression.
Compression yields much smaller files using slightly more CPU.
out_base_local: boolean, optional, default True
For tables in out_base_tables, whether to store only local columns (True)
or both, local and computed columns (False).
out_run_local: boolean, optional, default True
For tables in out_run_tables, whether to store only local columns (True)
or both, local and computed columns (False).
"""
iter_vars = iter_vars or [None]
max_i = len(iter_vars)
# get the tables to write out
if out_base_tables is None or out_run_tables is None:
step_tables = get_step_table_names(steps)
if out_base_tables is None:
out_base_tables = step_tables
if out_run_tables is None:
out_run_tables = step_tables
# write out the base (inputs)
if data_out:
add_injectable('iter_var', iter_vars[0])
write_tables(data_out, out_base_tables, 'base', compress=compress, local=out_base_local)
# run the steps
for i, var in enumerate(iter_vars, start=1):
add_injectable('iter_var', var)
if var is not None:
print('Running iteration {} with iteration value {!r}'.format(
i, var))
logger.debug(
'running iteration {} with iteration value {!r}'.format(
i, var))
t1 = time.time()
for j, step_name in enumerate(steps):
add_injectable('iter_step', iter_step(j, step_name))
print('Running step {!r}'.format(step_name))
with log_start_finish(
'run step {!r}'.format(step_name), logger,
logging.INFO):
step = get_step(step_name)
t2 = time.time()
step()
print("Time to execute step '{}': {:.2f} s".format(
step_name, time.time() - t2))
clear_cache(scope=_CS_STEP)
print(
('Total time to execute iteration {} '
'with iteration value {!r}: '
'{:.2f} s').format(i, var, time.time() - t1))
# write out the results for the current iteration
if data_out:
if (i - 1) % out_interval == 0 or i == max_i:
write_tables(data_out, out_run_tables, var, compress=compress, local=out_run_local)
clear_cache(scope=_CS_ITER)
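# Illustrative usage sketch (an addition, not from the original source): the
# step names and output path below are hypothetical placeholders for steps
# registered elsewhere via orca's step decorator.
if __name__ == '__main__':
    run(['build_network', 'simulate'],
        iter_vars=[2015, 2016, 2017],
        data_out='results.h5', out_interval=2)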
|
[
"def",
"run",
"(",
"steps",
",",
"iter_vars",
"=",
"None",
",",
"data_out",
"=",
"None",
",",
"out_interval",
"=",
"1",
",",
"out_base_tables",
"=",
"None",
",",
"out_run_tables",
"=",
"None",
",",
"compress",
"=",
"False",
",",
"out_base_local",
"=",
"True",
",",
"out_run_local",
"=",
"True",
")",
":",
"iter_vars",
"=",
"iter_vars",
"or",
"[",
"None",
"]",
"max_i",
"=",
"len",
"(",
"iter_vars",
")",
"# get the tables to write out",
"if",
"out_base_tables",
"is",
"None",
"or",
"out_run_tables",
"is",
"None",
":",
"step_tables",
"=",
"get_step_table_names",
"(",
"steps",
")",
"if",
"out_base_tables",
"is",
"None",
":",
"out_base_tables",
"=",
"step_tables",
"if",
"out_run_tables",
"is",
"None",
":",
"out_run_tables",
"=",
"step_tables",
"# write out the base (inputs)",
"if",
"data_out",
":",
"add_injectable",
"(",
"'iter_var'",
",",
"iter_vars",
"[",
"0",
"]",
")",
"write_tables",
"(",
"data_out",
",",
"out_base_tables",
",",
"'base'",
",",
"compress",
"=",
"compress",
",",
"local",
"=",
"out_base_local",
")",
"# run the steps",
"for",
"i",
",",
"var",
"in",
"enumerate",
"(",
"iter_vars",
",",
"start",
"=",
"1",
")",
":",
"add_injectable",
"(",
"'iter_var'",
",",
"var",
")",
"if",
"var",
"is",
"not",
"None",
":",
"print",
"(",
"'Running iteration {} with iteration value {!r}'",
".",
"format",
"(",
"i",
",",
"var",
")",
")",
"logger",
".",
"debug",
"(",
"'running iteration {} with iteration value {!r}'",
".",
"format",
"(",
"i",
",",
"var",
")",
")",
"t1",
"=",
"time",
".",
"time",
"(",
")",
"for",
"j",
",",
"step_name",
"in",
"enumerate",
"(",
"steps",
")",
":",
"add_injectable",
"(",
"'iter_step'",
",",
"iter_step",
"(",
"j",
",",
"step_name",
")",
")",
"print",
"(",
"'Running step {!r}'",
".",
"format",
"(",
"step_name",
")",
")",
"with",
"log_start_finish",
"(",
"'run step {!r}'",
".",
"format",
"(",
"step_name",
")",
",",
"logger",
",",
"logging",
".",
"INFO",
")",
":",
"step",
"=",
"get_step",
"(",
"step_name",
")",
"t2",
"=",
"time",
".",
"time",
"(",
")",
"step",
"(",
")",
"print",
"(",
"\"Time to execute step '{}': {:.2f} s\"",
".",
"format",
"(",
"step_name",
",",
"time",
".",
"time",
"(",
")",
"-",
"t2",
")",
")",
"clear_cache",
"(",
"scope",
"=",
"_CS_STEP",
")",
"print",
"(",
"(",
"'Total time to execute iteration {} '",
"'with iteration value {!r}: '",
"'{:.2f} s'",
")",
".",
"format",
"(",
"i",
",",
"var",
",",
"time",
".",
"time",
"(",
")",
"-",
"t1",
")",
")",
"# write out the results for the current iteration",
"if",
"data_out",
":",
"if",
"(",
"i",
"-",
"1",
")",
"%",
"out_interval",
"==",
"0",
"or",
"i",
"==",
"max_i",
":",
"write_tables",
"(",
"data_out",
",",
"out_run_tables",
",",
"var",
",",
"compress",
"=",
"compress",
",",
"local",
"=",
"out_run_local",
")",
"clear_cache",
"(",
"scope",
"=",
"_CS_ITER",
")"
] |
Run steps in series, optionally repeatedly over some sequence.
The current iteration variable is set as a global injectable
called ``iter_var``.
Parameters
----------
steps : list of str
List of steps to run identified by their name.
iter_vars : iterable, optional
The values of `iter_vars` will be made available as an injectable
called ``iter_var`` when repeatedly running `steps`.
data_out : str, optional
An optional filename to which all tables injected into any step
in `steps` will be saved every `out_interval` iterations.
File will be a pandas HDF data store.
out_interval : int, optional
Iteration interval on which to save data to `data_out`. For example,
2 will save out every 2 iterations, 5 every 5 iterations.
Default is every iteration.
The results of the first and last iterations are always included.
The input (base) tables are also included and prefixed with `base/`;
these represent the state of the system before any steps have been
executed.
The interval is defined relative to the first iteration. For example,
a run beginning in 2015 with an out_interval of 2 will write out
results for 2015, 2017, etc.
out_base_tables: list of str, optional, default None
List of base tables to write. If not provided, tables injected
into 'steps' will be written.
out_run_tables: list of str, optional, default None
List of run tables to write. If not provided, tables injected
into 'steps' will be written.
compress: boolean, optional, default False
Whether to compress output file using standard HDF5 zlib compression.
Compression yields much smaller files using slightly more CPU.
out_base_local: boolean, optional, default True
For tables in out_base_tables, whether to store only local columns (True)
or both, local and computed columns (False).
out_run_local: boolean, optional, default True
For tables in out_run_tables, whether to store only local columns (True)
or both, local and computed columns (False).
|
[
"Run",
"steps",
"in",
"series",
"optionally",
"repeatedly",
"over",
"some",
"sequence",
".",
"The",
"current",
"iteration",
"variable",
"is",
"set",
"as",
"a",
"global",
"injectable",
"called",
"iter_var",
"."
] |
python
|
train
|
quantmind/pulsar
|
pulsar/apps/data/channels.py
|
https://github.com/quantmind/pulsar/blob/fee44e871954aa6ca36d00bb5a3739abfdb89b26/pulsar/apps/data/channels.py#L329-L341
|
def register(self, event, callback):
"""Register a ``callback`` for ``event``
"""
pattern = self.channels.event_pattern(event)
entry = self.callbacks.get(pattern)
if not entry:
entry = event_callbacks(event, pattern, re.compile(pattern), [])
self.callbacks[entry.pattern] = entry
if callback not in entry.callbacks:
entry.callbacks.append(callback)
return entry
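# Illustrative usage sketch (hypothetical event name and handler; a connected
# Channels instance is required, so the calls are left commented): the
# membership check above makes re-registering the same callback a no-op.
def on_order(channel, event, data):  # callback signature is an assumption
    print(event, data)
# entry = channels.register('orders.*', on_order)
# channels.register('orders.*', on_order)  # callbacks list is unchanged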
|
[
"def",
"register",
"(",
"self",
",",
"event",
",",
"callback",
")",
":",
"pattern",
"=",
"self",
".",
"channels",
".",
"event_pattern",
"(",
"event",
")",
"entry",
"=",
"self",
".",
"callbacks",
".",
"get",
"(",
"pattern",
")",
"if",
"not",
"entry",
":",
"entry",
"=",
"event_callbacks",
"(",
"event",
",",
"pattern",
",",
"re",
".",
"compile",
"(",
"pattern",
")",
",",
"[",
"]",
")",
"self",
".",
"callbacks",
"[",
"entry",
".",
"pattern",
"]",
"=",
"entry",
"if",
"callback",
"not",
"in",
"entry",
".",
"callbacks",
":",
"entry",
".",
"callbacks",
".",
"append",
"(",
"callback",
")",
"return",
"entry"
] |
Register a ``callback`` for ``event``
|
[
"Register",
"a",
"callback",
"for",
"event"
] |
python
|
train
|
librosa/librosa
|
librosa/effects.py
|
https://github.com/librosa/librosa/blob/180e8e6eb8f958fa6b20b8cba389f7945d508247/librosa/effects.py#L310-L387
|
def remix(y, intervals, align_zeros=True):
'''Remix an audio signal by re-ordering time intervals.
Parameters
----------
y : np.ndarray [shape=(t,) or (2, t)]
Audio time series
intervals : iterable of tuples (start, end)
An iterable (list-like or generator) where the `i`th item
`intervals[i]` indicates the start and end (in samples)
of a slice of `y`.
align_zeros : boolean
If `True`, interval boundaries are mapped to the closest
zero-crossing in `y`. If `y` is stereo, zero-crossings
are computed after converting to mono.
Returns
-------
y_remix : np.ndarray [shape=(d,) or (2, d)]
`y` remixed in the order specified by `intervals`
Examples
--------
Load in the example track and reverse the beats
>>> y, sr = librosa.load(librosa.util.example_audio_file())
Compute beats
>>> _, beat_frames = librosa.beat.beat_track(y=y, sr=sr,
... hop_length=512)
Convert from frames to sample indices
>>> beat_samples = librosa.frames_to_samples(beat_frames)
Generate intervals from consecutive events
>>> intervals = librosa.util.frame(beat_samples, frame_length=2,
... hop_length=1).T
Reverse the beat intervals
>>> y_out = librosa.effects.remix(y, intervals[::-1])
'''
# Validate the audio buffer
util.valid_audio(y, mono=False)
y_out = []
if align_zeros:
y_mono = core.to_mono(y)
zeros = np.nonzero(core.zero_crossings(y_mono))[-1]
# Force end-of-signal onto zeros
zeros = np.append(zeros, [len(y_mono)])
clip = [slice(None)] * y.ndim
for interval in intervals:
if align_zeros:
interval = zeros[util.match_events(interval, zeros)]
clip[-1] = slice(interval[0], interval[1])
y_out.append(y[tuple(clip)])
return np.concatenate(y_out, axis=-1)
|
[
"def",
"remix",
"(",
"y",
",",
"intervals",
",",
"align_zeros",
"=",
"True",
")",
":",
"# Validate the audio buffer",
"util",
".",
"valid_audio",
"(",
"y",
",",
"mono",
"=",
"False",
")",
"y_out",
"=",
"[",
"]",
"if",
"align_zeros",
":",
"y_mono",
"=",
"core",
".",
"to_mono",
"(",
"y",
")",
"zeros",
"=",
"np",
".",
"nonzero",
"(",
"core",
".",
"zero_crossings",
"(",
"y_mono",
")",
")",
"[",
"-",
"1",
"]",
"# Force end-of-signal onto zeros",
"zeros",
"=",
"np",
".",
"append",
"(",
"zeros",
",",
"[",
"len",
"(",
"y_mono",
")",
"]",
")",
"clip",
"=",
"[",
"slice",
"(",
"None",
")",
"]",
"*",
"y",
".",
"ndim",
"for",
"interval",
"in",
"intervals",
":",
"if",
"align_zeros",
":",
"interval",
"=",
"zeros",
"[",
"util",
".",
"match_events",
"(",
"interval",
",",
"zeros",
")",
"]",
"clip",
"[",
"-",
"1",
"]",
"=",
"slice",
"(",
"interval",
"[",
"0",
"]",
",",
"interval",
"[",
"1",
"]",
")",
"y_out",
".",
"append",
"(",
"y",
"[",
"tuple",
"(",
"clip",
")",
"]",
")",
"return",
"np",
".",
"concatenate",
"(",
"y_out",
",",
"axis",
"=",
"-",
"1",
")"
] |
Remix an audio signal by re-ordering time intervals.
Parameters
----------
y : np.ndarray [shape=(t,) or (2, t)]
Audio time series
intervals : iterable of tuples (start, end)
An iterable (list-like or generator) where the `i`th item
`intervals[i]` indicates the start and end (in samples)
of a slice of `y`.
align_zeros : boolean
If `True`, interval boundaries are mapped to the closest
zero-crossing in `y`. If `y` is stereo, zero-crossings
are computed after converting to mono.
Returns
-------
y_remix : np.ndarray [shape=(d,) or (2, d)]
`y` remixed in the order specified by `intervals`
Examples
--------
Load in the example track and reverse the beats
>>> y, sr = librosa.load(librosa.util.example_audio_file())
Compute beats
>>> _, beat_frames = librosa.beat.beat_track(y=y, sr=sr,
... hop_length=512)
Convert from frames to sample indices
>>> beat_samples = librosa.frames_to_samples(beat_frames)
Generate intervals from consecutive events
>>> intervals = librosa.util.frame(beat_samples, frame_length=2,
... hop_length=1).T
Reverse the beat intervals
>>> y_out = librosa.effects.remix(y, intervals[::-1])
|
[
"Remix",
"an",
"audio",
"signal",
"by",
"re",
"-",
"ordering",
"time",
"intervals",
"."
] |
python
|
test
|
Nike-Inc/cerberus-python-client
|
cerberus/client.py
|
https://github.com/Nike-Inc/cerberus-python-client/blob/ef38356822e722fcb6a6ed4a1b38a5b493e753ae/cerberus/client.py#L95-L104
|
def get_roles(self):
"""Return all the roles (IAM or User Groups) that can be granted to a safe deposit box.
Roles are permission levels that are granted to IAM or User Groups. Associating the id for the write role
would allow that IAM or User Group to write in the safe deposit box."""
roles_resp = get_with_retry(self.cerberus_url + '/v1/role',
headers=self.HEADERS)
throw_if_bad_response(roles_resp)
return roles_resp.json()
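# Illustrative usage sketch (the URL is a placeholder and the constructor
# arguments are assumptions; authentication options are omitted): list the
# available roles so one can be associated with a safe deposit box.
if __name__ == '__main__':
    from cerberus.client import CerberusClient
    client = CerberusClient('https://cerberus.example.com')
    for role in client.get_roles():
        print(role)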
|
[
"def",
"get_roles",
"(",
"self",
")",
":",
"roles_resp",
"=",
"get_with_retry",
"(",
"self",
".",
"cerberus_url",
"+",
"'/v1/role'",
",",
"headers",
"=",
"self",
".",
"HEADERS",
")",
"throw_if_bad_response",
"(",
"roles_resp",
")",
"return",
"roles_resp",
".",
"json",
"(",
")"
] |
Return all the roles (IAM or User Groups) that can be granted to a safe deposit box.
Roles are permission levels that are granted to IAM or User Groups. Associating the id for the write role
would allow that IAM or User Group to write in the safe deposit box.
|
[
"Return",
"all",
"the",
"roles",
"(",
"IAM",
"or",
"User",
"Groups",
")",
"that",
"can",
"be",
"granted",
"to",
"a",
"safe",
"deposit",
"box",
"."
] |
python
|
train
|
linkhub-sdk/popbill.py
|
popbill/statementService.py
|
https://github.com/linkhub-sdk/popbill.py/blob/68a0dd7f7a937603318e93be321fde73c50b96cc/popbill/statementService.py#L237-L263
|
def cancel(self, CorpNum, ItemCode, MgtKey, Memo=None, UserID=None):
""" 발행취소
args
CorpNum : 팝빌회원 사업자번호
ItemCode : 명세서 종류 코드
[121 - 거래명세서], [122 - 청구서], [123 - 견적서],
[124 - 발주서], [125 - 입금표], [126 - 영수증]
MgtKey : 파트너 문서관리번호
Memo : 처리메모
UserID : 팝빌회원 아이디
return
처리결과. consist of code and message
raise
PopbillException
"""
if MgtKey == None or MgtKey == "":
raise PopbillException(-99999999, "관리번호가 입력되지 않았습니다.")
if ItemCode == None or ItemCode == "":
raise PopbillException(-99999999, "명세서 종류 코드가 입력되지 않았습니다.")
postData = ''
if Memo != None and Memo != '':
postData = self._stringtify({"memo": Memo})
return self._httppost('/Statement/' + str(ItemCode) + '/' + MgtKey, postData, CorpNum, UserID, "CANCEL")
|
[
"def",
"cancel",
"(",
"self",
",",
"CorpNum",
",",
"ItemCode",
",",
"MgtKey",
",",
"Memo",
"=",
"None",
",",
"UserID",
"=",
"None",
")",
":",
"if",
"MgtKey",
"==",
"None",
"or",
"MgtKey",
"==",
"\"\"",
":",
"raise",
"PopbillException",
"(",
"-",
"99999999",
",",
"\"관리번호가 입력되지 않았습니다.\")\r",
"",
"if",
"ItemCode",
"==",
"None",
"or",
"ItemCode",
"==",
"\"\"",
":",
"raise",
"PopbillException",
"(",
"-",
"99999999",
",",
"\"명세서 종류 코드가 입력되지 않았습니다.\")\r",
"",
"postData",
"=",
"''",
"if",
"Memo",
"!=",
"None",
"and",
"Memo",
"!=",
"''",
":",
"postData",
"=",
"self",
".",
"_stringtify",
"(",
"{",
"\"memo\"",
":",
"Memo",
"}",
")",
"return",
"self",
".",
"_httppost",
"(",
"'/Statement/'",
"+",
"str",
"(",
"ItemCode",
")",
"+",
"'/'",
"+",
"MgtKey",
",",
"postData",
",",
"CorpNum",
",",
"UserID",
",",
"\"CANCEL\"",
")"
] |
Cancel issuance
args
CorpNum : Popbill member business registration number
ItemCode : statement type code
[121 - transaction statement], [122 - bill], [123 - estimate],
[124 - purchase order], [125 - deposit slip], [126 - receipt]
MgtKey : partner document management number
Memo : processing memo
UserID : Popbill member ID
return
processing result. consists of code and message
raise
PopbillException
|
[
"발행취소",
"args",
"CorpNum",
":",
"팝빌회원",
"사업자번호",
"ItemCode",
":",
"명세서",
"종류",
"코드",
"[",
"121",
"-",
"거래명세서",
"]",
"[",
"122",
"-",
"청구서",
"]",
"[",
"123",
"-",
"견적서",
"]",
"[",
"124",
"-",
"발주서",
"]",
"[",
"125",
"-",
"입금표",
"]",
"[",
"126",
"-",
"영수증",
"]",
"MgtKey",
":",
"파트너",
"문서관리번호",
"Memo",
":",
"처리메모",
"UserID",
":",
"팝빌회원",
"아이디",
"return",
"처리결과",
".",
"consist",
"of",
"code",
"and",
"message",
"raise",
"PopbillException"
] |
python
|
train
|
chaoss/grimoirelab-perceval
|
perceval/backends/core/github.py
|
https://github.com/chaoss/grimoirelab-perceval/blob/41c908605e88b7ebc3a536c643fa0f212eaf9e0e/perceval/backends/core/github.py#L367-L374
|
def __get_issue_assignees(self, raw_assignees):
"""Get issue assignees"""
assignees = []
for ra in raw_assignees:
assignees.append(self.__get_user(ra['login']))
return assignees
|
[
"def",
"__get_issue_assignees",
"(",
"self",
",",
"raw_assignees",
")",
":",
"assignees",
"=",
"[",
"]",
"for",
"ra",
"in",
"raw_assignees",
":",
"assignees",
".",
"append",
"(",
"self",
".",
"__get_user",
"(",
"ra",
"[",
"'login'",
"]",
")",
")",
"return",
"assignees"
] |
Get issue assignees
|
[
"Get",
"issue",
"assignees"
] |
python
|
test
|
MacHu-GWU/angora-project
|
angora/filesystem/filesystem.py
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/filesystem/filesystem.py#L852-L878
|
def from_path_by_size(dir_path, min_size=0, max_size=1 << 40):
"""Create a new FileCollection, and select all files that size in
a range::
dir_path = "your/path"
# select by file size larger than 100MB
fc = FileCollection.from_path_by_size(
dir_path, min_size=100*1024*1024)
# select by file size smaller than 100MB
fc = FileCollection.from_path_by_size(
dir_path, max_size=100*1024*1024)
# select by file size from 1MB to 100MB
fc = FileCollection.from_path_by_size(
dir_path, min_size=1024*1024, max_size=100*1024*1024)
"""
def filter(winfile):
if (winfile.size_on_disk >= min_size) and \
(winfile.size_on_disk <= max_size):
return True
else:
return False
return FileCollection.from_path_by_criterion(
dir_path, filter, keepboth=False)
|
[
"def",
"from_path_by_size",
"(",
"dir_path",
",",
"min_size",
"=",
"0",
",",
"max_size",
"=",
"1",
"<<",
"40",
")",
":",
"def",
"filter",
"(",
"winfile",
")",
":",
"if",
"(",
"winfile",
".",
"size_on_disk",
">=",
"min_size",
")",
"and",
"(",
"winfile",
".",
"size_on_disk",
"<=",
"max_size",
")",
":",
"return",
"True",
"else",
":",
"return",
"False",
"return",
"FileCollection",
".",
"from_path_by_criterion",
"(",
"dir_path",
",",
"filter",
",",
"keepboth",
"=",
"False",
")"
] |
Create a new FileCollection, and select all files whose size is in
a range::
dir_path = "your/path"
# select by file size larger than 100MB
fc = FileCollection.from_path_by_size(
dir_path, min_size=100*1024*1024)
# select by file size smaller than 100MB
fc = FileCollection.from_path_by_size(
dir_path, max_size=100*1024*1024)
# select by file size from 1MB to 100MB
fc = FileCollection.from_path_by_size(
dir_path, min_size=1024*1024, max_size=100*1024*1024)
|
[
"Create",
"a",
"new",
"FileCollection",
"and",
"select",
"all",
"files",
"that",
"size",
"in",
"a",
"range",
"::",
"dir_path",
"=",
"your",
"/",
"path",
"#",
"select",
"by",
"file",
"size",
"larger",
"than",
"100MB",
"fc",
"=",
"FileCollection",
".",
"from_path_by_size",
"(",
"dir_path",
"min_size",
"=",
"100",
"*",
"1024",
"*",
"1024",
")",
"#",
"select",
"by",
"file",
"size",
"smaller",
"than",
"100MB",
"fc",
"=",
"FileCollection",
".",
"from_path_by_size",
"(",
"dir_path",
"max_size",
"=",
"100",
"*",
"1024",
"*",
"1024",
")",
"#",
"select",
"by",
"file",
"size",
"from",
"1MB",
"to",
"100MB",
"fc",
"=",
"FileCollection",
".",
"from_path_by_size",
"(",
"dir_path",
"min_size",
"=",
"1024",
"*",
"1024",
"max_size",
"=",
"100",
"*",
"1024",
"*",
"1024",
")"
] |
python
|
train
|
rollbar/pyrollbar
|
rollbar/__init__.py
|
https://github.com/rollbar/pyrollbar/blob/33ef2e723a33d09dd6302f978f4a3908be95b9d2/rollbar/__init__.py#L140-L161
|
def get_request():
"""
    Get the current request object. Implementation varies with
library support. Modified below when we know which framework
is being used.
"""
# TODO(cory): add in a generic _get_locals_request() which
# will iterate up through the call stack and look for a variable
# that appears to be valid request object.
for fn in (_get_bottle_request,
_get_flask_request,
_get_pyramid_request,
_get_pylons_request):
try:
req = fn()
if req is not None:
return req
except:
pass
return None
|
[
"def",
"get_request",
"(",
")",
":",
"# TODO(cory): add in a generic _get_locals_request() which",
"# will iterate up through the call stack and look for a variable",
"# that appears to be valid request object.",
"for",
"fn",
"in",
"(",
"_get_bottle_request",
",",
"_get_flask_request",
",",
"_get_pyramid_request",
",",
"_get_pylons_request",
")",
":",
"try",
":",
"req",
"=",
"fn",
"(",
")",
"if",
"req",
"is",
"not",
"None",
":",
"return",
"req",
"except",
":",
"pass",
"return",
"None"
] |
Get the current request object. Implementation varies with
library support. Modified below when we know which framework
is being used.
|
[
"Get",
"the",
"current",
"request",
"object",
".",
"Implementation",
"varies",
"on",
"library",
"support",
".",
"Modified",
"below",
"when",
"we",
"know",
"which",
"framework",
"is",
"being",
"used",
"."
] |
python
|
test
|
cisco-sas/kitty
|
kitty/model/low_level/aliases.py
|
https://github.com/cisco-sas/kitty/blob/cb0760989dcdfe079e43ac574d872d0b18953a32/kitty/model/low_level/aliases.py#L144-L146
|
def BE8(value, min_value=None, max_value=None, fuzzable=True, name=None, full_range=False):
'''8-bit field, Big endian encoded'''
return UInt8(value, min_value=min_value, max_value=max_value, encoder=ENC_INT_BE, fuzzable=fuzzable, name=name, full_range=full_range)
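# Illustrative usage sketch: BE8 is simply UInt8 with a big-endian integer
# encoder, so a one-byte protocol field in a template could look like this
# (the field name and value are made up for the example).
if __name__ == '__main__':
    flags = BE8(0x7f, name='flags', fuzzable=False)
    # flags.render() would yield the encoded bits for the current state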
|
[
"def",
"BE8",
"(",
"value",
",",
"min_value",
"=",
"None",
",",
"max_value",
"=",
"None",
",",
"fuzzable",
"=",
"True",
",",
"name",
"=",
"None",
",",
"full_range",
"=",
"False",
")",
":",
"return",
"UInt8",
"(",
"value",
",",
"min_value",
"=",
"min_value",
",",
"max_value",
"=",
"max_value",
",",
"encoder",
"=",
"ENC_INT_BE",
",",
"fuzzable",
"=",
"fuzzable",
",",
"name",
"=",
"name",
",",
"full_range",
"=",
"full_range",
")"
] |
8-bit field, Big endian encoded
|
[
"8",
"-",
"bit",
"field",
"Big",
"endian",
"encoded"
] |
python
|
train
|
rndusr/torf
|
torf/_torrent.py
|
https://github.com/rndusr/torf/blob/df0363232daacd3f8c91aafddaa0623b8c28cbd2/torf/_torrent.py#L593-L641
|
def generate(self, callback=None, interval=0):
"""
Hash pieces and report progress to `callback`
This method sets ``pieces`` in :attr:`metainfo`\ ``['info']`` when all
pieces are hashed successfully.
:param callable callback: Callable with signature ``(torrent, filepath,
        pieces_done, pieces_total)``; if `callback` returns anything other
than None, hashing is canceled
:param float interval: Minimum number of seconds between calls to
`callback` (if 0, `callback` is called once per piece)
:raises PathEmptyError: if :attr:`path` contains only empty
files/directories
:raises PathNotFoundError: if :attr:`path` does not exist
:raises ReadError: if :attr:`path` or any file beneath it is not
readable
:return: ``True`` if all pieces were successfully hashed, ``False``
otherwise
"""
if self.path is None:
raise RuntimeError('generate() called with no path specified')
elif self.size <= 0:
raise error.PathEmptyError(self.path)
elif not os.path.exists(self.path):
raise error.PathNotFoundError(self.path)
if callback is not None:
cancel = lambda *status: callback(*status) is not None
else:
cancel = lambda *status: False
if os.path.isfile(self.path):
pieces = self._set_pieces_singlefile()
elif os.path.isdir(self.path):
pieces = self._set_pieces_multifile()
# Iterate over hashed pieces and send status information
last_cb_call = 0
for filepath,pieces_done,pieces_total in pieces:
now = time.time()
if now - last_cb_call >= interval or \
pieces_done >= pieces_total:
last_cb_call = now
if cancel(self, filepath, pieces_done, pieces_total):
return False
return True
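# Illustrative usage sketch (path and tracker are placeholders, so the calls
# are left commented): returning any non-None value from the callback cancels
# hashing, as implemented above.
def progress(torrent, filepath, pieces_done, pieces_total):
    print('%s: %d/%d pieces' % (filepath, pieces_done, pieces_total))
    # return 'stop'  # uncomment to cancel hashing early
# t = Torrent(path='some/dir', trackers=['http://tracker.example/announce'])
# t.generate(callback=progress, interval=1.0)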
|
[
"def",
"generate",
"(",
"self",
",",
"callback",
"=",
"None",
",",
"interval",
"=",
"0",
")",
":",
"if",
"self",
".",
"path",
"is",
"None",
":",
"raise",
"RuntimeError",
"(",
"'generate() called with no path specified'",
")",
"elif",
"self",
".",
"size",
"<=",
"0",
":",
"raise",
"error",
".",
"PathEmptyError",
"(",
"self",
".",
"path",
")",
"elif",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"path",
")",
":",
"raise",
"error",
".",
"PathNotFoundError",
"(",
"self",
".",
"path",
")",
"if",
"callback",
"is",
"not",
"None",
":",
"cancel",
"=",
"lambda",
"*",
"status",
":",
"callback",
"(",
"*",
"status",
")",
"is",
"not",
"None",
"else",
":",
"cancel",
"=",
"lambda",
"*",
"status",
":",
"False",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"self",
".",
"path",
")",
":",
"pieces",
"=",
"self",
".",
"_set_pieces_singlefile",
"(",
")",
"elif",
"os",
".",
"path",
".",
"isdir",
"(",
"self",
".",
"path",
")",
":",
"pieces",
"=",
"self",
".",
"_set_pieces_multifile",
"(",
")",
"# Iterate over hashed pieces and send status information",
"last_cb_call",
"=",
"0",
"for",
"filepath",
",",
"pieces_done",
",",
"pieces_total",
"in",
"pieces",
":",
"now",
"=",
"time",
".",
"time",
"(",
")",
"if",
"now",
"-",
"last_cb_call",
">=",
"interval",
"or",
"pieces_done",
">=",
"pieces_total",
":",
"last_cb_call",
"=",
"now",
"if",
"cancel",
"(",
"self",
",",
"filepath",
",",
"pieces_done",
",",
"pieces_total",
")",
":",
"return",
"False",
"return",
"True"
] |
Hash pieces and report progress to `callback`
This method sets ``pieces`` in :attr:`metainfo`\ ``['info']`` when all
pieces are hashed successfully.
:param callable callback: Callable with signature ``(torrent, filepath,
pieces_done, pieces_total)``; if `callback` returns anything other
than None, hashing is canceled
:param float interval: Minimum number of seconds between calls to
`callback` (if 0, `callback` is called once per piece)
:raises PathEmptyError: if :attr:`path` contains only empty
files/directories
:raises PathNotFoundError: if :attr:`path` does not exist
:raises ReadError: if :attr:`path` or any file beneath it is not
readable
:return: ``True`` if all pieces were successfully hashed, ``False``
otherwise
|
[
"Hash",
"pieces",
"and",
"report",
"progress",
"to",
"callback"
] |
python
|
train
|
tk0miya/tk.phpautodoc
|
src/phply/phpparse.py
|
https://github.com/tk0miya/tk.phpautodoc/blob/cf789f64abaf76351485cee231a075227e665fb6/src/phply/phpparse.py#L378-L384
|
def p_foreach_statement(p):
'''foreach_statement : statement
| COLON inner_statement_list ENDFOREACH SEMI'''
if len(p) == 2:
p[0] = p[1]
else:
p[0] = ast.Block(p[2], lineno=p.lineno(1))
|
[
"def",
"p_foreach_statement",
"(",
"p",
")",
":",
"if",
"len",
"(",
"p",
")",
"==",
"2",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"1",
"]",
"else",
":",
"p",
"[",
"0",
"]",
"=",
"ast",
".",
"Block",
"(",
"p",
"[",
"2",
"]",
",",
"lineno",
"=",
"p",
".",
"lineno",
"(",
"1",
")",
")"
] |
foreach_statement : statement
| COLON inner_statement_list ENDFOREACH SEMI
|
[
"foreach_statement",
":",
"statement",
"|",
"COLON",
"inner_statement_list",
"ENDFOREACH",
"SEMI"
] |
python
|
train
|
mitsei/dlkit
|
dlkit/services/relationship.py
|
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/services/relationship.py#L1129-L1136
|
def save_relationship(self, relationship_form, *args, **kwargs):
"""Pass through to provider RelationshipAdminSession.update_relationship"""
# Implemented from kitosid template for -
# osid.resource.ResourceAdminSession.update_resource
if relationship_form.is_for_update():
return self.update_relationship(relationship_form, *args, **kwargs)
else:
return self.create_relationship(relationship_form, *args, **kwargs)
|
[
"def",
"save_relationship",
"(",
"self",
",",
"relationship_form",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Implemented from kitosid template for -",
"# osid.resource.ResourceAdminSession.update_resource",
"if",
"relationship_form",
".",
"is_for_update",
"(",
")",
":",
"return",
"self",
".",
"update_relationship",
"(",
"relationship_form",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"return",
"self",
".",
"create_relationship",
"(",
"relationship_form",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Pass through to provider RelationshipAdminSession.update_relationship
|
[
"Pass",
"through",
"to",
"provider",
"RelationshipAdminSession",
".",
"update_relationship"
] |
python
|
train
|
biolink/biolink-model
|
metamodel/utils/generator.py
|
https://github.com/biolink/biolink-model/blob/f379e28d5d4085e1115798c6cb28e5acc4dba8b4/metamodel/utils/generator.py#L230-L238
|
def aliased_slot_name(self, slot: SLOT_OR_SLOTNAME) -> str:
""" Return the overloaded slot name -- the alias if one exists otherwise the actual name
@param slot: either a slot name or a definition
@return: overloaded name
"""
if isinstance(slot, str):
slot = self.schema.slots[slot]
return slot.alias if slot.alias else slot.name
|
[
"def",
"aliased_slot_name",
"(",
"self",
",",
"slot",
":",
"SLOT_OR_SLOTNAME",
")",
"->",
"str",
":",
"if",
"isinstance",
"(",
"slot",
",",
"str",
")",
":",
"slot",
"=",
"self",
".",
"schema",
".",
"slots",
"[",
"slot",
"]",
"return",
"slot",
".",
"alias",
"if",
"slot",
".",
"alias",
"else",
"slot",
".",
"name"
] |
Return the overloaded slot name -- the alias if one exists otherwise the actual name
@param slot: either a slot name or a definition
@return: overloaded name
|
[
"Return",
"the",
"overloaded",
"slot",
"name",
"--",
"the",
"alias",
"if",
"one",
"exists",
"otherwise",
"the",
"actual",
"name"
] |
python
|
train
|
googleapis/gax-python
|
google/gax/utils/protobuf.py
|
https://github.com/googleapis/gax-python/blob/309aedfcfd48e4c8fa22dd60e9c84c3cc71bb20e/google/gax/utils/protobuf.py#L42-L91
|
def get(pb_or_dict, key, default=_SENTINEL):
"""Retrieve the given key off of the object.
If a default is specified, return it if the key is not found, otherwise
raise KeyError.
Args:
pb_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
object.
key (str): The key to retrieve from the object in question.
default (Any): If the key is not present on the object, and a default
is set, returns that default instead. A type-appropriate falsy
default is generally recommended, as protobuf messages almost
always have default values for unset values and it is not always
possible to tell the difference between a falsy value and an
unset one. If no default is set, raises KeyError for not found
values.
Returns:
Any: The return value from the underlying message or dict.
Raises:
KeyError: If the key is not found. Note that, for unset values,
messages and dictionaries may not have consistent behavior.
TypeError: If pb_or_dict is not a Message or Mapping.
"""
# We may need to get a nested key. Resolve this.
key, subkey = _resolve_subkeys(key)
    # Attempt to get the value from the two types of objects we know about.
# If we get something else, complain.
if isinstance(pb_or_dict, Message):
answer = getattr(pb_or_dict, key, default)
elif isinstance(pb_or_dict, collections.Mapping):
answer = pb_or_dict.get(key, default)
else:
raise TypeError('Tried to fetch a key %s on an invalid object; '
                    'expected a dict or protobuf message.' % key)
# If the object we got back is our sentinel, raise KeyError; this is
# a "not found" case.
if answer is _SENTINEL:
raise KeyError(key)
# If a subkey exists, call this method recursively against the answer.
if subkey and answer is not default:
return get(answer, subkey, default=default)
# Return the value.
return answer
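# Illustrative worked example (an addition for clarity): nested keys are split
# on '.' by _resolve_subkeys and resolved through the recursive call above, so
# plain dicts and protobuf messages share one lookup idiom.
if __name__ == '__main__':
    book = {'author': {'name': 'Ada'}}
    assert get(book, 'author.name') == 'Ada'
    assert get(book, 'author.email', default=None) is None  # no KeyError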
|
[
"def",
"get",
"(",
"pb_or_dict",
",",
"key",
",",
"default",
"=",
"_SENTINEL",
")",
":",
"# We may need to get a nested key. Resolve this.",
"key",
",",
"subkey",
"=",
"_resolve_subkeys",
"(",
"key",
")",
"# Attempt to get the value from the two types of objects we know baout.",
"# If we get something else, complain.",
"if",
"isinstance",
"(",
"pb_or_dict",
",",
"Message",
")",
":",
"answer",
"=",
"getattr",
"(",
"pb_or_dict",
",",
"key",
",",
"default",
")",
"elif",
"isinstance",
"(",
"pb_or_dict",
",",
"collections",
".",
"Mapping",
")",
":",
"answer",
"=",
"pb_or_dict",
".",
"get",
"(",
"key",
",",
"default",
")",
"else",
":",
"raise",
"TypeError",
"(",
"'Tried to fetch a key %s on an invalid object; '",
"'expected a dict or protobuf message.'",
")",
"# If the object we got back is our sentinel, raise KeyError; this is",
"# a \"not found\" case.",
"if",
"answer",
"is",
"_SENTINEL",
":",
"raise",
"KeyError",
"(",
"key",
")",
"# If a subkey exists, call this method recursively against the answer.",
"if",
"subkey",
"and",
"answer",
"is",
"not",
"default",
":",
"return",
"get",
"(",
"answer",
",",
"subkey",
",",
"default",
"=",
"default",
")",
"# Return the value.",
"return",
"answer"
] |
Retrieve the given key off of the object.
If a default is specified, return it if the key is not found, otherwise
raise KeyError.
Args:
pb_or_dict (Union[~google.protobuf.message.Message, Mapping]): the
object.
key (str): The key to retrieve from the object in question.
default (Any): If the key is not present on the object, and a default
is set, returns that default instead. A type-appropriate falsy
default is generally recommended, as protobuf messages almost
always have default values for unset values and it is not always
possible to tell the difference between a falsy value and an
unset one. If no default is set, raises KeyError for not found
values.
Returns:
Any: The return value from the underlying message or dict.
Raises:
KeyError: If the key is not found. Note that, for unset values,
messages and dictionaries may not have consistent behavior.
TypeError: If pb_or_dict is not a Message or Mapping.
|
[
"Retrieve",
"the",
"given",
"key",
"off",
"of",
"the",
"object",
"."
] |
python
|
train
|
msikma/kanaconv
|
kanaconv/converter.py
|
https://github.com/msikma/kanaconv/blob/194f142e616ab5dd6d13a687b96b9f8abd1b4ea8/kanaconv/converter.py#L435-L445
|
def _add_unknown_char(self, string):
'''
Adds an unknown character to the stack.
'''
if self.has_xvowel:
# Ensure an xvowel gets printed if we've got an active
# one right now.
self._promote_solitary_xvowel()
self.unknown_char = string
self._flush_char()
|
[
"def",
"_add_unknown_char",
"(",
"self",
",",
"string",
")",
":",
"if",
"self",
".",
"has_xvowel",
":",
"# Ensure an xvowel gets printed if we've got an active",
"# one right now.",
"self",
".",
"_promote_solitary_xvowel",
"(",
")",
"self",
".",
"unknown_char",
"=",
"string",
"self",
".",
"_flush_char",
"(",
")"
] |
Adds an unknown character to the stack.
|
[
"Adds",
"an",
"unknown",
"character",
"to",
"the",
"stack",
"."
] |
python
|
train
|
pydata/xarray
|
xarray/core/rolling.py
|
https://github.com/pydata/xarray/blob/6d93a95d05bdbfc33fff24064f67d29dd891ab58/xarray/core/rolling.py#L249-L301
|
def _bottleneck_reduce(cls, func):
"""
Methods to return a wrapped function for any function `func` for
    bottleneck method, except for `median`.
"""
def wrapped_func(self, **kwargs):
from .dataarray import DataArray
# bottleneck doesn't allow min_count to be 0, although it should
# work the same as if min_count = 1
if self.min_periods is not None and self.min_periods == 0:
min_count = 1
else:
min_count = self.min_periods
axis = self.obj.get_axis_num(self.dim)
padded = self.obj.variable
if self.center:
if (LooseVersion(np.__version__) < LooseVersion('1.13') and
self.obj.dtype.kind == 'b'):
# with numpy < 1.13 bottleneck cannot handle np.nan-Boolean
# mixed array correctly. We cast boolean array to float.
padded = padded.astype(float)
if isinstance(padded.data, dask_array_type):
                # Workaround to make the padded chunk size larger than
# self.window-1
shift = - (self.window + 1) // 2
offset = (self.window - 1) // 2
valid = (slice(None), ) * axis + (
slice(offset, offset + self.obj.shape[axis]), )
else:
shift = (-self.window // 2) + 1
valid = (slice(None), ) * axis + (slice(-shift, None), )
padded = padded.pad_with_fill_value({self.dim: (0, -shift)})
if isinstance(padded.data, dask_array_type):
values = dask_rolling_wrapper(func, padded,
window=self.window,
min_count=min_count,
axis=axis)
else:
values = func(padded.data, window=self.window,
min_count=min_count, axis=axis)
if self.center:
values = values[valid]
result = DataArray(values, self.obj.coords)
return result
return wrapped_func
|
[
"def",
"_bottleneck_reduce",
"(",
"cls",
",",
"func",
")",
":",
"def",
"wrapped_func",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
".",
"dataarray",
"import",
"DataArray",
"# bottleneck doesn't allow min_count to be 0, although it should",
"# work the same as if min_count = 1",
"if",
"self",
".",
"min_periods",
"is",
"not",
"None",
"and",
"self",
".",
"min_periods",
"==",
"0",
":",
"min_count",
"=",
"1",
"else",
":",
"min_count",
"=",
"self",
".",
"min_periods",
"axis",
"=",
"self",
".",
"obj",
".",
"get_axis_num",
"(",
"self",
".",
"dim",
")",
"padded",
"=",
"self",
".",
"obj",
".",
"variable",
"if",
"self",
".",
"center",
":",
"if",
"(",
"LooseVersion",
"(",
"np",
".",
"__version__",
")",
"<",
"LooseVersion",
"(",
"'1.13'",
")",
"and",
"self",
".",
"obj",
".",
"dtype",
".",
"kind",
"==",
"'b'",
")",
":",
"# with numpy < 1.13 bottleneck cannot handle np.nan-Boolean",
"# mixed array correctly. We cast boolean array to float.",
"padded",
"=",
"padded",
".",
"astype",
"(",
"float",
")",
"if",
"isinstance",
"(",
"padded",
".",
"data",
",",
"dask_array_type",
")",
":",
"# Workaround to make the padded chunk size is larger than",
"# self.window-1",
"shift",
"=",
"-",
"(",
"self",
".",
"window",
"+",
"1",
")",
"//",
"2",
"offset",
"=",
"(",
"self",
".",
"window",
"-",
"1",
")",
"//",
"2",
"valid",
"=",
"(",
"slice",
"(",
"None",
")",
",",
")",
"*",
"axis",
"+",
"(",
"slice",
"(",
"offset",
",",
"offset",
"+",
"self",
".",
"obj",
".",
"shape",
"[",
"axis",
"]",
")",
",",
")",
"else",
":",
"shift",
"=",
"(",
"-",
"self",
".",
"window",
"//",
"2",
")",
"+",
"1",
"valid",
"=",
"(",
"slice",
"(",
"None",
")",
",",
")",
"*",
"axis",
"+",
"(",
"slice",
"(",
"-",
"shift",
",",
"None",
")",
",",
")",
"padded",
"=",
"padded",
".",
"pad_with_fill_value",
"(",
"{",
"self",
".",
"dim",
":",
"(",
"0",
",",
"-",
"shift",
")",
"}",
")",
"if",
"isinstance",
"(",
"padded",
".",
"data",
",",
"dask_array_type",
")",
":",
"values",
"=",
"dask_rolling_wrapper",
"(",
"func",
",",
"padded",
",",
"window",
"=",
"self",
".",
"window",
",",
"min_count",
"=",
"min_count",
",",
"axis",
"=",
"axis",
")",
"else",
":",
"values",
"=",
"func",
"(",
"padded",
".",
"data",
",",
"window",
"=",
"self",
".",
"window",
",",
"min_count",
"=",
"min_count",
",",
"axis",
"=",
"axis",
")",
"if",
"self",
".",
"center",
":",
"values",
"=",
"values",
"[",
"valid",
"]",
"result",
"=",
"DataArray",
"(",
"values",
",",
"self",
".",
"obj",
".",
"coords",
")",
"return",
"result",
"return",
"wrapped_func"
] |
Methods to return a wrapped function for any function `func` for
bottleneck method, except for `median`.
|
[
"Methods",
"to",
"return",
"a",
"wrapped",
"function",
"for",
"any",
"function",
"func",
"for",
"bottoleneck",
"method",
"except",
"for",
"median",
"."
] |
python
|
train
|
RPi-Distro/python-gpiozero
|
gpiozero/boards.py
|
https://github.com/RPi-Distro/python-gpiozero/blob/7b67374fd0c8c4fde5586d9bad9531f076db9c0c/gpiozero/boards.py#L110-L116
|
def on(self):
"""
Turn all the output devices on.
"""
for device in self:
if isinstance(device, (OutputDevice, CompositeOutputDevice)):
device.on()
|
[
"def",
"on",
"(",
"self",
")",
":",
"for",
"device",
"in",
"self",
":",
"if",
"isinstance",
"(",
"device",
",",
"(",
"OutputDevice",
",",
"CompositeOutputDevice",
")",
")",
":",
"device",
".",
"on",
"(",
")"
] |
Turn all the output devices on.
|
[
"Turn",
"all",
"the",
"output",
"devices",
"on",
"."
] |
python
|
train
|
radjkarl/imgProcessor
|
imgProcessor/measure/linePlot.py
|
https://github.com/radjkarl/imgProcessor/blob/7c5a28718f81c01a430152c60a686ac50afbfd7c/imgProcessor/measure/linePlot.py#L10-L28
|
def linePlot(img, x0, y0, x1, y1, resolution=None, order=3):
'''
returns [img] intensity values along line
defined by [x0, y0, x1, y1]
    resolution ... number of data points to evaluate
order ... interpolation precision
'''
if resolution is None:
resolution = int( ((x1-x0)**2 + (y1-y0)**2 )**0.5 )
if order == 0:
x = np.linspace(x0, x1, resolution, dtype=int)
y = np.linspace(y0, y1, resolution, dtype=int)
return img[y, x]
x = np.linspace(x0, x1, resolution)
y = np.linspace(y0, y1, resolution)
return map_coordinates(img, np.vstack((y,x)), order=order)
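# Illustrative usage sketch: sample the main diagonal of a synthetic ramp
# image; order=1 keeps the interpolation linear so the output is easy to
# verify by hand.
if __name__ == '__main__':
    import numpy as np
    img = np.arange(100.0).reshape(10, 10)
    profile = linePlot(img, 0, 0, 9, 9, resolution=10, order=1)
    print(profile)  # a monotone ramp from img[0, 0] to img[9, 9]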
|
[
"def",
"linePlot",
"(",
"img",
",",
"x0",
",",
"y0",
",",
"x1",
",",
"y1",
",",
"resolution",
"=",
"None",
",",
"order",
"=",
"3",
")",
":",
"if",
"resolution",
"is",
"None",
":",
"resolution",
"=",
"int",
"(",
"(",
"(",
"x1",
"-",
"x0",
")",
"**",
"2",
"+",
"(",
"y1",
"-",
"y0",
")",
"**",
"2",
")",
"**",
"0.5",
")",
"if",
"order",
"==",
"0",
":",
"x",
"=",
"np",
".",
"linspace",
"(",
"x0",
",",
"x1",
",",
"resolution",
",",
"dtype",
"=",
"int",
")",
"y",
"=",
"np",
".",
"linspace",
"(",
"y0",
",",
"y1",
",",
"resolution",
",",
"dtype",
"=",
"int",
")",
"return",
"img",
"[",
"y",
",",
"x",
"]",
"x",
"=",
"np",
".",
"linspace",
"(",
"x0",
",",
"x1",
",",
"resolution",
")",
"y",
"=",
"np",
".",
"linspace",
"(",
"y0",
",",
"y1",
",",
"resolution",
")",
"return",
"map_coordinates",
"(",
"img",
",",
"np",
".",
"vstack",
"(",
"(",
"y",
",",
"x",
")",
")",
",",
"order",
"=",
"order",
")"
] |
returns [img] intensity values along line
defined by [x0, y0, x1, y1]
resolution ... number of data points to evaluate
order ... interpolation precision
|
[
"returns",
"[",
"img",
"]",
"intensity",
"values",
"along",
"line",
"defined",
"by",
"[",
"x0",
"y0",
"x1",
"y1",
"]",
"resolution",
"...",
"number",
"or",
"data",
"points",
"to",
"evaluate",
"order",
"...",
"interpolation",
"precision"
] |
python
|
train
|
hollenstein/maspy
|
maspy/sil.py
|
https://github.com/hollenstein/maspy/blob/f15fcfd24df306d8420540460d902aa3073ec133/maspy/sil.py#L235-L250
|
def modSymbolsFromLabelInfo(labelDescriptor):
"""Returns a set of all modiciation symbols which were used in the
labelDescriptor
:param labelDescriptor: :class:`LabelDescriptor` describes the label setup
of an experiment
:returns: #TODO: docstring
"""
modSymbols = set()
for labelStateEntry in viewvalues(labelDescriptor.labels):
for labelPositionEntry in viewvalues(labelStateEntry['aminoAcidLabels']):
for modSymbol in aux.toList(labelPositionEntry):
if modSymbol != '':
modSymbols.add(modSymbol)
return modSymbols
|
[
"def",
"modSymbolsFromLabelInfo",
"(",
"labelDescriptor",
")",
":",
"modSymbols",
"=",
"set",
"(",
")",
"for",
"labelStateEntry",
"in",
"viewvalues",
"(",
"labelDescriptor",
".",
"labels",
")",
":",
"for",
"labelPositionEntry",
"in",
"viewvalues",
"(",
"labelStateEntry",
"[",
"'aminoAcidLabels'",
"]",
")",
":",
"for",
"modSymbol",
"in",
"aux",
".",
"toList",
"(",
"labelPositionEntry",
")",
":",
"if",
"modSymbol",
"!=",
"''",
":",
"modSymbols",
".",
"add",
"(",
"modSymbol",
")",
"return",
"modSymbols"
] |
Returns a set of all modification symbols which were used in the
labelDescriptor
:param labelDescriptor: :class:`LabelDescriptor` describes the label setup
of an experiment
:returns: #TODO: docstring
|
[
"Returns",
"a",
"set",
"of",
"all",
"modiciation",
"symbols",
"which",
"were",
"used",
"in",
"the",
"labelDescriptor"
] |
python
|
train
|
FNNDSC/med2image
|
med2image/systemMisc.py
|
https://github.com/FNNDSC/med2image/blob/638d5d230de47608af20f9764acf8e382c2bf2ff/med2image/systemMisc.py#L160-L181
|
def com_find(ar_grid):
"""
Find the center of mass in array grid <ar_grid>. Mass elements
are grid index values.
Return an array, in format (x, y), i.e. col, row!
"""
f_x = 0
f_y = 0
f_m = 0
for i in range(len(ar_grid)):
for j in range(len(ar_grid[i])):
if ar_grid[i][j]:
# Since python arrays are zero indexed, we need to offset
# the loop counters by 1 to account for mass in the 1st
# column.
f_x += (j+1) * ar_grid[i][j]
f_y += (i+1) * ar_grid[i][j]
f_m += ar_grid[i][j]
f_com = array( (float(f_x)/f_m , float(f_y)/f_m) )
return f_com
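# Illustrative worked example: a single unit mass at row 1, column 2 gives a
# center of mass of (col+1, row+1) = (3.0, 2.0), returned in (x, y) order.
if __name__ == '__main__':
    grid = [[0, 0, 0],
            [0, 0, 1],
            [0, 0, 0]]
    print(com_find(grid))  # -> [3. 2.]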
|
[
"def",
"com_find",
"(",
"ar_grid",
")",
":",
"f_x",
"=",
"0",
"f_y",
"=",
"0",
"f_m",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"ar_grid",
")",
")",
":",
"for",
"j",
"in",
"range",
"(",
"len",
"(",
"ar_grid",
"[",
"i",
"]",
")",
")",
":",
"if",
"ar_grid",
"[",
"i",
"]",
"[",
"j",
"]",
":",
"# Since python arrays are zero indexed, we need to offset",
"# the loop counters by 1 to account for mass in the 1st",
"# column.",
"f_x",
"+=",
"(",
"j",
"+",
"1",
")",
"*",
"ar_grid",
"[",
"i",
"]",
"[",
"j",
"]",
"f_y",
"+=",
"(",
"i",
"+",
"1",
")",
"*",
"ar_grid",
"[",
"i",
"]",
"[",
"j",
"]",
"f_m",
"+=",
"ar_grid",
"[",
"i",
"]",
"[",
"j",
"]",
"f_com",
"=",
"array",
"(",
"(",
"float",
"(",
"f_x",
")",
"/",
"f_m",
",",
"float",
"(",
"f_y",
")",
"/",
"f_m",
")",
")",
"return",
"f_com"
] |
Find the center of mass in array grid <ar_grid>. Mass elements
are grid index values.
Return an array, in format (x, y), i.e. col, row!
|
[
"Find",
"the",
"center",
"of",
"mass",
"in",
"array",
"grid",
"<ar_grid",
">",
".",
"Mass",
"elements",
"are",
"grid",
"index",
"values",
"."
] |
python
|
train
|
Chilipp/psyplot
|
psyplot/data.py
|
https://github.com/Chilipp/psyplot/blob/75a0a15a9a1dd018e79d2df270d56c4bf5f311d5/psyplot/data.py#L52-L74
|
def _no_auto_update_getter(self):
""":class:`bool`. Boolean controlling whether the :meth:`start_update`
method is automatically called by the :meth:`update` method
Examples
--------
You can disable the automatic update via
>>> with data.no_auto_update:
... data.update(time=1)
... data.start_update()
To permanently disable the automatic update, simply set
>>> data.no_auto_update = True
>>> data.update(time=1)
>>> data.no_auto_update = False # reenable automatical update"""
if getattr(self, '_no_auto_update', None) is not None:
return self._no_auto_update
else:
self._no_auto_update = utils._TempBool()
return self._no_auto_update
|
[
"def",
"_no_auto_update_getter",
"(",
"self",
")",
":",
"if",
"getattr",
"(",
"self",
",",
"'_no_auto_update'",
",",
"None",
")",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_no_auto_update",
"else",
":",
"self",
".",
"_no_auto_update",
"=",
"utils",
".",
"_TempBool",
"(",
")",
"return",
"self",
".",
"_no_auto_update"
] |
:class:`bool`. Boolean controlling whether the :meth:`start_update`
method is automatically called by the :meth:`update` method
Examples
--------
You can disable the automatic update via
>>> with data.no_auto_update:
... data.update(time=1)
... data.start_update()
To permanently disable the automatic update, simply set
>>> data.no_auto_update = True
>>> data.update(time=1)
>>> data.no_auto_update = False # reenable automatical update
|
[
":",
"class",
":",
"bool",
".",
"Boolean",
"controlling",
"whether",
"the",
":",
"meth",
":",
"start_update",
"method",
"is",
"automatically",
"called",
"by",
"the",
":",
"meth",
":",
"update",
"method"
] |
python
|
train
|
crs4/pydoop
|
pydoop/hdfs/__init__.py
|
https://github.com/crs4/pydoop/blob/f375be2a06f9c67eaae3ce6f605195dbca143b2b/pydoop/hdfs/__init__.py#L129-L143
|
def dump(data, hdfs_path, **kwargs):
"""\
Write ``data`` to ``hdfs_path``.
Keyword arguments are passed to :func:`open`, except for ``mode``,
which is forced to ``"w"`` (or ``"wt"`` for text data).
"""
kwargs["mode"] = "w" if isinstance(data, bintype) else "wt"
with open(hdfs_path, **kwargs) as fo:
i = 0
bufsize = common.BUFSIZE
while i < len(data):
fo.write(data[i: i + bufsize])
i += bufsize
fo.fs.close()
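# Illustrative usage sketch (paths are placeholders): the write mode is
# inferred from the payload type, so bytes and str use the same call.
if __name__ == '__main__':
    dump(b'\x00\x01\x02', '/tmp/blob.bin')  # binary data -> mode "w"
    dump('hello\n', '/tmp/hello.txt')       # text data   -> mode "wt"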
|
[
"def",
"dump",
"(",
"data",
",",
"hdfs_path",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"\"mode\"",
"]",
"=",
"\"w\"",
"if",
"isinstance",
"(",
"data",
",",
"bintype",
")",
"else",
"\"wt\"",
"with",
"open",
"(",
"hdfs_path",
",",
"*",
"*",
"kwargs",
")",
"as",
"fo",
":",
"i",
"=",
"0",
"bufsize",
"=",
"common",
".",
"BUFSIZE",
"while",
"i",
"<",
"len",
"(",
"data",
")",
":",
"fo",
".",
"write",
"(",
"data",
"[",
"i",
":",
"i",
"+",
"bufsize",
"]",
")",
"i",
"+=",
"bufsize",
"fo",
".",
"fs",
".",
"close",
"(",
")"
] |
\
Write ``data`` to ``hdfs_path``.
Keyword arguments are passed to :func:`open`, except for ``mode``,
which is forced to ``"w"`` (or ``"wt"`` for text data).
|
[
"\\",
"Write",
"data",
"to",
"hdfs_path",
"."
] |
python
|
train
|
quantumlib/Cirq
|
cirq/experiments/qubit_characterizations.py
|
https://github.com/quantumlib/Cirq/blob/0827da80dd7880e5b923eb69407e980ed9bc0bd2/cirq/experiments/qubit_characterizations.py#L82-L94
|
def plot(self, **plot_kwargs: Any) -> None:
"""Plots the average ground state probability vs the number of
Cliffords in the RB study.
Args:
**plot_kwargs: Arguments to be passed to matplotlib.pyplot.plot.
"""
fig = plt.figure()
plt.plot(self._num_cfds_seq, self._gnd_state_probs, 'ro-',
figure=fig, **plot_kwargs)
plt.xlabel(r"Number of Cliffords", figure=fig)
plt.ylabel('Ground State Probability', figure=fig)
fig.show()
|
[
"def",
"plot",
"(",
"self",
",",
"*",
"*",
"plot_kwargs",
":",
"Any",
")",
"->",
"None",
":",
"fig",
"=",
"plt",
".",
"figure",
"(",
")",
"plt",
".",
"plot",
"(",
"self",
".",
"_num_cfds_seq",
",",
"self",
".",
"_gnd_state_probs",
",",
"'ro-'",
",",
"figure",
"=",
"fig",
",",
"*",
"*",
"plot_kwargs",
")",
"plt",
".",
"xlabel",
"(",
"r\"Number of Cliffords\"",
",",
"figure",
"=",
"fig",
")",
"plt",
".",
"ylabel",
"(",
"'Ground State Probability'",
",",
"figure",
"=",
"fig",
")",
"fig",
".",
"show",
"(",
")"
] |
Plots the average ground state probability vs the number of
Cliffords in the RB study.
Args:
**plot_kwargs: Arguments to be passed to matplotlib.pyplot.plot.
|
[
"Plots",
"the",
"average",
"ground",
"state",
"probability",
"vs",
"the",
"number",
"of",
"Cliffords",
"in",
"the",
"RB",
"study",
"."
] |
python
|
train
|
BYU-PCCL/holodeck
|
holodeck/command.py
|
https://github.com/BYU-PCCL/holodeck/blob/01acd4013f5acbd9f61fbc9caaafe19975e8b121/holodeck/command.py#L122-L130
|
def set_type(self, agent_type):
"""Set the type of agent to spawn in Holodeck. Currently accepted agents are: DiscreteSphereAgent, UAVAgent,
and AndroidAgent.
Args:
agent_type (str): The type of agent to spawn.
"""
type_str = SpawnAgentCommand.__type_keys[agent_type]
self.add_string_parameters(type_str)
|
[
"def",
"set_type",
"(",
"self",
",",
"agent_type",
")",
":",
"type_str",
"=",
"SpawnAgentCommand",
".",
"__type_keys",
"[",
"agent_type",
"]",
"self",
".",
"add_string_parameters",
"(",
"type_str",
")"
] |
Set the type of agent to spawn in Holodeck. Currently accepted agents are: DiscreteSphereAgent, UAVAgent,
and AndroidAgent.
Args:
agent_type (str): The type of agent to spawn.
|
[
"Set",
"the",
"type",
"of",
"agent",
"to",
"spawn",
"in",
"Holodeck",
".",
"Currently",
"accepted",
"agents",
"are",
":",
"DiscreteSphereAgent",
"UAVAgent",
"and",
"AndroidAgent",
"."
] |
python
|
train
|
pydanny/cookiecutter-django
|
hooks/post_gen_project.py
|
https://github.com/pydanny/cookiecutter-django/blob/bb9b482e96d1966e20745eeea87a8aa10ed1c861/hooks/post_gen_project.py#L107-L129
|
def generate_random_string(
length, using_digits=False, using_ascii_letters=False, using_punctuation=False
):
"""
Example:
        opting for a 50 symbol-long [a-z][A-Z][0-9] string
        would yield log_2((26+26+10)^50) ~= 298 bit strength.
"""
if not using_sysrandom:
return None
symbols = []
if using_digits:
symbols += string.digits
if using_ascii_letters:
symbols += string.ascii_letters
if using_punctuation:
all_punctuation = set(string.punctuation)
# These symbols can cause issues in environment variables
unsuitable = {"'", '"', "\\", "$"}
suitable = all_punctuation.difference(unsuitable)
symbols += "".join(suitable)
return "".join([random.choice(symbols) for _ in range(length)])
|
[
"def",
"generate_random_string",
"(",
"length",
",",
"using_digits",
"=",
"False",
",",
"using_ascii_letters",
"=",
"False",
",",
"using_punctuation",
"=",
"False",
")",
":",
"if",
"not",
"using_sysrandom",
":",
"return",
"None",
"symbols",
"=",
"[",
"]",
"if",
"using_digits",
":",
"symbols",
"+=",
"string",
".",
"digits",
"if",
"using_ascii_letters",
":",
"symbols",
"+=",
"string",
".",
"ascii_letters",
"if",
"using_punctuation",
":",
"all_punctuation",
"=",
"set",
"(",
"string",
".",
"punctuation",
")",
"# These symbols can cause issues in environment variables",
"unsuitable",
"=",
"{",
"\"'\"",
",",
"'\"'",
",",
"\"\\\\\"",
",",
"\"$\"",
"}",
"suitable",
"=",
"all_punctuation",
".",
"difference",
"(",
"unsuitable",
")",
"symbols",
"+=",
"\"\"",
".",
"join",
"(",
"suitable",
")",
"return",
"\"\"",
".",
"join",
"(",
"[",
"random",
".",
"choice",
"(",
"symbols",
")",
"for",
"_",
"in",
"range",
"(",
"length",
")",
"]",
")"
] |
Example:
opting for a 50 symbol-long [a-z][A-Z][0-9] string
would yield log_2((26+26+10)^50) ~= 298 bit strength.
|
[
"Example",
":",
"opting",
"out",
"for",
"50",
"symbol",
"-",
"long",
"[",
"a",
"-",
"z",
"]",
"[",
"A",
"-",
"Z",
"]",
"[",
"0",
"-",
"9",
"]",
"string",
"would",
"yield",
"log_2",
"((",
"26",
"+",
"26",
"+",
"50",
")",
"^50",
")",
"~",
"=",
"334",
"bit",
"strength",
"."
] |
python
|
test
|
kolypto/py-exdoc
|
exdoc/py/__init__.py
|
https://github.com/kolypto/py-exdoc/blob/516526c01c203271410e7d7340024ef9f0bfa46a/exdoc/py/__init__.py#L159-L181
|
def _docspec(func, module=None, qualname=None, of_class=None):
""" For a callable, get the full spec by merging doc_parse() and argspec()
:type func: Callable
:rtype: data.FDocstring
"""
sp = _argspec(func)
doc = _doc_parse(getdoc(func), module=module, qualname=qualname)
# Merge args
doc_map = {a.name: a for a in doc.args}
doc.args = [data.Argument(a, doc_map.get(a.name.lstrip('*'), None)) for a in sp]
# Args shift: dump `self`
if (six.PY2 and inspect.ismethod(func)) or \
(six.PY3 and (inspect.isroutine(func) and of_class is not None) and (of_class is not None and not is_method_static(of_class, func.__name__))):
doc.args = doc.args[1:]
# Signature
doc.update_signature()
# Finish
return doc
|
[
"def",
"_docspec",
"(",
"func",
",",
"module",
"=",
"None",
",",
"qualname",
"=",
"None",
",",
"of_class",
"=",
"None",
")",
":",
"sp",
"=",
"_argspec",
"(",
"func",
")",
"doc",
"=",
"_doc_parse",
"(",
"getdoc",
"(",
"func",
")",
",",
"module",
"=",
"module",
",",
"qualname",
"=",
"qualname",
")",
"# Merge args",
"doc_map",
"=",
"{",
"a",
".",
"name",
":",
"a",
"for",
"a",
"in",
"doc",
".",
"args",
"}",
"doc",
".",
"args",
"=",
"[",
"data",
".",
"Argument",
"(",
"a",
",",
"doc_map",
".",
"get",
"(",
"a",
".",
"name",
".",
"lstrip",
"(",
"'*'",
")",
",",
"None",
")",
")",
"for",
"a",
"in",
"sp",
"]",
"# Args shift: dump `self`",
"if",
"(",
"six",
".",
"PY2",
"and",
"inspect",
".",
"ismethod",
"(",
"func",
")",
")",
"or",
"(",
"six",
".",
"PY3",
"and",
"(",
"inspect",
".",
"isroutine",
"(",
"func",
")",
"and",
"of_class",
"is",
"not",
"None",
")",
"and",
"(",
"of_class",
"is",
"not",
"None",
"and",
"not",
"is_method_static",
"(",
"of_class",
",",
"func",
".",
"__name__",
")",
")",
")",
":",
"doc",
".",
"args",
"=",
"doc",
".",
"args",
"[",
"1",
":",
"]",
"# Signature",
"doc",
".",
"update_signature",
"(",
")",
"# Finish",
"return",
"doc"
] |
For a callable, get the full spec by merging doc_parse() and argspec()
:type func: Callable
:rtype: data.FDocstring
|
[
"For",
"a",
"callable",
"get",
"the",
"full",
"spec",
"by",
"merging",
"doc_parse",
"()",
"and",
"argspec",
"()"
] |
python
|
train
|
epfl-idevelop/epfl-ldap
|
epflldap/ldap_search.py
|
https://github.com/epfl-idevelop/epfl-ldap/blob/bebb94da3609d358bd83f31672eeaddcda872c5d/epflldap/ldap_search.py#L137-L150
|
def get_username(sciper):
"""
Return username of user
"""
attribute = 'uid'
response = LDAP_search(
pattern_search='(uniqueIdentifier={})'.format(sciper),
attribute=attribute
)
try:
username = get_attribute(response, attribute)
except Exception:
raise EpflLdapException("No username corresponds to sciper {}".format(sciper))
return username
|
[
"def",
"get_username",
"(",
"sciper",
")",
":",
"attribute",
"=",
"'uid'",
"response",
"=",
"LDAP_search",
"(",
"pattern_search",
"=",
"'(uniqueIdentifier={})'",
".",
"format",
"(",
"sciper",
")",
",",
"attribute",
"=",
"attribute",
")",
"try",
":",
"username",
"=",
"get_attribute",
"(",
"response",
",",
"attribute",
")",
"except",
"Exception",
":",
"raise",
"EpflLdapException",
"(",
"\"No username corresponds to sciper {}\"",
".",
"format",
"(",
"sciper",
")",
")",
"return",
"username"
] |
Return username of user
|
[
"Return",
"username",
"of",
"user"
] |
python
|
train
|
jason-weirather/py-seq-tools
|
seqtools/format/sam/bam/bamindex.py
|
https://github.com/jason-weirather/py-seq-tools/blob/f642c2c73ffef2acc83656a78059a476fc734ca1/seqtools/format/sam/bam/bamindex.py#L163-L171
|
def get_range_start_line_number(self,rng):
"""
.. warning:: not implemented
"""
sys.stderr.write("error unimplemented get_range_start_line\n")
sys.exit()
for i in range(0,len(self._lines)):
if rng.cmp(self._lines[i]['rng'])==0: return i+1
return None
|
[
"def",
"get_range_start_line_number",
"(",
"self",
",",
"rng",
")",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"error unimplemented get_range_start_line\\n\"",
")",
"sys",
".",
"exit",
"(",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"self",
".",
"_lines",
")",
")",
":",
"if",
"rng",
".",
"cmp",
"(",
"self",
".",
"_lines",
"[",
"i",
"]",
"[",
"'rng'",
"]",
")",
"==",
"0",
":",
"return",
"i",
"+",
"1",
"return",
"None"
] |
.. warning:: not implemented
|
[
"..",
"warning",
"::",
"not",
"implemented"
] |
python
|
train
|
Rapptz/discord.py
|
discord/client.py
|
https://github.com/Rapptz/discord.py/blob/05d4f7f9620ef33635d6ac965b26528e09cdaf5b/discord/client.py#L881-L913
|
async def fetch_guild(self, guild_id):
"""|coro|
Retrieves a :class:`.Guild` from an ID.
.. note::
Using this, you will not receive :attr:`.Guild.channels`, :class:`.Guild.members`,
:attr:`.Member.activity` and :attr:`.Member.voice` per :class:`.Member`.
.. note::
This method is an API call. For general usage, consider :meth:`get_guild` instead.
Parameters
-----------
guild_id: :class:`int`
The guild's ID to fetch from.
Raises
------
Forbidden
You do not have access to the guild.
HTTPException
Getting the guild failed.
Returns
--------
:class:`.Guild`
The guild from the ID.
"""
data = await self.http.get_guild(guild_id)
return Guild(data=data, state=self._connection)
|
[
"async",
"def",
"fetch_guild",
"(",
"self",
",",
"guild_id",
")",
":",
"data",
"=",
"await",
"self",
".",
"http",
".",
"get_guild",
"(",
"guild_id",
")",
"return",
"Guild",
"(",
"data",
"=",
"data",
",",
"state",
"=",
"self",
".",
"_connection",
")"
] |
|coro|
Retrieves a :class:`.Guild` from an ID.
.. note::
Using this, you will not receive :attr:`.Guild.channels`, :attr:`.Guild.members`,
:attr:`.Member.activity` and :attr:`.Member.voice` per :class:`.Member`.
.. note::
This method is an API call. For general usage, consider :meth:`get_guild` instead.
Parameters
-----------
guild_id: :class:`int`
The guild's ID to fetch from.
Raises
------
Forbidden
You do not have access to the guild.
HTTPException
Getting the guild failed.
Returns
--------
:class:`.Guild`
The guild from the ID.
|
[
"|coro|"
] |
python
|
train
|
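A short sketch of how fetch_guild is typically called, assuming a running bot; the guild ID and token are hypothetical, and get_guild would read the local cache instead of issuing the extra API call:
import discord

client = discord.Client()

@client.event
async def on_ready():
    # fetch_guild always hits the HTTP API, unlike the cached get_guild
    guild = await client.fetch_guild(81384788765712384)  # hypothetical ID
    print(guild.name)

client.run('token')  # hypothetical bot token
|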
ssokolow/fastdupes
|
fastdupes.py
|
https://github.com/ssokolow/fastdupes/blob/0334545885445834307c075a445fba9fe6f0c9e7/fastdupes.py#L217-L263
|
def groupBy(groups_in, classifier, fun_desc='?', keep_uniques=False,
*args, **kwargs):
"""Subdivide groups of paths according to a function.
:param groups_in: Grouped sets of paths.
:type groups_in: :class:`~__builtins__.dict` of iterables
:param classifier: Function to group a list of paths by some attribute.
:type classifier: ``function(list, *args, **kwargs) -> str``
:param fun_desc: Human-readable term for what the classifier operates on.
(Used in log messages)
:type fun_desc: :class:`~__builtins__.str`
:param keep_uniques: If ``False``, discard groups with only one member.
:type keep_uniques: :class:`~__builtins__.bool`
:returns: A dict mapping classifier keys to groups of matches.
:rtype: :class:`~__builtins__.dict`
:attention: Grouping functions generally use a :class:`~__builtins__.set`
``groups`` as extra protection against accidentally counting a given
file twice. (Complementary to use of :func:`os.path.realpath` in
:func:`~fastdupes.getPaths`)
.. todo:: Find some way to bring back the file-by-file status text
"""
groups, count, group_count = {}, 0, len(groups_in)
for pos, paths in enumerate(groups_in.values()):
out.write("Subdividing group %d of %d by %s... (%d files examined, %d "
"in current group)" % (
pos + 1, group_count, fun_desc, count, len(paths)
))
for key, group in classifier(paths, *args, **kwargs).items():
groups.setdefault(key, set()).update(group)
count += len(group)
if not keep_uniques:
# Return only the groups with more than one file.
groups = dict([(x, groups[x]) for x in groups if len(groups[x]) > 1])
out.write("Found %s sets of files with identical %s. (%d files examined)"
% (len(groups), fun_desc, count), newline=True)
return groups
|
[
"def",
"groupBy",
"(",
"groups_in",
",",
"classifier",
",",
"fun_desc",
"=",
"'?'",
",",
"keep_uniques",
"=",
"False",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"groups",
",",
"count",
",",
"group_count",
"=",
"{",
"}",
",",
"0",
",",
"len",
"(",
"groups_in",
")",
"for",
"pos",
",",
"paths",
"in",
"enumerate",
"(",
"groups_in",
".",
"values",
"(",
")",
")",
":",
"out",
".",
"write",
"(",
"\"Subdividing group %d of %d by %s... (%d files examined, %d \"",
"\"in current group)\"",
"%",
"(",
"pos",
"+",
"1",
",",
"group_count",
",",
"fun_desc",
",",
"count",
",",
"len",
"(",
"paths",
")",
")",
")",
"for",
"key",
",",
"group",
"in",
"classifier",
"(",
"paths",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
".",
"items",
"(",
")",
":",
"groups",
".",
"setdefault",
"(",
"key",
",",
"set",
"(",
")",
")",
".",
"update",
"(",
"group",
")",
"count",
"+=",
"len",
"(",
"group",
")",
"if",
"not",
"keep_uniques",
":",
"# Return only the groups with more than one file.",
"groups",
"=",
"dict",
"(",
"[",
"(",
"x",
",",
"groups",
"[",
"x",
"]",
")",
"for",
"x",
"in",
"groups",
"if",
"len",
"(",
"groups",
"[",
"x",
"]",
")",
">",
"1",
"]",
")",
"out",
".",
"write",
"(",
"\"Found %s sets of files with identical %s. (%d files examined)\"",
"%",
"(",
"len",
"(",
"groups",
")",
",",
"fun_desc",
",",
"count",
")",
",",
"newline",
"=",
"True",
")",
"return",
"groups"
] |
Subdivide groups of paths according to a function.
:param groups_in: Grouped sets of paths.
:type groups_in: :class:`~__builtins__.dict` of iterables
:param classifier: Function to group a list of paths by some attribute.
:type classifier: ``function(list, *args, **kwargs) -> str``
:param fun_desc: Human-readable term for what the classifier operates on.
(Used in log messages)
:type fun_desc: :class:`~__builtins__.str`
:param keep_uniques: If ``False``, discard groups with only one member.
:type keep_uniques: :class:`~__builtins__.bool`
:returns: A dict mapping classifier keys to groups of matches.
:rtype: :class:`~__builtins__.dict`
:attention: Grouping functions generally use a :class:`~__builtins__.set`
``groups`` as extra protection against accidentally counting a given
file twice. (Complementary to use of :func:`os.path.realpath` in
:func:`~fastdupes.getPaths`)
.. todo:: Find some way to bring back the file-by-file status text
|
[
"Subdivide",
"groups",
"of",
"paths",
"according",
"to",
"a",
"function",
"."
] |
python
|
valid
|
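A sketch of the kind of classifier groupBy expects; size_classifier is a hypothetical helper, and the commented call assumes groupBy (and its module-level out writer) are in scope:
import os

def size_classifier(paths):
    """Group a list of paths by file size (hypothetical first-pass classifier)."""
    groups = {}
    for path in paths:
        groups.setdefault(os.stat(path).st_size, set()).add(path)
    return groups

# groups_in maps an arbitrary key to an iterable of paths:
# by_size = groupBy({'all': ['a.bin', 'b.bin']}, size_classifier, fun_desc='size')
|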
koszullab/metaTOR
|
metator/scripts/hicstuff.py
|
https://github.com/koszullab/metaTOR/blob/0c1203d1dffedfa5ea380c0335b4baa9cfb7e89a/metator/scripts/hicstuff.py#L174-L205
|
def bin_sparse(M, subsampling_factor=3):
"""Perform the bin_dense procedure for sparse matrices. Remaining rows
and cols are lumped with the rest at the end.
"""
try:
from scipy.sparse import coo_matrix
except ImportError as e:
print(str(e))
print("I am peforming dense binning by default.")
return bin_dense(M.todense())
N = M.tocoo()
n, m = N.shape
row, col, data = N.row, N.col, N.data
# Divide row and column indices - duplicate coordinates are added in
# sparse matrix construction
binned_row = row // subsampling_factor
binned_col = col // subsampling_factor
binned_n = n // subsampling_factor
binned_m = m // subsampling_factor
# Attach remaining columns and rows to the last one
binned_row[binned_row >= binned_n] -= n % subsampling_factor
binned_col[binned_col >= binned_m] -= m % subsampling_factor
result = coo_matrix((data, (binned_row, binned_col)),
shape=(binned_n, binned_m))
return result
|
[
"def",
"bin_sparse",
"(",
"M",
",",
"subsampling_factor",
"=",
"3",
")",
":",
"try",
":",
"from",
"scipy",
".",
"sparse",
"import",
"coo_matrix",
"except",
"ImportError",
"as",
"e",
":",
"print",
"(",
"str",
"(",
"e",
")",
")",
"print",
"(",
"\"I am peforming dense binning by default.\"",
")",
"return",
"bin_dense",
"(",
"M",
".",
"todense",
"(",
")",
")",
"N",
"=",
"M",
".",
"tocoo",
"(",
")",
"n",
",",
"m",
"=",
"N",
".",
"shape",
"row",
",",
"col",
",",
"data",
"=",
"N",
".",
"row",
",",
"N",
".",
"col",
",",
"N",
".",
"data",
"# Divide row and column indices - duplicate coordinates are added in",
"# sparse matrix construction",
"binned_row",
"=",
"row",
"//",
"subsampling_factor",
"binned_col",
"=",
"col",
"//",
"subsampling_factor",
"binned_n",
"=",
"n",
"//",
"subsampling_factor",
"binned_m",
"=",
"m",
"//",
"subsampling_factor",
"# Attach remaining columns and rows to the last one",
"binned_row",
"[",
"binned_row",
">=",
"binned_n",
"]",
"-=",
"n",
"%",
"subsampling_factor",
"binned_col",
"[",
"binned_col",
">=",
"binned_m",
"]",
"-=",
"m",
"%",
"subsampling_factor",
"result",
"=",
"coo_matrix",
"(",
"(",
"data",
",",
"(",
"binned_row",
",",
"binned_col",
")",
")",
",",
"shape",
"=",
"(",
"binned_n",
",",
"binned_m",
")",
")",
"return",
"result"
] |
Perform the bin_dense procedure for sparse matrices. Remaining rows
and cols are lumped with the rest at the end.
|
[
"Perform",
"the",
"bin_dense",
"procedure",
"for",
"sparse",
"matrices",
".",
"Remaining",
"rows",
"and",
"cols",
"are",
"lumped",
"with",
"the",
"rest",
"at",
"the",
"end",
"."
] |
python
|
train
|
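A runnable sketch of the record above, assuming bin_sparse is in scope: a 7x7 matrix with subsampling_factor=3 bins to 2x2, with the leftover row and column folded into the last bin:
import numpy as np
from scipy.sparse import coo_matrix

M = coo_matrix(np.ones((7, 7)))
binned = bin_sparse(M, subsampling_factor=3)  # assumes the function is imported
print(binned.shape)  # (2, 2); duplicate coordinates are summed by coo_matrix
|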
bokeh/bokeh
|
bokeh/sphinxext/bokehjs_content.py
|
https://github.com/bokeh/bokeh/blob/dc8cf49e4e4302fd38537ad089ece81fbcca4737/bokeh/sphinxext/bokehjs_content.py#L268-L277
|
def setup(app):
''' Required Sphinx extension setup function. '''
app.add_node(
bokehjs_content,
html=(
html_visit_bokehjs_content,
html_depart_bokehjs_content
)
)
app.add_directive('bokehjs-content', BokehJSContent)
|
[
"def",
"setup",
"(",
"app",
")",
":",
"app",
".",
"add_node",
"(",
"bokehjs_content",
",",
"html",
"=",
"(",
"html_visit_bokehjs_content",
",",
"html_depart_bokehjs_content",
")",
")",
"app",
".",
"add_directive",
"(",
"'bokehjs-content'",
",",
"BokehJSContent",
")"
] |
Required Sphinx extension setup function.
|
[
"Required",
"Sphinx",
"extension",
"setup",
"function",
"."
] |
python
|
train
|
xeroc/python-graphenelib
|
graphenecommon/price.py
|
https://github.com/xeroc/python-graphenelib/blob/8bb5396bc79998ee424cf3813af478304173f3a6/graphenecommon/price.py#L291-L306
|
def json(self):
"""
return {
"base": self["base"].json(),
"quote": self["quote"].json()
}
"""
quote = self["quote"]
base = self["base"]
frac = Fraction(int(quote) / int(base)).limit_denominator(
10 ** base["asset"]["precision"]
)
return {
"base": {"amount": int(frac.denominator), "asset_id": base["asset"]["id"]},
"quote": {"amount": int(frac.numerator), "asset_id": quote["asset"]["id"]},
}
|
[
"def",
"json",
"(",
"self",
")",
":",
"quote",
"=",
"self",
"[",
"\"quote\"",
"]",
"base",
"=",
"self",
"[",
"\"base\"",
"]",
"frac",
"=",
"Fraction",
"(",
"int",
"(",
"quote",
")",
"/",
"int",
"(",
"base",
")",
")",
".",
"limit_denominator",
"(",
"10",
"**",
"base",
"[",
"\"asset\"",
"]",
"[",
"\"precision\"",
"]",
")",
"return",
"{",
"\"base\"",
":",
"{",
"\"amount\"",
":",
"int",
"(",
"frac",
".",
"denominator",
")",
",",
"\"asset_id\"",
":",
"base",
"[",
"\"asset\"",
"]",
"[",
"\"id\"",
"]",
"}",
",",
"\"quote\"",
":",
"{",
"\"amount\"",
":",
"int",
"(",
"frac",
".",
"numerator",
")",
",",
"\"asset_id\"",
":",
"quote",
"[",
"\"asset\"",
"]",
"[",
"\"id\"",
"]",
"}",
",",
"}"
] |
return {
"base": self["base"].json(),
"quote": self["quote"].json()
}
|
[
"return",
"{",
"base",
":",
"self",
"[",
"base",
"]",
".",
"json",
"()",
"quote",
":",
"self",
"[",
"quote",
"]",
".",
"json",
"()",
"}"
] |
python
|
valid
|
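The core step in the record above is Fraction.limit_denominator; a small standalone illustration with a hypothetical precision of 5, showing how a float ratio is recovered as integer amounts:
from fractions import Fraction

frac = Fraction(10 / 3).limit_denominator(10 ** 5)  # 5 is a hypothetical precision
print(frac.numerator, frac.denominator)  # 10 3
|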
KrishnaswamyLab/graphtools
|
graphtools/base.py
|
https://github.com/KrishnaswamyLab/graphtools/blob/44685352be7df2005d44722903092207967457f2/graphtools/base.py#L40-L66
|
def _get_param_names(cls):
"""Get parameter names for the estimator"""
# fetch the constructor or the original constructor before
# deprecation wrapping if any
init = getattr(cls.__init__, 'deprecated_original', cls.__init__)
if init is object.__init__:
# No explicit constructor to introspect
return []
# introspect the constructor arguments to find the model parameters
# to represent
init_signature = signature(init)
# Consider the constructor parameters excluding 'self'
parameters = [p for p in init_signature.parameters.values()
if p.name != 'self' and p.kind != p.VAR_KEYWORD]
# Extract and sort argument names excluding 'self'
parameters = set([p.name for p in parameters])
# recurse
for superclass in cls.__bases__:
try:
parameters.update(superclass._get_param_names())
except AttributeError:
# object and pygsp.graphs.Graph don't have this method
pass
return parameters
|
[
"def",
"_get_param_names",
"(",
"cls",
")",
":",
"# fetch the constructor or the original constructor before",
"# deprecation wrapping if any",
"init",
"=",
"getattr",
"(",
"cls",
".",
"__init__",
",",
"'deprecated_original'",
",",
"cls",
".",
"__init__",
")",
"if",
"init",
"is",
"object",
".",
"__init__",
":",
"# No explicit constructor to introspect",
"return",
"[",
"]",
"# introspect the constructor arguments to find the model parameters",
"# to represent",
"init_signature",
"=",
"signature",
"(",
"init",
")",
"# Consider the constructor parameters excluding 'self'",
"parameters",
"=",
"[",
"p",
"for",
"p",
"in",
"init_signature",
".",
"parameters",
".",
"values",
"(",
")",
"if",
"p",
".",
"name",
"!=",
"'self'",
"and",
"p",
".",
"kind",
"!=",
"p",
".",
"VAR_KEYWORD",
"]",
"# Extract and sort argument names excluding 'self'",
"parameters",
"=",
"set",
"(",
"[",
"p",
".",
"name",
"for",
"p",
"in",
"parameters",
"]",
")",
"# recurse",
"for",
"superclass",
"in",
"cls",
".",
"__bases__",
":",
"try",
":",
"parameters",
".",
"update",
"(",
"superclass",
".",
"_get_param_names",
"(",
")",
")",
"except",
"AttributeError",
":",
"# object and pygsp.graphs.Graph don't have this method",
"pass",
"return",
"parameters"
] |
Get parameter names for the estimator
|
[
"Get",
"parameter",
"names",
"for",
"the",
"estimator"
] |
python
|
train
|
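The signature introspection at the heart of the method above, as a standalone sketch; the stand-in constructor and its parameter names are hypothetical:
from inspect import signature

def init(self, knn=5, decay=None, **kwargs):  # hypothetical constructor
    pass

params = {p.name for p in signature(init).parameters.values()
          if p.name != 'self' and p.kind != p.VAR_KEYWORD}
print(params)  # {'knn', 'decay'} -- 'self' and **kwargs are filtered out
|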
knipknap/SpiffWorkflow
|
SpiffWorkflow/util/event.py
|
https://github.com/knipknap/SpiffWorkflow/blob/f0af7f59a332e0619e4f3c00a7d4a3d230760e00/SpiffWorkflow/util/event.py#L89-L120
|
def listen(self, callback, *args, **kwargs):
"""
Like :class:`connect()`, but uses a weak reference instead of a
normal reference.
The signal is automatically disconnected as soon as the handler
is garbage collected.
.. note::
Storing signal handlers as weak references means that if
your handler is a local function, it may be garbage collected. To
prevent this, use :class:`connect()` instead.
:type callback: object
:param callback: The callback function.
:type args: tuple
:param args: Optional arguments passed to the callback.
:type kwargs: dict
:param kwargs: Optional keyword arguments passed to the callback.
:rtype: :class:`Exscript.util.weakmethod.WeakMethod`
:returns: The newly created weak reference to the callback.
"""
if self.lock is None:
self.lock = Lock()
with self.lock:
if self.is_connected(callback):
raise AttributeError('callback is already connected')
if self.weak_subscribers is None:
self.weak_subscribers = []
ref = weakmethod.ref(callback, self._try_disconnect)
self.weak_subscribers.append((ref, args, kwargs))
return ref
|
[
"def",
"listen",
"(",
"self",
",",
"callback",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"lock",
"is",
"None",
":",
"self",
".",
"lock",
"=",
"Lock",
"(",
")",
"with",
"self",
".",
"lock",
":",
"if",
"self",
".",
"is_connected",
"(",
"callback",
")",
":",
"raise",
"AttributeError",
"(",
"'callback is already connected'",
")",
"if",
"self",
".",
"weak_subscribers",
"is",
"None",
":",
"self",
".",
"weak_subscribers",
"=",
"[",
"]",
"ref",
"=",
"weakmethod",
".",
"ref",
"(",
"callback",
",",
"self",
".",
"_try_disconnect",
")",
"self",
".",
"weak_subscribers",
".",
"append",
"(",
"(",
"ref",
",",
"args",
",",
"kwargs",
")",
")",
"return",
"ref"
] |
Like :class:`connect()`, but uses a weak reference instead of a
normal reference.
The signal is automatically disconnected as soon as the handler
is garbage collected.
.. note::
Storing signal handlers as weak references means that if
your handler is a local function, it may be garbage collected. To
prevent this, use :class:`connect()` instead.
:type callback: object
:param callback: The callback function.
:type args: tuple
:param args: Optional arguments passed to the callback.
:type kwargs: dict
:param kwargs: Optional keyword arguments passed to the callback.
:rtype: :class:`Exscript.util.weakmethod.WeakMethod`
:returns: The newly created weak reference to the callback.
|
[
"Like",
":",
"class",
":",
"connect",
"()",
"but",
"uses",
"a",
"weak",
"reference",
"instead",
"of",
"a",
"normal",
"reference",
".",
"The",
"signal",
"is",
"automatically",
"disconnected",
"as",
"soon",
"as",
"the",
"handler",
"is",
"garbage",
"collected",
"."
] |
python
|
valid
|
SUSE-Enceladus/ipa
|
ipa/ipa_cloud.py
|
https://github.com/SUSE-Enceladus/ipa/blob/0845eed0ea25a27dbb059ad1016105fa60002228/ipa/ipa_cloud.py#L351-L360
|
def _set_distro(self):
"""Determine distro for image and create instance of class."""
if self.distro_name == 'sles':
self.distro = SLES()
elif self.distro_name == 'opensuse_leap':
self.distro = openSUSE_Leap()
else:
raise IpaCloudException(
'Distribution: %s, not supported.' % self.distro_name
)
|
[
"def",
"_set_distro",
"(",
"self",
")",
":",
"if",
"self",
".",
"distro_name",
"==",
"'sles'",
":",
"self",
".",
"distro",
"=",
"SLES",
"(",
")",
"elif",
"self",
".",
"distro_name",
"==",
"'opensuse_leap'",
":",
"self",
".",
"distro",
"=",
"openSUSE_Leap",
"(",
")",
"else",
":",
"raise",
"IpaCloudException",
"(",
"'Distribution: %s, not supported.'",
"%",
"self",
".",
"distro_name",
")"
] |
Determine distro for image and create instance of class.
|
[
"Determine",
"distro",
"for",
"image",
"and",
"create",
"instance",
"of",
"class",
"."
] |
python
|
train
|
Yubico/python-yubico
|
yubico/yubikey_config.py
|
https://github.com/Yubico/python-yubico/blob/a72e8eddb90da6ee96e29f60912ca1f2872c9aea/yubico/yubikey_config.py#L408-L429
|
def extended_flag(self, which, new=None):
"""
Get or set an extended flag.
'which' can be either a string ('SERIAL_API_VISIBLE' etc.), or an integer.
You should ALWAYS use a string, unless you really know what you are doing.
"""
flag = _get_flag(which, ExtendedFlags)
if flag:
if not self.capabilities.have_extended_flag(flag):
raise yubikey_base.YubiKeyVersionError('Extended flag %s requires %s, and this is %s %d.%d'
% (which, flag.req_string(self.capabilities.model), \
self.capabilities.model, self.ykver[0], self.ykver[1]))
req_major, req_minor = flag.req_version()
self._require_version(major=req_major, minor=req_minor)
value = flag.to_integer()
else:
if type(which) is not int:
raise yubico_exception.InputError('Unknown non-integer ExtendedFlag (%s)' % which)
value = which
return self.extended_flags.get_set(value, new)
|
[
"def",
"extended_flag",
"(",
"self",
",",
"which",
",",
"new",
"=",
"None",
")",
":",
"flag",
"=",
"_get_flag",
"(",
"which",
",",
"ExtendedFlags",
")",
"if",
"flag",
":",
"if",
"not",
"self",
".",
"capabilities",
".",
"have_extended_flag",
"(",
"flag",
")",
":",
"raise",
"yubikey_base",
".",
"YubiKeyVersionError",
"(",
"'Extended flag %s requires %s, and this is %s %d.%d'",
"%",
"(",
"which",
",",
"flag",
".",
"req_string",
"(",
"self",
".",
"capabilities",
".",
"model",
")",
",",
"self",
".",
"capabilities",
".",
"model",
",",
"self",
".",
"ykver",
"[",
"0",
"]",
",",
"self",
".",
"ykver",
"[",
"1",
"]",
")",
")",
"req_major",
",",
"req_minor",
"=",
"flag",
".",
"req_version",
"(",
")",
"self",
".",
"_require_version",
"(",
"major",
"=",
"req_major",
",",
"minor",
"=",
"req_minor",
")",
"value",
"=",
"flag",
".",
"to_integer",
"(",
")",
"else",
":",
"if",
"type",
"(",
"which",
")",
"is",
"not",
"int",
":",
"raise",
"yubico_exception",
".",
"InputError",
"(",
"'Unknown non-integer ExtendedFlag (%s)'",
"%",
"which",
")",
"value",
"=",
"which",
"return",
"self",
".",
"extended_flags",
".",
"get_set",
"(",
"value",
",",
"new",
")"
] |
Get or set an extended flag.
'which' can be either a string ('SERIAL_API_VISIBLE' etc.), or an integer.
You should ALWAYS use a string, unless you really know what you are doing.
|
[
"Get",
"or",
"set",
"a",
"extended",
"flag",
"."
] |
python
|
train
|
saltstack/salt
|
salt/modules/x509.py
|
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/x509.py#L401-L407
|
def _get_pubkey_hash(cert):
'''
Returns the sha1 hash of the modulus of a public key in a cert
Used for generating subject key identifiers
'''
sha_hash = hashlib.sha1(cert.get_pubkey().get_modulus()).hexdigest()
return _pretty_hex(sha_hash)
|
[
"def",
"_get_pubkey_hash",
"(",
"cert",
")",
":",
"sha_hash",
"=",
"hashlib",
".",
"sha1",
"(",
"cert",
".",
"get_pubkey",
"(",
")",
".",
"get_modulus",
"(",
")",
")",
".",
"hexdigest",
"(",
")",
"return",
"_pretty_hex",
"(",
"sha_hash",
")"
] |
Returns the sha1 hash of the modulus of a public key in a cert
Used for generating subject key identifiers
|
[
"Returns",
"the",
"sha1",
"hash",
"of",
"the",
"modulus",
"of",
"a",
"public",
"key",
"in",
"a",
"cert",
"Used",
"for",
"generating",
"subject",
"key",
"identifiers"
] |
python
|
train
|
dhhagan/py-opc
|
opc/__init__.py
|
https://github.com/dhhagan/py-opc/blob/2c8f19530fb64bf5fd4ee0d694a47850161ed8a7/opc/__init__.py#L951-L959
|
def off(self):
"""Turn OFF the OPC (fan and laser)
:returns: boolean success state
"""
b1 = self.cnxn.xfer([0x03])[0] # send the command byte
sleep(9e-3) # sleep for 9 ms
return True if b1 == 0xF3 else False
|
[
"def",
"off",
"(",
"self",
")",
":",
"b1",
"=",
"self",
".",
"cnxn",
".",
"xfer",
"(",
"[",
"0x03",
"]",
")",
"[",
"0",
"]",
"# send the command byte",
"sleep",
"(",
"9e-3",
")",
"# sleep for 9 ms",
"return",
"True",
"if",
"b1",
"==",
"0xF3",
"else",
"False"
] |
Turn OFF the OPC (fan and laser)
:returns: boolean success state
|
[
"Turn",
"OFF",
"the",
"OPC",
"(",
"fan",
"and",
"laser",
")"
] |
python
|
valid
|
saltstack/salt
|
salt/utils/pkg/rpm.py
|
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/utils/pkg/rpm.py#L98-L123
|
def parse_pkginfo(line, osarch=None):
'''
A small helper to parse an rpm/repoquery command's output. Returns a
pkginfo namedtuple.
'''
try:
name, epoch, version, release, arch, repoid, install_time = line.split('_|-')
# Handle unpack errors (should never happen with the queryformat we are
# using, but can't hurt to be careful).
except ValueError:
return None
name = resolve_name(name, arch, osarch)
if release:
version += '-{0}'.format(release)
if epoch not in ('(none)', '0'):
version = ':'.join((epoch, version))
if install_time not in ('(none)', '0'):
install_date = datetime.datetime.utcfromtimestamp(int(install_time)).isoformat() + "Z"
install_date_time_t = int(install_time)
else:
install_date = None
install_date_time_t = None
return pkginfo(name, version, arch, repoid, install_date, install_date_time_t)
|
[
"def",
"parse_pkginfo",
"(",
"line",
",",
"osarch",
"=",
"None",
")",
":",
"try",
":",
"name",
",",
"epoch",
",",
"version",
",",
"release",
",",
"arch",
",",
"repoid",
",",
"install_time",
"=",
"line",
".",
"split",
"(",
"'_|-'",
")",
"# Handle unpack errors (should never happen with the queryformat we are",
"# using, but can't hurt to be careful).",
"except",
"ValueError",
":",
"return",
"None",
"name",
"=",
"resolve_name",
"(",
"name",
",",
"arch",
",",
"osarch",
")",
"if",
"release",
":",
"version",
"+=",
"'-{0}'",
".",
"format",
"(",
"release",
")",
"if",
"epoch",
"not",
"in",
"(",
"'(none)'",
",",
"'0'",
")",
":",
"version",
"=",
"':'",
".",
"join",
"(",
"(",
"epoch",
",",
"version",
")",
")",
"if",
"install_time",
"not",
"in",
"(",
"'(none)'",
",",
"'0'",
")",
":",
"install_date",
"=",
"datetime",
".",
"datetime",
".",
"utcfromtimestamp",
"(",
"int",
"(",
"install_time",
")",
")",
".",
"isoformat",
"(",
")",
"+",
"\"Z\"",
"install_date_time_t",
"=",
"int",
"(",
"install_time",
")",
"else",
":",
"install_date",
"=",
"None",
"install_date_time_t",
"=",
"None",
"return",
"pkginfo",
"(",
"name",
",",
"version",
",",
"arch",
",",
"repoid",
",",
"install_date",
",",
"install_date_time_t",
")"
] |
A small helper to parse an rpm/repoquery command's output. Returns a
pkginfo namedtuple.
|
[
"A",
"small",
"helper",
"to",
"parse",
"an",
"rpm",
"/",
"repoquery",
"command",
"s",
"output",
".",
"Returns",
"a",
"pkginfo",
"namedtuple",
"."
] |
python
|
train
|
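What one line of the expected queryformat looks like, and how the epoch and release handling in the record above plays out; the package values are hypothetical:
line = 'zsh_|-(none)_|-5.0.2_|-7.el7_|-x86_64_|-base_|-1453838509'  # hypothetical repoquery output
name, epoch, version, release, arch, repoid, install_time = line.split('_|-')
if release:
    version += '-{0}'.format(release)
if epoch not in ('(none)', '0'):
    version = ':'.join((epoch, version))
print(name, version)  # zsh 5.0.2-7.el7 -- the '(none)' epoch is dropped
|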
has2k1/plotnine
|
plotnine/facets/layout.py
|
https://github.com/has2k1/plotnine/blob/566e579af705367e584fb27a74e6c5199624ca89/plotnine/facets/layout.py#L205-L223
|
def xlabel(self, labels):
"""
Determine x-axis label
Parameters
----------
labels : dict
Labels as specified by the user through the ``labs`` or
``xlab`` calls.
Returns
-------
out : str
x-axis label
"""
if self.panel_scales_x[0].name is not None:
return self.panel_scales_x[0].name
else:
return labels.get('x', '')
|
[
"def",
"xlabel",
"(",
"self",
",",
"labels",
")",
":",
"if",
"self",
".",
"panel_scales_x",
"[",
"0",
"]",
".",
"name",
"is",
"not",
"None",
":",
"return",
"self",
".",
"panel_scales_x",
"[",
"0",
"]",
".",
"name",
"else",
":",
"return",
"labels",
".",
"get",
"(",
"'x'",
",",
"''",
")"
] |
Determine x-axis label
Parameters
----------
labels : dict
Labels as specified by the user through the ``labs`` or
``xlab`` calls.
Returns
-------
out : str
x-axis label
|
[
"Determine",
"x",
"-",
"axis",
"label"
] |
python
|
train
|
openego/eDisGo
|
edisgo/grid/components.py
|
https://github.com/openego/eDisGo/blob/e6245bdaf236f9c49dbda5a18c1c458290f41e2b/edisgo/grid/components.py#L344-L368
|
def power_factor(self):
"""
Power factor of load
Parameters
-----------
power_factor : :obj:`float`
Ratio of real power to apparent power.
Returns
--------
:obj:`float`
Ratio of real power to apparent power. If power factor is not set
it is retrieved from the network config object depending on the
grid level the load is in.
"""
if self._power_factor is None:
if isinstance(self.grid, MVGrid):
self._power_factor = self.grid.network.config[
'reactive_power_factor']['mv_load']
elif isinstance(self.grid, LVGrid):
self._power_factor = self.grid.network.config[
'reactive_power_factor']['lv_load']
return self._power_factor
|
[
"def",
"power_factor",
"(",
"self",
")",
":",
"if",
"self",
".",
"_power_factor",
"is",
"None",
":",
"if",
"isinstance",
"(",
"self",
".",
"grid",
",",
"MVGrid",
")",
":",
"self",
".",
"_power_factor",
"=",
"self",
".",
"grid",
".",
"network",
".",
"config",
"[",
"'reactive_power_factor'",
"]",
"[",
"'mv_load'",
"]",
"elif",
"isinstance",
"(",
"self",
".",
"grid",
",",
"LVGrid",
")",
":",
"self",
".",
"_power_factor",
"=",
"self",
".",
"grid",
".",
"network",
".",
"config",
"[",
"'reactive_power_factor'",
"]",
"[",
"'lv_load'",
"]",
"return",
"self",
".",
"_power_factor"
] |
Power factor of load
Parameters
-----------
power_factor : :obj:`float`
Ratio of real power to apparent power.
Returns
--------
:obj:`float`
Ratio of real power to apparent power. If power factor is not set
it is retrieved from the network config object depending on the
grid level the load is in.
|
[
"Power",
"factor",
"of",
"load"
] |
python
|
train
|
numba/llvmlite
|
llvmlite/binding/value.py
|
https://github.com/numba/llvmlite/blob/fcadf8af11947f3fd041c5d6526c5bf231564883/llvmlite/binding/value.py#L282-L293
|
def operands(self):
"""
Return an iterator over this instruction's operands.
The iterator will yield a ValueRef for each operand.
"""
if not self.is_instruction:
raise ValueError('expected instruction value, got %s'
% (self._kind,))
it = ffi.lib.LLVMPY_InstructionOperandsIter(self)
parents = self._parents.copy()
parents.update(instruction=self)
return _OperandsIterator(it, parents)
|
[
"def",
"operands",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"is_instruction",
":",
"raise",
"ValueError",
"(",
"'expected instruction value, got %s'",
"%",
"(",
"self",
".",
"_kind",
",",
")",
")",
"it",
"=",
"ffi",
".",
"lib",
".",
"LLVMPY_InstructionOperandsIter",
"(",
"self",
")",
"parents",
"=",
"self",
".",
"_parents",
".",
"copy",
"(",
")",
"parents",
".",
"update",
"(",
"instruction",
"=",
"self",
")",
"return",
"_OperandsIterator",
"(",
"it",
",",
"parents",
")"
] |
Return an iterator over this instruction's operands.
The iterator will yield a ValueRef for each operand.
|
[
"Return",
"an",
"iterator",
"over",
"this",
"instruction",
"s",
"operands",
".",
"The",
"iterator",
"will",
"yield",
"a",
"ValueRef",
"for",
"each",
"operand",
"."
] |
python
|
train
|
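A minimal sketch of iterating operands with llvmlite, assuming a module parsed from textual IR; the function body is hypothetical:
import llvmlite.binding as llvm

llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()

mod = llvm.parse_assembly("""
define i32 @add(i32 %a, i32 %b) {
  %r = add i32 %a, %b
  ret i32 %r
}
""")
for block in mod.get_function('add').blocks:
    for instr in block.instructions:
        # operands yields a ValueRef per operand of the instruction
        print([str(op) for op in instr.operands])
|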
klahnakoski/pyLibrary
|
mo_dots/__init__.py
|
https://github.com/klahnakoski/pyLibrary/blob/fa2dcbc48fda8d26999baef400e9a98149e0b982/mo_dots/__init__.py#L307-L319
|
def set_attr(obj, path, value):
"""
SAME AS object.__setattr__(), BUT USES DOT-DELIMITED path
RETURN OLD VALUE
"""
try:
return _set_attr(obj, split_field(path), value)
except Exception as e:
Log = get_logger()
if PATH_NOT_FOUND in e:
Log.warning(PATH_NOT_FOUND + ": {{path}}", path=path, cause=e)
else:
Log.error("Problem setting value", cause=e)
|
[
"def",
"set_attr",
"(",
"obj",
",",
"path",
",",
"value",
")",
":",
"try",
":",
"return",
"_set_attr",
"(",
"obj",
",",
"split_field",
"(",
"path",
")",
",",
"value",
")",
"except",
"Exception",
"as",
"e",
":",
"Log",
"=",
"get_logger",
"(",
")",
"if",
"PATH_NOT_FOUND",
"in",
"e",
":",
"Log",
".",
"warning",
"(",
"PATH_NOT_FOUND",
"+",
"\": {{path}}\"",
",",
"path",
"=",
"path",
",",
"cause",
"=",
"e",
")",
"else",
":",
"Log",
".",
"error",
"(",
"\"Problem setting value\"",
",",
"cause",
"=",
"e",
")"
] |
SAME AS object.__setattr__(), BUT USES DOT-DELIMITED path
RETURN OLD VALUE
|
[
"SAME",
"AS",
"object",
".",
"__setattr__",
"()",
"BUT",
"USES",
"DOT",
"-",
"DELIMITED",
"path",
"RETURN",
"OLD",
"VALUE"
] |
python
|
train
|
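A usage sketch for the record above, assuming mo_dots is importable; the attribute names are hypothetical:
from mo_dots import set_attr

class Box(object):
    pass

obj = Box()
obj.inner = Box()
obj.inner.value = 1
old = set_attr(obj, 'inner.value', 42)  # dot-delimited path; returns the old value
print(obj.inner.value)  # 42
|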
tanghaibao/goatools
|
goatools/rpt/rpt_lev_depth.py
|
https://github.com/tanghaibao/goatools/blob/407682e573a108864a79031f8ca19ee3bf377626/goatools/rpt/rpt_lev_depth.py#L149-L156
|
def get_cnts_levels_depths_recs(recs):
"""Collect counts of levels and depths in a Group of GO Terms."""
cnts = cx.defaultdict(lambda: cx.defaultdict(cx.Counter))
for rec in recs:
if rec is not None and not rec.is_obsolete:
cnts['level'][rec.level][rec.namespace] += 1
cnts['depth'][rec.depth][rec.namespace] += 1
return cnts
|
[
"def",
"get_cnts_levels_depths_recs",
"(",
"recs",
")",
":",
"cnts",
"=",
"cx",
".",
"defaultdict",
"(",
"lambda",
":",
"cx",
".",
"defaultdict",
"(",
"cx",
".",
"Counter",
")",
")",
"for",
"rec",
"in",
"recs",
":",
"if",
"rec",
"is",
"not",
"None",
"and",
"not",
"rec",
".",
"is_obsolete",
":",
"cnts",
"[",
"'level'",
"]",
"[",
"rec",
".",
"level",
"]",
"[",
"rec",
".",
"namespace",
"]",
"+=",
"1",
"cnts",
"[",
"'depth'",
"]",
"[",
"rec",
".",
"depth",
"]",
"[",
"rec",
".",
"namespace",
"]",
"+=",
"1",
"return",
"cnts"
] |
Collect counts of levels and depths in a Group of GO Terms.
|
[
"Collect",
"counts",
"of",
"levels",
"and",
"depths",
"in",
"a",
"Group",
"of",
"GO",
"Terms",
"."
] |
python
|
train
|
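The nested defaultdict/Counter pattern used above, in isolation; the level, depth, and namespace values are hypothetical:
import collections as cx

cnts = cx.defaultdict(lambda: cx.defaultdict(cx.Counter))
cnts['level'][3]['biological_process'] += 1  # hypothetical GO term at level 3
cnts['depth'][5]['biological_process'] += 1
print(cnts['level'][3])  # Counter({'biological_process': 1})
|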
saulpw/visidata
|
visidata/cmdlog.py
|
https://github.com/saulpw/visidata/blob/32771e0cea6c24fc7902683d14558391395c591f/visidata/cmdlog.py#L171-L218
|
def moveToReplayContext(self, r):
'set the sheet/row/col to the values in the replay row. return sheet'
if not r.sheet:
# assert not r.col and not r.row
return self # any old sheet should do, row/column don't matter
try:
sheetidx = int(r.sheet)
vs = vd().sheets[sheetidx]
except ValueError:
vs = vd().getSheet(r.sheet) or error('no sheet named %s' % r.sheet)
if r.row:
try:
rowidx = int(r.row)
except ValueError:
rowidx = indexMatch(vs.rows, lambda r,vs=vs,k=r.row: keystr(vs.rowkey(r)) == k)
if rowidx is None:
error('no "%s" row' % r.row)
if options.replay_movement:
while vs.cursorRowIndex != rowidx:
vs.cursorRowIndex += 1 if (rowidx - vs.cursorRowIndex) > 0 else -1
while not self.delay(0.5):
pass
else:
vs.cursorRowIndex = rowidx
if r.col:
try:
vcolidx = int(r.col)
except ValueError:
vcolidx = indexMatch(vs.visibleCols, lambda c,name=r.col: name == c.name)
if vcolidx is None:
error('no "%s" column' % r.col)
if options.replay_movement:
while vs.cursorVisibleColIndex != vcolidx:
vs.cursorVisibleColIndex += 1 if (vcolidx - vs.cursorVisibleColIndex) > 0 else -1
while not self.delay(0.5):
pass
assert vs.cursorVisibleColIndex == vcolidx
else:
vs.cursorVisibleColIndex = vcolidx
return vs
|
[
"def",
"moveToReplayContext",
"(",
"self",
",",
"r",
")",
":",
"if",
"not",
"r",
".",
"sheet",
":",
"# assert not r.col and not r.row",
"return",
"self",
"# any old sheet should do, row/column don't matter",
"try",
":",
"sheetidx",
"=",
"int",
"(",
"r",
".",
"sheet",
")",
"vs",
"=",
"vd",
"(",
")",
".",
"sheets",
"[",
"sheetidx",
"]",
"except",
"ValueError",
":",
"vs",
"=",
"vd",
"(",
")",
".",
"getSheet",
"(",
"r",
".",
"sheet",
")",
"or",
"error",
"(",
"'no sheet named %s'",
"%",
"r",
".",
"sheet",
")",
"if",
"r",
".",
"row",
":",
"try",
":",
"rowidx",
"=",
"int",
"(",
"r",
".",
"row",
")",
"except",
"ValueError",
":",
"rowidx",
"=",
"indexMatch",
"(",
"vs",
".",
"rows",
",",
"lambda",
"r",
",",
"vs",
"=",
"vs",
",",
"k",
"=",
"r",
".",
"row",
":",
"keystr",
"(",
"vs",
".",
"rowkey",
"(",
"r",
")",
")",
"==",
"k",
")",
"if",
"rowidx",
"is",
"None",
":",
"error",
"(",
"'no \"%s\" row'",
"%",
"r",
".",
"row",
")",
"if",
"options",
".",
"replay_movement",
":",
"while",
"vs",
".",
"cursorRowIndex",
"!=",
"rowidx",
":",
"vs",
".",
"cursorRowIndex",
"+=",
"1",
"if",
"(",
"rowidx",
"-",
"vs",
".",
"cursorRowIndex",
")",
">",
"0",
"else",
"-",
"1",
"while",
"not",
"self",
".",
"delay",
"(",
"0.5",
")",
":",
"pass",
"else",
":",
"vs",
".",
"cursorRowIndex",
"=",
"rowidx",
"if",
"r",
".",
"col",
":",
"try",
":",
"vcolidx",
"=",
"int",
"(",
"r",
".",
"col",
")",
"except",
"ValueError",
":",
"vcolidx",
"=",
"indexMatch",
"(",
"vs",
".",
"visibleCols",
",",
"lambda",
"c",
",",
"name",
"=",
"r",
".",
"col",
":",
"name",
"==",
"c",
".",
"name",
")",
"if",
"vcolidx",
"is",
"None",
":",
"error",
"(",
"'no \"%s\" column'",
"%",
"r",
".",
"col",
")",
"if",
"options",
".",
"replay_movement",
":",
"while",
"vs",
".",
"cursorVisibleColIndex",
"!=",
"vcolidx",
":",
"vs",
".",
"cursorVisibleColIndex",
"+=",
"1",
"if",
"(",
"vcolidx",
"-",
"vs",
".",
"cursorVisibleColIndex",
")",
">",
"0",
"else",
"-",
"1",
"while",
"not",
"self",
".",
"delay",
"(",
"0.5",
")",
":",
"pass",
"assert",
"vs",
".",
"cursorVisibleColIndex",
"==",
"vcolidx",
"else",
":",
"vs",
".",
"cursorVisibleColIndex",
"=",
"vcolidx",
"return",
"vs"
] |
set the sheet/row/col to the values in the replay row. return sheet
|
[
"set",
"the",
"sheet",
"/",
"row",
"/",
"col",
"to",
"the",
"values",
"in",
"the",
"replay",
"row",
".",
"return",
"sheet"
] |
python
|
train
|
merry-bits/DBQuery
|
src/dbquery/query.py
|
https://github.com/merry-bits/DBQuery/blob/5f46dc94e2721129f8a799b5f613373e6cd9cb73/src/dbquery/query.py#L113-L125
|
def show(self, *args, **kwds):
""" Show how the SQL looks like when executed by the DB.
This might not be supported by all connection types.
For example: PostgreSQL does support it, SQLite does not.
:rtype: str
"""
# Same as in __call__, arguments win over keywords
arg = args
if not arg:
arg = kwds # pylint: disable=redefined-variable-type
return self._db.show(self._sql, arg)
|
[
"def",
"show",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwds",
")",
":",
"# Same as in __call__, arguments win over keywords",
"arg",
"=",
"args",
"if",
"not",
"arg",
":",
"arg",
"=",
"kwds",
"# pylint: disable=redefined-variable-type",
"return",
"self",
".",
"_db",
".",
"show",
"(",
"self",
".",
"_sql",
",",
"arg",
")"
] |
Show what the SQL looks like when executed by the DB.
This might not be supported by all connection types.
For example: PostgreSQL does support it, SQLite does not.
:rtype: str
|
[
"Show",
"how",
"the",
"SQL",
"looks",
"like",
"when",
"executed",
"by",
"the",
"DB",
"."
] |
python
|
train
|
ARMmbed/yotta
|
yotta/lib/component.py
|
https://github.com/ARMmbed/yotta/blob/56bc1e56c602fa20307b23fe27518e9cd6c11af1/yotta/lib/component.py#L250-L277
|
def getDependencies(self,
available_components = None,
search_dirs = None,
target = None,
available_only = False,
test = False,
warnings = True
):
''' Returns {component_name:component}
'''
if search_dirs is None:
search_dirs = [self.modulesPath()]
available_components = self.ensureOrderedDict(available_components)
components, errors = self.__getDependenciesWithProvider(
available_components = available_components,
search_dirs = search_dirs,
target = target,
update_installed = False,
provider = self.provideInstalled,
test = test
)
if warnings:
for error in errors:
logger.warning(error)
if available_only:
components = OrderedDict((k, v) for k, v in components.items() if v)
return components
|
[
"def",
"getDependencies",
"(",
"self",
",",
"available_components",
"=",
"None",
",",
"search_dirs",
"=",
"None",
",",
"target",
"=",
"None",
",",
"available_only",
"=",
"False",
",",
"test",
"=",
"False",
",",
"warnings",
"=",
"True",
")",
":",
"if",
"search_dirs",
"is",
"None",
":",
"search_dirs",
"=",
"[",
"self",
".",
"modulesPath",
"(",
")",
"]",
"available_components",
"=",
"self",
".",
"ensureOrderedDict",
"(",
"available_components",
")",
"components",
",",
"errors",
"=",
"self",
".",
"__getDependenciesWithProvider",
"(",
"available_components",
"=",
"available_components",
",",
"search_dirs",
"=",
"search_dirs",
",",
"target",
"=",
"target",
",",
"update_installed",
"=",
"False",
",",
"provider",
"=",
"self",
".",
"provideInstalled",
",",
"test",
"=",
"test",
")",
"if",
"warnings",
":",
"for",
"error",
"in",
"errors",
":",
"logger",
".",
"warning",
"(",
"error",
")",
"if",
"available_only",
":",
"components",
"=",
"OrderedDict",
"(",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"components",
".",
"items",
"(",
")",
"if",
"v",
")",
"return",
"components"
] |
Returns {component_name:component}
|
[
"Returns",
"{",
"component_name",
":",
"component",
"}"
] |
python
|
valid
|
treycucco/bidon
|
bidon/field_mapping.py
|
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/field_mapping.py#L19-L40
|
def get_value(self, source):
"""Apply self.convert to the source. The parameter passed to convert depends on
self.source_name. If source_name is given, self.convert(getattr(source, source_name)) is called,
otherwise self.convert(source) is called.
"""
if self.source_name is None:
present, value = True, self.convert(source)
converted = True
else:
present, value = has_value(source, self.source_name)
converted = False
if not present or value is None:
if self.is_required:
raise ValueError("required value not present")
else:
return None
else:
if converted:
return value
else:
return self.convert(value)
|
[
"def",
"get_value",
"(",
"self",
",",
"source",
")",
":",
"if",
"self",
".",
"source_name",
"is",
"None",
":",
"present",
",",
"value",
"=",
"True",
",",
"self",
".",
"convert",
"(",
"source",
")",
"converted",
"=",
"True",
"else",
":",
"present",
",",
"value",
"=",
"has_value",
"(",
"source",
",",
"self",
".",
"source_name",
")",
"converted",
"=",
"False",
"if",
"not",
"present",
"or",
"value",
"is",
"None",
":",
"if",
"self",
".",
"is_required",
":",
"raise",
"ValueError",
"(",
"\"required value not present\"",
")",
"else",
":",
"return",
"None",
"else",
":",
"if",
"converted",
":",
"return",
"value",
"else",
":",
"return",
"self",
".",
"convert",
"(",
"value",
")"
] |
Apply self.convert to the source. The parameter passed to convert depends on
self.source_name. If source_name is given, self.convert(getattr(source, source_name)) is called,
otherwise self.convert(source) is called.
|
[
"Apply",
"self",
".",
"convert",
"to",
"the",
"source",
".",
"The",
"parameter",
"passed",
"to",
"convert",
"depends",
"on",
"self",
".",
"source_name",
".",
"If",
"source_name",
"is",
"given",
"self",
".",
"convert",
"(",
"getattr",
"(",
"source",
"source_name",
"))",
"is",
"called",
"otherwise",
"self",
".",
"convert",
"(",
"source",
")",
"is",
"called",
"."
] |
python
|
train
|
swisscom/cleanerversion
|
versions/admin.py
|
https://github.com/swisscom/cleanerversion/blob/becadbab5d7b474a0e9a596b99e97682402d2f2c/versions/admin.py#L261-L295
|
def history_view(self, request, object_id, extra_context=None):
"The 'history' admin view for this model."
from django.contrib.admin.models import LogEntry
# First check if the user can see this history.
model = self.model
obj = get_object_or_404(self.get_queryset(request),
pk=unquote(object_id))
if not self.has_change_permission(request, obj):
raise PermissionDenied
# Then get the history for this object.
opts = model._meta
app_label = opts.app_label
action_list = LogEntry.objects.filter(
object_id=unquote(str(obj.identity)),
# this is the change for our override;
content_type=get_content_type_for_model(model)
).select_related().order_by('action_time')
ctx = self.admin_site.each_context(request)
context = dict(ctx,
title=('Change history: %s') % force_text(obj),
action_list=action_list,
module_name=capfirst(
force_text(opts.verbose_name_plural)),
object=obj,
opts=opts,
preserved_filters=self.get_preserved_filters(request),
)
context.update(extra_context or {})
return TemplateResponse(request, self.object_history_template or [
"admin/%s/%s/object_history.html" % (app_label, opts.model_name),
"admin/%s/object_history.html" % app_label,
"admin/object_history.html"
], context)
|
[
"def",
"history_view",
"(",
"self",
",",
"request",
",",
"object_id",
",",
"extra_context",
"=",
"None",
")",
":",
"from",
"django",
".",
"contrib",
".",
"admin",
".",
"models",
"import",
"LogEntry",
"# First check if the user can see this history.",
"model",
"=",
"self",
".",
"model",
"obj",
"=",
"get_object_or_404",
"(",
"self",
".",
"get_queryset",
"(",
"request",
")",
",",
"pk",
"=",
"unquote",
"(",
"object_id",
")",
")",
"if",
"not",
"self",
".",
"has_change_permission",
"(",
"request",
",",
"obj",
")",
":",
"raise",
"PermissionDenied",
"# Then get the history for this object.",
"opts",
"=",
"model",
".",
"_meta",
"app_label",
"=",
"opts",
".",
"app_label",
"action_list",
"=",
"LogEntry",
".",
"objects",
".",
"filter",
"(",
"object_id",
"=",
"unquote",
"(",
"str",
"(",
"obj",
".",
"identity",
")",
")",
",",
"# this is the change for our override;",
"content_type",
"=",
"get_content_type_for_model",
"(",
"model",
")",
")",
".",
"select_related",
"(",
")",
".",
"order_by",
"(",
"'action_time'",
")",
"ctx",
"=",
"self",
".",
"admin_site",
".",
"each_context",
"(",
"request",
")",
"context",
"=",
"dict",
"(",
"ctx",
",",
"title",
"=",
"(",
"'Change history: %s'",
")",
"%",
"force_text",
"(",
"obj",
")",
",",
"action_list",
"=",
"action_list",
",",
"module_name",
"=",
"capfirst",
"(",
"force_text",
"(",
"opts",
".",
"verbose_name_plural",
")",
")",
",",
"object",
"=",
"obj",
",",
"opts",
"=",
"opts",
",",
"preserved_filters",
"=",
"self",
".",
"get_preserved_filters",
"(",
"request",
")",
",",
")",
"context",
".",
"update",
"(",
"extra_context",
"or",
"{",
"}",
")",
"return",
"TemplateResponse",
"(",
"request",
",",
"self",
".",
"object_history_template",
"or",
"[",
"\"admin/%s/%s/object_history.html\"",
"%",
"(",
"app_label",
",",
"opts",
".",
"model_name",
")",
",",
"\"admin/%s/object_history.html\"",
"%",
"app_label",
",",
"\"admin/object_history.html\"",
"]",
",",
"context",
")"
] |
The 'history' admin view for this model.
|
[
"The",
"history",
"admin",
"view",
"for",
"this",
"model",
"."
] |
python
|
train
|
SBRG/ssbio
|
ssbio/protein/structure/properties/fatcat.py
|
https://github.com/SBRG/ssbio/blob/e9449e64ffc1a1f5ad07e5849aa12a650095f8a2/ssbio/protein/structure/properties/fatcat.py#L14-L43
|
def run_fatcat(structure_path_1, structure_path_2, fatcat_sh, outdir='', silent=False, print_cmd=False, force_rerun=False):
"""Run FATCAT on two PDB files, and return the path of the XML result file.
Args:
structure_path_1 (str): Path to PDB file
structure_path_2 (str): Path to PDB file
fatcat_sh (str): Path to "runFATCAT.sh" executable script
outdir (str): Path to where FATCAT XML output files will be saved
silent (bool): If stdout should be silenced from showing up in Python console output
print_cmd (bool): If command to run FATCAT should be printed to stdout
force_rerun (bool): If FATCAT should be run even if XML output files already exist
Returns:
str: Path to XML output file
"""
filename1 = op.splitext(op.basename(structure_path_1))[0]
filename2 = op.splitext(op.basename(structure_path_2))[0]
if not op.exists(outdir):
os.mkdir(outdir)
outfile = op.join(outdir, filename1 + '__' + filename2 + '.xml')
# Run FATCAT on the structures, print the XML of the result to stdout
fatcat_cmd = '{} -file1 {} -file2 {} -outFile {}'.format(fatcat_sh, structure_path_1, structure_path_2, outfile)
if print_cmd:
print(fatcat_cmd)
ssbio.utils.command_runner(fatcat_cmd, force_rerun_flag=force_rerun, outfile_checker=outfile, silent=silent)
return outfile
|
[
"def",
"run_fatcat",
"(",
"structure_path_1",
",",
"structure_path_2",
",",
"fatcat_sh",
",",
"outdir",
"=",
"''",
",",
"silent",
"=",
"False",
",",
"print_cmd",
"=",
"False",
",",
"force_rerun",
"=",
"False",
")",
":",
"filename1",
"=",
"op",
".",
"splitext",
"(",
"op",
".",
"basename",
"(",
"structure_path_1",
")",
")",
"[",
"0",
"]",
"filename2",
"=",
"op",
".",
"splitext",
"(",
"op",
".",
"basename",
"(",
"structure_path_2",
")",
")",
"[",
"0",
"]",
"if",
"not",
"op",
".",
"exists",
"(",
"outdir",
")",
":",
"os",
".",
"mkdir",
"(",
"outdir",
")",
"outfile",
"=",
"op",
".",
"join",
"(",
"outdir",
",",
"filename1",
"+",
"'__'",
"+",
"filename2",
"+",
"'.xml'",
")",
"# Run FATCAT on the structures, print the XML of the result to stdout",
"fatcat_cmd",
"=",
"'{} -file1 {} -file2 {} -outFile {}'",
".",
"format",
"(",
"fatcat_sh",
",",
"structure_path_1",
",",
"structure_path_2",
",",
"outfile",
")",
"if",
"print_cmd",
":",
"print",
"(",
"fatcat_cmd",
")",
"ssbio",
".",
"utils",
".",
"command_runner",
"(",
"fatcat_cmd",
",",
"force_rerun_flag",
"=",
"force_rerun",
",",
"outfile_checker",
"=",
"outfile",
",",
"silent",
"=",
"silent",
")",
"return",
"outfile"
] |
Run FATCAT on two PDB files, and return the path of the XML result file.
Args:
structure_path_1 (str): Path to PDB file
structure_path_2 (str): Path to PDB file
fatcat_sh (str): Path to "runFATCAT.sh" executable script
outdir (str): Path to where FATCAT XML output files will be saved
silent (bool): If stdout should be silenced from showing up in Python console output
print_cmd (bool): If command to run FATCAT should be printed to stdout
force_rerun (bool): If FATCAT should be run even if XML output files already exist
Returns:
str: Path to XML output file
|
[
"Run",
"FATCAT",
"on",
"two",
"PDB",
"files",
"and",
"return",
"the",
"path",
"of",
"the",
"XML",
"result",
"file",
".",
"Args",
":",
"structure_path_1",
"(",
"str",
")",
":",
"Path",
"to",
"PDB",
"file",
"structure_path_2",
"(",
"str",
")",
":",
"Path",
"to",
"PDB",
"file",
"fatcat_sh",
"(",
"str",
")",
":",
"Path",
"to",
"runFATCAT",
".",
"sh",
"executable",
"script",
"outdir",
"(",
"str",
")",
":",
"Path",
"to",
"where",
"FATCAT",
"XML",
"output",
"files",
"will",
"be",
"saved",
"silent",
"(",
"bool",
")",
":",
"If",
"stdout",
"should",
"be",
"silenced",
"from",
"showing",
"up",
"in",
"Python",
"console",
"output",
"print_cmd",
"(",
"bool",
")",
":",
"If",
"command",
"to",
"run",
"FATCAT",
"should",
"be",
"printed",
"to",
"stdout",
"force_rerun",
"(",
"bool",
")",
":",
"If",
"FATCAT",
"should",
"be",
"run",
"even",
"if",
"XML",
"output",
"files",
"already",
"exist"
] |
python
|
train
|
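A hedged invocation sketch of the record above; all paths are hypothetical, and runFATCAT.sh must come from a local FATCAT installation:
xml_out = run_fatcat(                        # assumes run_fatcat is imported
    '1abc.pdb', '2xyz.pdb',                  # hypothetical structure files
    fatcat_sh='/opt/fatcat/runFATCAT.sh',    # hypothetical install location
    outdir='fatcat_results',
    print_cmd=True)
print(xml_out)  # fatcat_results/1abc__2xyz.xml
|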
Azure/azure-cosmos-python
|
azure/cosmos/execution_context/query_execution_info.py
|
https://github.com/Azure/azure-cosmos-python/blob/dd01b3c5d308c6da83cfcaa0ab7083351a476353/azure/cosmos/execution_context/query_execution_info.py#L67-L74
|
def get_rewritten_query(self):
"""Returns rewritten query or None (if any)
"""
rewrittenQuery = self._extract(_PartitionedQueryExecutionInfo.RewrittenQueryPath)
if rewrittenQuery is not None:
# Hardcode formattable filter to true for now
rewrittenQuery = rewrittenQuery.replace('{documentdb-formattableorderbyquery-filter}', 'true')
return rewrittenQuery
|
[
"def",
"get_rewritten_query",
"(",
"self",
")",
":",
"rewrittenQuery",
"=",
"self",
".",
"_extract",
"(",
"_PartitionedQueryExecutionInfo",
".",
"RewrittenQueryPath",
")",
"if",
"rewrittenQuery",
"is",
"not",
"None",
":",
"# Hardcode formattable filter to true for now ",
"rewrittenQuery",
"=",
"rewrittenQuery",
".",
"replace",
"(",
"'{documentdb-formattableorderbyquery-filter}'",
",",
"'true'",
")",
"return",
"rewrittenQuery"
] |
Returns rewritten query or None (if any)
|
[
"Returns",
"rewritten",
"query",
"or",
"None",
"(",
"if",
"any",
")"
] |
python
|
train
|
pyroscope/pyrocore
|
src/pyrocore/torrent/rtorrent.py
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/rtorrent.py#L122-L133
|
def _memoize(self, name, getter, *args, **kwargs):
""" Cache a stable expensive-to-get item value for later (optimized) retrieval.
"""
field = "custom_m_" + name
cached = self.fetch(field)
if cached:
value = cached
else:
value = getter(*args, **kwargs)
self._make_it_so("caching %s=%r for" % (name, value,), ["custom.set"], field[7:], value)
self._fields[field] = value
return value
|
[
"def",
"_memoize",
"(",
"self",
",",
"name",
",",
"getter",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"field",
"=",
"\"custom_m_\"",
"+",
"name",
"cached",
"=",
"self",
".",
"fetch",
"(",
"field",
")",
"if",
"cached",
":",
"value",
"=",
"cached",
"else",
":",
"value",
"=",
"getter",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"_make_it_so",
"(",
"\"caching %s=%r for\"",
"%",
"(",
"name",
",",
"value",
",",
")",
",",
"[",
"\"custom.set\"",
"]",
",",
"field",
"[",
"7",
":",
"]",
",",
"value",
")",
"self",
".",
"_fields",
"[",
"field",
"]",
"=",
"value",
"return",
"value"
] |
Cache a stable expensive-to-get item value for later (optimized) retrieval.
|
[
"Cache",
"a",
"stable",
"expensive",
"-",
"to",
"-",
"get",
"item",
"value",
"for",
"later",
"(",
"optimized",
")",
"retrieval",
"."
] |
python
|
train
|
jdrumgoole/mongodbshell
|
mongodbshell/__init__.py
|
https://github.com/jdrumgoole/mongodbshell/blob/7e194247ea2adc1f124532935507cdffafa2c1f6/mongodbshell/__init__.py#L454-L536
|
def pager(self, lines):
"""
Outputs lines to a terminal. It uses
`shutil.get_terminal_size` to determine the height of the terminal.
It expects an iterator that returns a line at a time and those lines
should be terminated by a valid newline sequence.
Behaviour is controlled by a number of external class properties.
`paginate` : Is on by default and triggers pagination. Without `paginate`
all output is written straight to the screen.
`output_file` : By assigning a name to this property we can ensure that
all output is sent to the corresponding file. Prompts are not output.
`pretty_print` : If this is set (default is on) then all output is
pretty printed with `pprint`. If it is off then the output is just
written to the screen.
`overlap` : The number of lines to overlap between one page and the
next.
:param lines:
:return: paginated output
"""
try:
line_count = 0
if self._output_filename:
print(f"Output is also going to '{self.output_file}'")
self._output_file = open(self._output_filename, "a+")
terminal_columns, terminal_lines = shutil.get_terminal_size(fallback=(80, 24))
lines_left = terminal_lines
for i, l in enumerate(lines, 1):
line_residue = 0
if self.line_numbers:
output_line = f"{i:<4} {l}"
else:
output_line = l
line_overflow = int(len(output_line) / terminal_columns)
if line_overflow:
line_residue = len(output_line) % terminal_columns
if line_overflow >= 1:
lines_left = lines_left - line_overflow
else:
lines_left = lines_left - 1
if line_residue > 1:
lines_left = lines_left - 1
# line_count = line_count + 1
print(output_line)
if self._output_file:
self._output_file.write(f"{l}\n")
self._output_file.flush()
#print(lines_left)
if (lines_left - self.overlap - 1) <= 0: # -1 to leave room for prompt
if self.paginate:
print("Hit Return to continue (q or quit to exit)", end="")
user_input = input()
if user_input.lower().strip() in ["q", "quit", "exit"]:
break
terminal_columns, terminal_lines = shutil.get_terminal_size(fallback=(80, 24))
lines_left = terminal_lines
# end for
if self._output_file:
self._output_file.close()
except KeyboardInterrupt:
print("ctrl-C...")
if self._output_file:
self._output_file.close()
|
[
"def",
"pager",
"(",
"self",
",",
"lines",
")",
":",
"try",
":",
"line_count",
"=",
"0",
"if",
"self",
".",
"_output_filename",
":",
"print",
"(",
"f\"Output is also going to '{self.output_file}'\"",
")",
"self",
".",
"_output_file",
"=",
"open",
"(",
"self",
".",
"_output_filename",
",",
"\"a+\"",
")",
"terminal_columns",
",",
"terminal_lines",
"=",
"shutil",
".",
"get_terminal_size",
"(",
"fallback",
"=",
"(",
"80",
",",
"24",
")",
")",
"lines_left",
"=",
"terminal_lines",
"for",
"i",
",",
"l",
"in",
"enumerate",
"(",
"lines",
",",
"1",
")",
":",
"line_residue",
"=",
"0",
"if",
"self",
".",
"line_numbers",
":",
"output_line",
"=",
"f\"{i:<4} {l}\"",
"else",
":",
"output_line",
"=",
"l",
"line_overflow",
"=",
"int",
"(",
"len",
"(",
"output_line",
")",
"/",
"terminal_columns",
")",
"if",
"line_overflow",
":",
"line_residue",
"=",
"len",
"(",
"output_line",
")",
"%",
"terminal_columns",
"if",
"line_overflow",
">=",
"1",
":",
"lines_left",
"=",
"lines_left",
"-",
"line_overflow",
"else",
":",
"lines_left",
"=",
"lines_left",
"-",
"1",
"if",
"line_residue",
">",
"1",
":",
"lines_left",
"=",
"lines_left",
"-",
"1",
"# line_count = line_count + 1",
"print",
"(",
"output_line",
")",
"if",
"self",
".",
"_output_file",
":",
"self",
".",
"_output_file",
".",
"write",
"(",
"f\"{l}\\n\"",
")",
"self",
".",
"_output_file",
".",
"flush",
"(",
")",
"#print(lines_left)",
"if",
"(",
"lines_left",
"-",
"self",
".",
"overlap",
"-",
"1",
")",
"<=",
"0",
":",
"# -1 to leave room for prompt",
"if",
"self",
".",
"paginate",
":",
"print",
"(",
"\"Hit Return to continue (q or quit to exit)\"",
",",
"end",
"=",
"\"\"",
")",
"user_input",
"=",
"input",
"(",
")",
"if",
"user_input",
".",
"lower",
"(",
")",
".",
"strip",
"(",
")",
"in",
"[",
"\"q\"",
",",
"\"quit\"",
",",
"\"exit\"",
"]",
":",
"break",
"terminal_columns",
",",
"terminal_lines",
"=",
"shutil",
".",
"get_terminal_size",
"(",
"fallback",
"=",
"(",
"80",
",",
"24",
")",
")",
"lines_left",
"=",
"terminal_lines",
"# end for",
"if",
"self",
".",
"_output_file",
":",
"self",
".",
"_output_file",
".",
"close",
"(",
")",
"except",
"KeyboardInterrupt",
":",
"print",
"(",
"\"ctrl-C...\"",
")",
"if",
"self",
".",
"_output_file",
":",
"self",
".",
"_output_file",
".",
"close",
"(",
")"
] |
Outputs lines to a terminal. It uses
`shutil.get_terminal_size` to determine the height of the terminal.
It expects an iterator that returns a line at a time and those lines
should be terminated by a valid newline sequence.
Behaviour is controlled by a number of external class properties.
`paginate` : Is on by default and triggers pagination. Without `paginate`
all output is written straight to the screen.
`output_file` : By assigning a name to this property we can ensure that
all output is sent to the corresponding file. Prompts are not output.
`pretty_print` : If this is set (default is on) then all output is
pretty printed with `pprint`. If it is off then the output is just
written to the screen.
`overlap` : The number of lines to overlap between one page and the
next.
:param lines:
:return: paginated output
|
[
"Outputs",
"lines",
"to",
"a",
"terminal",
".",
"It",
"uses",
"shutil",
".",
"get_terminal_size",
"to",
"determine",
"the",
"height",
"of",
"the",
"terminal",
".",
"It",
"expects",
"an",
"iterator",
"that",
"returns",
"a",
"line",
"at",
"a",
"time",
"and",
"those",
"lines",
"should",
"be",
"terminated",
"by",
"a",
"valid",
"newline",
"sequence",
"."
] |
python
|
train
|
tnkteja/myhelp
|
virtualEnvironment/lib/python2.7/site-packages/coverage/execfile.py
|
https://github.com/tnkteja/myhelp/blob/fb3a4809d448ad14d5b2e6ddf2e7e89ad52b71cb/virtualEnvironment/lib/python2.7/site-packages/coverage/execfile.py#L23-L70
|
def run_python_module(modulename, args):
"""Run a python module, as though with ``python -m name args...``.
`modulename` is the name of the module, possibly a dot-separated name.
`args` is the argument array to present as sys.argv, including the first
element naming the module being executed.
"""
openfile = None
glo, loc = globals(), locals()
try:
try:
# Search for the module - inside its parent package, if any - using
# standard import mechanics.
if '.' in modulename:
packagename, name = rsplit1(modulename, '.')
package = __import__(packagename, glo, loc, ['__path__'])
searchpath = package.__path__
else:
packagename, name = None, modulename
searchpath = None # "top-level search" in imp.find_module()
openfile, pathname, _ = imp.find_module(name, searchpath)
# Complain if this is a magic non-file module.
if openfile is None and pathname is None:
raise NoSource(
"module does not live in a file: %r" % modulename
)
# If `modulename` is actually a package, not a mere module, then we
# pretend to be Python 2.7 and try running its __main__.py script.
if openfile is None:
packagename = modulename
name = '__main__'
package = __import__(packagename, glo, loc, ['__path__'])
searchpath = package.__path__
openfile, pathname, _ = imp.find_module(name, searchpath)
except ImportError:
_, err, _ = sys.exc_info()
raise NoSource(str(err))
finally:
if openfile:
openfile.close()
# Finally, hand the file off to run_python_file for execution.
pathname = os.path.abspath(pathname)
args[0] = pathname
run_python_file(pathname, args, package=packagename)
|
[
"def",
"run_python_module",
"(",
"modulename",
",",
"args",
")",
":",
"openfile",
"=",
"None",
"glo",
",",
"loc",
"=",
"globals",
"(",
")",
",",
"locals",
"(",
")",
"try",
":",
"try",
":",
"# Search for the module - inside its parent package, if any - using",
"# standard import mechanics.",
"if",
"'.'",
"in",
"modulename",
":",
"packagename",
",",
"name",
"=",
"rsplit1",
"(",
"modulename",
",",
"'.'",
")",
"package",
"=",
"__import__",
"(",
"packagename",
",",
"glo",
",",
"loc",
",",
"[",
"'__path__'",
"]",
")",
"searchpath",
"=",
"package",
".",
"__path__",
"else",
":",
"packagename",
",",
"name",
"=",
"None",
",",
"modulename",
"searchpath",
"=",
"None",
"# \"top-level search\" in imp.find_module()",
"openfile",
",",
"pathname",
",",
"_",
"=",
"imp",
".",
"find_module",
"(",
"name",
",",
"searchpath",
")",
"# Complain if this is a magic non-file module.",
"if",
"openfile",
"is",
"None",
"and",
"pathname",
"is",
"None",
":",
"raise",
"NoSource",
"(",
"\"module does not live in a file: %r\"",
"%",
"modulename",
")",
"# If `modulename` is actually a package, not a mere module, then we",
"# pretend to be Python 2.7 and try running its __main__.py script.",
"if",
"openfile",
"is",
"None",
":",
"packagename",
"=",
"modulename",
"name",
"=",
"'__main__'",
"package",
"=",
"__import__",
"(",
"packagename",
",",
"glo",
",",
"loc",
",",
"[",
"'__path__'",
"]",
")",
"searchpath",
"=",
"package",
".",
"__path__",
"openfile",
",",
"pathname",
",",
"_",
"=",
"imp",
".",
"find_module",
"(",
"name",
",",
"searchpath",
")",
"except",
"ImportError",
":",
"_",
",",
"err",
",",
"_",
"=",
"sys",
".",
"exc_info",
"(",
")",
"raise",
"NoSource",
"(",
"str",
"(",
"err",
")",
")",
"finally",
":",
"if",
"openfile",
":",
"openfile",
".",
"close",
"(",
")",
"# Finally, hand the file off to run_python_file for execution.",
"pathname",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"pathname",
")",
"args",
"[",
"0",
"]",
"=",
"pathname",
"run_python_file",
"(",
"pathname",
",",
"args",
",",
"package",
"=",
"packagename",
")"
] |
Run a python module, as though with ``python -m name args...``.
`modulename` is the name of the module, possibly a dot-separated name.
`args` is the argument array to present as sys.argv, including the first
element naming the module being executed.
|
[
"Run",
"a",
"python",
"module",
"as",
"though",
"with",
"python",
"-",
"m",
"name",
"args",
"...",
"."
] |
python
|
test
|
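A hedged usage sketch for the function above, assuming coverage's `execfile` module is importable; the module name and file argument are only illustrative:

from coverage.execfile import run_python_module

# Equivalent in spirit to `python -m json.tool data.json`. args[0] names
# the module and is replaced with the resolved file path before execution.
run_python_module('json.tool', ['json.tool', 'data.json'])
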
allenai/allennlp
|
allennlp/data/dataset_readers/dataset_reader.py
|
https://github.com/allenai/allennlp/blob/648a36f77db7e45784c047176074f98534c76636/allennlp/data/dataset_readers/dataset_reader.py#L91-L145
|
def read(self, file_path: str) -> Iterable[Instance]:
"""
Returns an ``Iterable`` containing all the instances
in the specified dataset.
If ``self.lazy`` is False, this calls ``self._read()``,
ensures that the result is a list, then returns the resulting list.
If ``self.lazy`` is True, this returns an object whose
``__iter__`` method calls ``self._read()`` each iteration.
In this case your implementation of ``_read()`` must also be lazy
(that is, not load all instances into memory at once), otherwise
you will get a ``ConfigurationError``.
In either case, the returned ``Iterable`` can be iterated
over multiple times. It's unlikely you want to override this function,
but if you do your result should likewise be repeatedly iterable.
"""
lazy = getattr(self, 'lazy', None)
if lazy is None:
logger.warning("DatasetReader.lazy is not set, "
"did you forget to call the superclass constructor?")
if self._cache_directory:
cache_file = self._get_cache_location_for_file_path(file_path)
else:
cache_file = None
if lazy:
return _LazyInstances(lambda: self._read(file_path),
cache_file,
self.deserialize_instance,
self.serialize_instance)
else:
# First we read the instances, either from a cache or from the original file.
if cache_file and os.path.exists(cache_file):
instances = self._instances_from_cache_file(cache_file)
else:
instances = self._read(file_path)
# Then some validation.
if not isinstance(instances, list):
instances = [instance for instance in Tqdm.tqdm(instances)]
if not instances:
raise ConfigurationError("No instances were read from the given filepath {}. "
"Is the path correct?".format(file_path))
# And finally we write to the cache if we need to.
if cache_file and not os.path.exists(cache_file):
logger.info(f"Caching instances to {cache_file}")
with open(cache_file, 'w') as cache:
for instance in Tqdm.tqdm(instances):
cache.write(self.serialize_instance(instance) + '\n')
return instances
|
[
"def",
"read",
"(",
"self",
",",
"file_path",
":",
"str",
")",
"->",
"Iterable",
"[",
"Instance",
"]",
":",
"lazy",
"=",
"getattr",
"(",
"self",
",",
"'lazy'",
",",
"None",
")",
"if",
"lazy",
"is",
"None",
":",
"logger",
".",
"warning",
"(",
"\"DatasetReader.lazy is not set, \"",
"\"did you forget to call the superclass constructor?\"",
")",
"if",
"self",
".",
"_cache_directory",
":",
"cache_file",
"=",
"self",
".",
"_get_cache_location_for_file_path",
"(",
"file_path",
")",
"else",
":",
"cache_file",
"=",
"None",
"if",
"lazy",
":",
"return",
"_LazyInstances",
"(",
"lambda",
":",
"self",
".",
"_read",
"(",
"file_path",
")",
",",
"cache_file",
",",
"self",
".",
"deserialize_instance",
",",
"self",
".",
"serialize_instance",
")",
"else",
":",
"# First we read the instances, either from a cache or from the original file.",
"if",
"cache_file",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"cache_file",
")",
":",
"instances",
"=",
"self",
".",
"_instances_from_cache_file",
"(",
"cache_file",
")",
"else",
":",
"instances",
"=",
"self",
".",
"_read",
"(",
"file_path",
")",
"# Then some validation.",
"if",
"not",
"isinstance",
"(",
"instances",
",",
"list",
")",
":",
"instances",
"=",
"[",
"instance",
"for",
"instance",
"in",
"Tqdm",
".",
"tqdm",
"(",
"instances",
")",
"]",
"if",
"not",
"instances",
":",
"raise",
"ConfigurationError",
"(",
"\"No instances were read from the given filepath {}. \"",
"\"Is the path correct?\"",
".",
"format",
"(",
"file_path",
")",
")",
"# And finally we write to the cache if we need to.",
"if",
"cache_file",
"and",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"cache_file",
")",
":",
"logger",
".",
"info",
"(",
"f\"Caching instances to {cache_file}\"",
")",
"with",
"open",
"(",
"cache_file",
",",
"'w'",
")",
"as",
"cache",
":",
"for",
"instance",
"in",
"Tqdm",
".",
"tqdm",
"(",
"instances",
")",
":",
"cache",
".",
"write",
"(",
"self",
".",
"serialize_instance",
"(",
"instance",
")",
"+",
"'\\n'",
")",
"return",
"instances"
] |
Returns an ``Iterable`` containing all the instances
in the specified dataset.
If ``self.lazy`` is False, this calls ``self._read()``,
ensures that the result is a list, then returns the resulting list.
If ``self.lazy`` is True, this returns an object whose
``__iter__`` method calls ``self._read()`` each iteration.
In this case your implementation of ``_read()`` must also be lazy
(that is, not load all instances into memory at once), otherwise
you will get a ``ConfigurationError``.
In either case, the returned ``Iterable`` can be iterated
over multiple times. It's unlikely you want to override this function,
but if you do your result should likewise be repeatedly iterable.
|
[
"Returns",
"an",
"Iterable",
"containing",
"all",
"the",
"instances",
"in",
"the",
"specified",
"dataset",
"."
] |
python
|
train
|
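The laziness contract in the docstring above is easiest to see in a minimal subclass. This sketch assumes the usual allennlp 0.x import paths; the one-`LabelField`-per-line scheme is invented purely for illustration:

from typing import Iterable

from allennlp.data.dataset_readers.dataset_reader import DatasetReader
from allennlp.data.fields import LabelField
from allennlp.data.instance import Instance

class LabelLineReader(DatasetReader):
    def __init__(self, lazy: bool = True) -> None:
        super().__init__(lazy=lazy)   # sets self.lazy, consulted by read()

    def _read(self, file_path: str) -> Iterable[Instance]:
        # A generator keeps _read lazy: no instance is built until the
        # returned iterable is actually consumed.
        with open(file_path) as data_file:
            for line in data_file:
                yield Instance({'label': LabelField(line.strip())})
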
schlamar/latexmk.py
|
latexmake.py
|
https://github.com/schlamar/latexmk.py/blob/88baba40ff3e844e4542de60d2032503e206d996/latexmake.py#L160-L169
|
def _is_toc_changed(self, toc_file):
'''
Test if the *.toc file has changed during
the first latex run.
'''
fname = '%s.toc' % self.project_name
if os.path.isfile(fname):
with open(fname) as fobj:
if fobj.read() != toc_file:
return True
|
[
"def",
"_is_toc_changed",
"(",
"self",
",",
"toc_file",
")",
":",
"fname",
"=",
"'%s.toc'",
"%",
"self",
".",
"project_name",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"fname",
")",
":",
"with",
"open",
"(",
"fname",
")",
"as",
"fobj",
":",
"if",
"fobj",
".",
"read",
"(",
")",
"!=",
"toc_file",
":",
"return",
"True"
] |
Test if the *.toc file has changed during
the first latex run.
|
[
"Test",
"if",
"the",
"*",
".",
"toc",
"file",
"has",
"changed",
"during",
"the",
"first",
"latex",
"run",
"."
] |
python
|
train
|
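The method above only answers "did the .toc change?"; a typical caller loops until the table of contents stabilises. A standalone illustration of that idea (not the project's control flow; the bare `latex` invocation and the run limit are assumptions):

import os
import subprocess

def read_toc(project_name):
    fname = '%s.toc' % project_name
    if os.path.isfile(fname):
        with open(fname) as fobj:
            return fobj.read()
    return ''

def compile_until_stable(project_name, max_runs=5):
    for _ in range(max_runs):
        toc_before = read_toc(project_name)
        subprocess.call(['latex', '%s.tex' % project_name])
        if read_toc(project_name) == toc_before:
            break   # .toc stable; no further rerun needed
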
coordt/django-alphabetfilter
|
alphafilter/templatetags/alphafilter.py
|
https://github.com/coordt/django-alphabetfilter/blob/a7bc21c0ea985c2021a4668241bf643c615c6c1f/alphafilter/templatetags/alphafilter.py#L177-L202
|
def qs_alphabet_filter(parser, token):
"""
The parser/tokenizer for the queryset alphabet filter.
{% qs_alphabet_filter <queryset> <field name> [<template name>] [strip_params=comma,delim,list] %}
{% qs_alphabet_filter objects lastname myapp/template.html %}
The template name is optional and uses alphafilter/alphabet.html if not
specified
"""
bits = token.split_contents()
if len(bits) == 3:
return AlphabetFilterNode(bits[1], bits[2])
elif len(bits) == 4:
if "=" in bits[3]:
key, val = bits[3].split('=')
return AlphabetFilterNode(bits[1], bits[2], strip_params=val)
else:
return AlphabetFilterNode(bits[1], bits[2], template_name=bits[3])
elif len(bits) == 5:
key, val = bits[4].split('=')
return AlphabetFilterNode(bits[1], bits[2], bits[3], strip_params=val)
else:
raise TemplateSyntaxError("%s is called with a queryset and field "
"name, and optionally a template." % bits[0])
|
[
"def",
"qs_alphabet_filter",
"(",
"parser",
",",
"token",
")",
":",
"bits",
"=",
"token",
".",
"split_contents",
"(",
")",
"if",
"len",
"(",
"bits",
")",
"==",
"3",
":",
"return",
"AlphabetFilterNode",
"(",
"bits",
"[",
"1",
"]",
",",
"bits",
"[",
"2",
"]",
")",
"elif",
"len",
"(",
"bits",
")",
"==",
"4",
":",
"if",
"\"=\"",
"in",
"bits",
"[",
"3",
"]",
":",
"key",
",",
"val",
"=",
"bits",
"[",
"3",
"]",
".",
"split",
"(",
"'='",
")",
"return",
"AlphabetFilterNode",
"(",
"bits",
"[",
"1",
"]",
",",
"bits",
"[",
"2",
"]",
",",
"strip_params",
"=",
"val",
")",
"else",
":",
"return",
"AlphabetFilterNode",
"(",
"bits",
"[",
"1",
"]",
",",
"bits",
"[",
"2",
"]",
",",
"template_name",
"=",
"bits",
"[",
"3",
"]",
")",
"elif",
"len",
"(",
"bits",
")",
"==",
"5",
":",
"key",
",",
"val",
"=",
"bits",
"[",
"4",
"]",
".",
"split",
"(",
"'='",
")",
"return",
"AlphabetFilterNode",
"(",
"bits",
"[",
"1",
"]",
",",
"bits",
"[",
"2",
"]",
",",
"bits",
"[",
"3",
"]",
",",
"bits",
"[",
"4",
"]",
")",
"else",
":",
"raise",
"TemplateSyntaxError",
"(",
"\"%s is called with a queryset and field \"",
"\"name, and optionally a template.\"",
"%",
"bits",
"[",
"0",
"]",
")"
] |
The parser/tokenizer for the queryset alphabet filter.
{% qs_alphabet_filter <queryset> <field name> [<template name>] [strip_params=comma,delim,list] %}
{% qs_alphabet_filter objects lastname myapp/template.html %}
The template name is optional and uses alphafilter/alphabet.html if not
specified
|
[
"The",
"parser",
"/",
"tokenizer",
"for",
"the",
"queryset",
"alphabet",
"filter",
"."
] |
python
|
train
|
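For completeness, a compiler function like the one above is normally wired into a template library; a general Django sketch (not necessarily this app's exact registration code):

from django import template

register = template.Library()
# Makes {% qs_alphabet_filter ... %} available once the library is loaded.
register.tag('qs_alphabet_filter', qs_alphabet_filter)
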
saltstack/salt
|
salt/modules/ddns.py
|
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/ddns.py#L55-L68
|
def _config(name, key=None, **kwargs):
'''
Return a value for 'name' from command line args then config file options.
Specify 'key' if the config file option is not the same as 'name'.
'''
if key is None:
key = name
if name in kwargs:
value = kwargs[name]
else:
value = __salt__['config.option']('ddns.{0}'.format(key))
if not value:
value = None
return value
|
[
"def",
"_config",
"(",
"name",
",",
"key",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"key",
"is",
"None",
":",
"key",
"=",
"name",
"if",
"name",
"in",
"kwargs",
":",
"value",
"=",
"kwargs",
"[",
"name",
"]",
"else",
":",
"value",
"=",
"__salt__",
"[",
"'config.option'",
"]",
"(",
"'ddns.{0}'",
".",
"format",
"(",
"key",
")",
")",
"if",
"not",
"value",
":",
"value",
"=",
"None",
"return",
"value"
] |
Return a value for 'name' from command line args then config file options.
Specify 'key' if the config file option is not the same as 'name'.
|
[
"Return",
"a",
"value",
"for",
"name",
"from",
"command",
"line",
"args",
"then",
"config",
"file",
"options",
".",
"Specify",
"key",
"if",
"the",
"config",
"file",
"option",
"is",
"not",
"the",
"same",
"as",
"name",
"."
] |
python
|
train
|
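Hypothetical call sites showing the lookup order the docstring describes; these only run inside a Salt execution module, where `__salt__` is injected:

zone = _config('zone', zone='example.com')   # kwarg wins -> 'example.com'
server = _config('server')                   # falls back to config option 'ddns.server'
ttl = _config('ttl', key='default_ttl')      # falls back to 'ddns.default_ttl'
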
rkhleics/wagtailmodeladmin
|
wagtailmodeladmin/options.py
|
https://github.com/rkhleics/wagtailmodeladmin/blob/7fddc853bab2ff3868b8c7a03329308c55f16358/wagtailmodeladmin/options.py#L481-L490
|
def get_permissions_for_registration(self):
"""
Utilised by Wagtail's 'register_permissions' hook to allow permissions
for a model to be assigned to groups in settings. This is only required
if the model isn't a Page model, and isn't registered as a Snippet
"""
from wagtail.wagtailsnippets.models import SNIPPET_MODELS
if not self.is_pagemodel and self.model not in SNIPPET_MODELS:
return self.permission_helper.get_all_model_permissions()
return Permission.objects.none()
|
[
"def",
"get_permissions_for_registration",
"(",
"self",
")",
":",
"from",
"wagtail",
".",
"wagtailsnippets",
".",
"models",
"import",
"SNIPPET_MODELS",
"if",
"not",
"self",
".",
"is_pagemodel",
"and",
"self",
".",
"model",
"not",
"in",
"SNIPPET_MODELS",
":",
"return",
"self",
".",
"permission_helper",
".",
"get_all_model_permissions",
"(",
")",
"return",
"Permission",
".",
"objects",
".",
"none",
"(",
")"
] |
Utilised by Wagtail's 'register_permissions' hook to allow permissions
for a model to be assigned to groups in settings. This is only required
if the model isn't a Page model, and isn't registered as a Snippet
|
[
"Utilised",
"by",
"Wagtail",
"s",
"register_permissions",
"hook",
"to",
"allow",
"permissions",
"for",
"a",
"model",
"to",
"be",
"assigned",
"to",
"groups",
"in",
"settings",
".",
"This",
"is",
"only",
"required",
"if",
"the",
"model",
"isn",
"t",
"a",
"Page",
"model",
"and",
"isn",
"t",
"registered",
"as",
"a",
"Snippet"
] |
python
|
train
|
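A sketch of how Wagtail consumes this method through its hook system; the hook name matches this Wagtail era, while `my_model_admin` stands in for an already-constructed ModelAdmin instance:

from wagtail.wagtailcore import hooks

@hooks.register('register_permissions')
def register_permissions():
    return my_model_admin.get_permissions_for_registration()
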
susam/ice
|
ice.py
|
https://github.com/susam/ice/blob/532e685c504ea96f9e42833594585159ac1d2068/ice.py#L50-L91
|
def cube():
"""Return an Ice application with a default home page.
Create :class:`Ice` object, add a route to return the default page
when a client requests the server root, i.e. /, using HTTP GET
method, add an error handler to return HTTP error pages when an
error occurs and return this object. The returned object can be used
as a WSGI application.
Returns:
Ice: WSGI application.
"""
app = Ice()
@app.get('/')
def default_home_page():
"""Return a default home page."""
return simple_html('It works!',
'<h1>It works!</h1>\n'
'<p>This is the default ice web page.</p>')
@app.error()
def generic_error_page():
"""Return a simple and generic error page."""
return simple_html(app.response.status_line,
'<h1>{title}</h1>\n'
'<p>{description}</p>\n'
'<hr>\n'
'<address>Ice/{version}</address>'.format(
title=app.response.status_line,
description=app.response.status_detail,
version=__version__))
def simple_html(title, body):
"""Return a simple HTML page."""
return (
'<!DOCTYPE html>\n'
'<html>\n<head><title>{title}</title></head>\n'
'<body>\n{body}\n</body>\n</html>\n'
).format(title=title, body=body)
return app
|
[
"def",
"cube",
"(",
")",
":",
"app",
"=",
"Ice",
"(",
")",
"@",
"app",
".",
"get",
"(",
"'/'",
")",
"def",
"default_home_page",
"(",
")",
":",
"\"\"\"Return a default home page.\"\"\"",
"return",
"simple_html",
"(",
"'It works!'",
",",
"'<h1>It works!</h1>\\n'",
"'<p>This is the default ice web page.</p>'",
")",
"@",
"app",
".",
"error",
"(",
")",
"def",
"generic_error_page",
"(",
")",
":",
"\"\"\"Return a simple and generic error page.\"\"\"",
"return",
"simple_html",
"(",
"app",
".",
"response",
".",
"status_line",
",",
"'<h1>{title}</h1>\\n'",
"'<p>{description}</p>\\n'",
"'<hr>\\n'",
"'<address>Ice/{version}</address>'",
".",
"format",
"(",
"title",
"=",
"app",
".",
"response",
".",
"status_line",
",",
"description",
"=",
"app",
".",
"response",
".",
"status_detail",
",",
"version",
"=",
"__version__",
")",
")",
"def",
"simple_html",
"(",
"title",
",",
"body",
")",
":",
"\"\"\"Return a simple HTML page.\"\"\"",
"return",
"(",
"'<!DOCTYPE html>\\n'",
"'<html>\\n<head><title>{title}</title></head>\\n'",
"'<body>\\n{body}\\n</body>\\n</html>\\n'",
")",
".",
"format",
"(",
"title",
"=",
"title",
",",
"body",
"=",
"body",
")",
"return",
"app"
] |
Return an Ice application with a default home page.
Create :class:`Ice` object, add a route to return the default page
when a client requests the server root, i.e. /, using HTTP GET
method, add an error handler to return HTTP error pages when an
error occurs and return this object. The returned object can be used
as a WSGI application.
Returns:
Ice: WSGI application.
|
[
"Return",
"an",
"Ice",
"application",
"with",
"a",
"default",
"home",
"page",
"."
] |
python
|
test
|
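Since the docstring states the returned object is a WSGI application, it can be served with the standard library's reference server; host and port here are placeholders:

from wsgiref.simple_server import make_server

from ice import cube

app = cube()
make_server('127.0.0.1', 8080, app).serve_forever()
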
pycontribs/jira
|
jira/client.py
|
https://github.com/pycontribs/jira/blob/397db5d78441ed6a680a9b7db4c62030ade1fd8a/jira/client.py#L1821-L1828
|
def add_vote(self, issue):
"""Register a vote for the current authenticated user on an issue.
:param issue: ID or key of the issue to vote on
:rtype: Response
"""
url = self._get_url('issue/' + str(issue) + '/votes')
return self._session.post(url)
|
[
"def",
"add_vote",
"(",
"self",
",",
"issue",
")",
":",
"url",
"=",
"self",
".",
"_get_url",
"(",
"'issue/'",
"+",
"str",
"(",
"issue",
")",
"+",
"'/votes'",
")",
"return",
"self",
".",
"_session",
".",
"post",
"(",
"url",
")"
] |
Register a vote for the current authenticated user on an issue.
:param issue: ID or key of the issue to vote on
:rtype: Response
|
[
"Register",
"a",
"vote",
"for",
"the",
"current",
"authenticated",
"user",
"on",
"an",
"issue",
"."
] |
python
|
train
|
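An illustrative client-side call for the method above; the server URL, credentials, and issue key are placeholders:

from jira import JIRA

jira = JIRA(server='https://jira.example.com', basic_auth=('user', 'secret'))
response = jira.add_vote('PROJ-123')   # POSTs to .../issue/PROJ-123/votes
response.raise_for_status()            # the raw requests Response is returned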