Dataset columns: text (string, lengths 89 to 104k) | code_tokens (list) | avg_line_len (float64, 7.91 to 980) | score (float64, 0 to 630)
def get_member_slackuid(self, slack):
"""Get a CSHMember object.
Arguments:
slack -- the Slack UID of the member
Returns:
    The corresponding CSHMember object, or None if the Slack UID provided does not correspond to a CSH member
"""
members = self.__con__.search_s(
CSHMember.__ldap_user_ou__,
ldap.SCOPE_SUBTREE,
"(slackuid=%s)" % slack,
['ipaUniqueID'])
if members:
return CSHMember(
self,
members[0][1]['ipaUniqueID'][0].decode('utf-8'),
False)
return None
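The `%`-interpolated LDAP filter above is injectable if `slack` is attacker-controlled. A hedged hardening sketch, assuming the standard python-ldap package is the `ldap` module in use here:

```python
# Sketch: escape user input before building the search filter.
# python-ldap ships ldap.filter.escape_filter_chars for exactly this purpose.
import ldap.filter

safe_uid = ldap.filter.escape_filter_chars(slack)
search_filter = "(slackuid=%s)" % safe_uid  # then pass search_filter to search_s
```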
def _leapfrog_integrator_one_step(
target_log_prob_fn,
independent_chain_ndims,
step_sizes,
current_momentum_parts,
current_state_parts,
current_target_log_prob,
current_target_log_prob_grad_parts,
state_gradients_are_stopped=False,
name=None):
"""Applies `num_leapfrog_steps` of the leapfrog integrator.
Assumes a simple quadratic kinetic energy function: `0.5 ||momentum||**2`.
#### Examples:
##### Simple quadratic potential.
```python
import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np
import tensorflow as tf
  from tensorflow_probability.python.mcmc.hmc import _leapfrog_integrator_one_step  # pylint: disable=line-too-long
  import tensorflow_probability as tfp
  tfd = tfp.distributions
dims = 10
num_iter = int(1e3)
dtype = np.float32
position = tf.placeholder(np.float32)
momentum = tf.placeholder(np.float32)
target_log_prob_fn = tfd.MultivariateNormalDiag(
loc=tf.zeros(dims, dtype)).log_prob
def _leapfrog_one_step(*args):
# Closure representing computation done during each leapfrog step.
return _leapfrog_integrator_one_step(
target_log_prob_fn=target_log_prob_fn,
independent_chain_ndims=0,
step_sizes=[0.1],
current_momentum_parts=args[0],
current_state_parts=args[1],
current_target_log_prob=args[2],
current_target_log_prob_grad_parts=args[3])
# Do leapfrog integration.
[
[next_momentum],
[next_position],
next_target_log_prob,
next_target_log_prob_grad_parts,
] = tf.while_loop(
cond=lambda *args: True,
body=_leapfrog_one_step,
loop_vars=[
[momentum],
[position],
target_log_prob_fn(position),
tf.gradients(target_log_prob_fn(position), position),
],
maximum_iterations=3)
momentum_ = np.random.randn(dims).astype(dtype)
position_ = np.random.randn(dims).astype(dtype)
positions = np.zeros([num_iter, dims], dtype)
with tf.Session() as sess:
  for i in range(num_iter):
    momentum_, position_ = sess.run(
        [next_momentum, next_position],
        feed_dict={position: position_, momentum: momentum_})
positions[i] = position_
plt.plot(positions[:, 0]); # Sinusoidal.
```
Args:
target_log_prob_fn: Python callable which takes an argument like
`*current_state_parts` and returns its (possibly unnormalized) log-density
under the target distribution.
independent_chain_ndims: Scalar `int` `Tensor` representing the number of
leftmost `Tensor` dimensions which index independent chains.
step_sizes: Python `list` of `Tensor`s representing the step size for the
leapfrog integrator. Must broadcast with the shape of
`current_state_parts`. Larger step sizes lead to faster progress, but
too-large step sizes make rejection exponentially more likely. When
possible, it's often helpful to match per-variable step sizes to the
standard deviations of the target distribution in each variable.
    current_momentum_parts: Python `list` of `Tensor`s representing the value(s)
      of the momentum variable(s) to update.
current_state_parts: Python `list` of `Tensor`s representing the current
state(s) of the Markov chain(s). The first `independent_chain_ndims` of
the `Tensor`(s) index different chains.
current_target_log_prob: `Tensor` representing the value of
`target_log_prob_fn(*current_state_parts)`. The only reason to specify
this argument is to reduce TF graph size.
current_target_log_prob_grad_parts: Python list of `Tensor`s representing
      gradient of `target_log_prob_fn(*current_state_parts)` wrt
`current_state_parts`. Must have same shape as `current_state_parts`. The
only reason to specify this argument is to reduce TF graph size.
    state_gradients_are_stopped: Python `bool` indicating whether the proposed
      new state should be run through `tf.stop_gradient`. This is particularly
      useful when combining optimization over samples from the HMC chain.
Default value: `False` (i.e., do not apply `stop_gradient`).
name: Python `str` name prefixed to Ops created by this function.
Default value: `None` (i.e., 'hmc_leapfrog_integrator').
Returns:
proposed_momentum_parts: Updated value of the momentum.
proposed_state_parts: Tensor or Python list of `Tensor`s representing the
state(s) of the Markov chain(s) at each result step. Has same shape as
input `current_state_parts`.
proposed_target_log_prob: `Tensor` representing the value of
`target_log_prob_fn` at `next_state`.
proposed_target_log_prob_grad_parts: Gradient of `proposed_target_log_prob`
wrt `next_state`.
Raises:
ValueError: if `len(momentum_parts) != len(state_parts)`.
ValueError: if `len(state_parts) != len(step_sizes)`.
ValueError: if `len(state_parts) != len(grads_target_log_prob)`.
TypeError: if `not target_log_prob.dtype.is_floating`.
"""
# Note on per-variable step sizes:
#
# Using per-variable step sizes is equivalent to using the same step
# size for all variables and adding a diagonal mass matrix in the
# kinetic energy term of the Hamiltonian being integrated. This is
# hinted at by Neal (2011) but not derived in detail there.
#
# Let x and v be position and momentum variables respectively.
# Let g(x) be the gradient of `target_log_prob_fn(x)`.
# Let S be a diagonal matrix of per-variable step sizes.
# Let the Hamiltonian H(x, v) = -target_log_prob_fn(x) + 0.5 * ||v||**2.
#
# Using per-variable step sizes gives the updates
# v' = v + 0.5 * matmul(S, g(x))
# x'' = x + matmul(S, v')
# v'' = v' + 0.5 * matmul(S, g(x''))
#
# Let u = matmul(inv(S), v).
# Multiplying v by inv(S) in the updates above gives the transformed dynamics
# u' = matmul(inv(S), v') = matmul(inv(S), v) + 0.5 * g(x)
# = u + 0.5 * g(x)
# x'' = x + matmul(S, v') = x + matmul(S**2, u')
# u'' = matmul(inv(S), v'') = matmul(inv(S), v') + 0.5 * g(x'')
# = u' + 0.5 * g(x'')
#
# These are exactly the leapfrog updates for the Hamiltonian
# H'(x, u) = -target_log_prob_fn(x) + 0.5 * u^T S**2 u
# = -target_log_prob_fn(x) + 0.5 * ||v||**2 = H(x, v).
#
# To summarize:
#
# * Using per-variable step sizes implicitly simulates the dynamics
# of the Hamiltonian H' (which are energy-conserving in H'). We
# keep track of v instead of u, but the underlying dynamics are
# the same if we transform back.
# * The value of the Hamiltonian H'(x, u) is the same as the value
# of the original Hamiltonian H(x, v) after we transform back from
# u to v.
# * Sampling v ~ N(0, I) is equivalent to sampling u ~ N(0, S**-2).
#
# So using per-variable step sizes in HMC will give results that are
# exactly identical to explicitly using a diagonal mass matrix.
with tf.compat.v1.name_scope(name, 'hmc_leapfrog_integrator_one_step', [
independent_chain_ndims, step_sizes, current_momentum_parts,
current_state_parts, current_target_log_prob,
current_target_log_prob_grad_parts
]):
# Step 1: Update momentum.
proposed_momentum_parts = [
v + 0.5 * tf.cast(eps, v.dtype) * g
for v, eps, g
in zip(current_momentum_parts,
step_sizes,
current_target_log_prob_grad_parts)]
# Step 2: Update state.
proposed_state_parts = [
x + tf.cast(eps, v.dtype) * v
for x, eps, v
in zip(current_state_parts,
step_sizes,
proposed_momentum_parts)]
if state_gradients_are_stopped:
proposed_state_parts = [tf.stop_gradient(x) for x in proposed_state_parts]
# Step 3a: Re-evaluate target-log-prob (and grad) at proposed state.
[
proposed_target_log_prob,
proposed_target_log_prob_grad_parts,
] = mcmc_util.maybe_call_fn_and_grads(
target_log_prob_fn,
proposed_state_parts)
if not proposed_target_log_prob.dtype.is_floating:
raise TypeError('`target_log_prob_fn` must produce a `Tensor` '
'with `float` `dtype`.')
if any(g is None for g in proposed_target_log_prob_grad_parts):
raise ValueError(
'Encountered `None` gradient. Does your target `target_log_prob_fn` '
'access all `tf.Variable`s via `tf.get_variable`?\n'
' current_state_parts: {}\n'
' proposed_state_parts: {}\n'
' proposed_target_log_prob_grad_parts: {}'.format(
current_state_parts,
proposed_state_parts,
proposed_target_log_prob_grad_parts))
# Step 3b: Update momentum (again).
proposed_momentum_parts = [
v + 0.5 * tf.cast(eps, v.dtype) * g
for v, eps, g
in zip(proposed_momentum_parts,
step_sizes,
proposed_target_log_prob_grad_parts)]
return [
proposed_momentum_parts,
proposed_state_parts,
proposed_target_log_prob,
proposed_target_log_prob_grad_parts,
]
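The long comment above argues that per-variable step sizes are equivalent to a unit step with a diagonal mass matrix S**2. A minimal NumPy sketch (not part of the module; a standard normal target is assumed) that checks the algebra numerically:

```python
import numpy as np

grad_fn = lambda x: -x  # gradient of log N(0, I)

def leapfrog_per_variable(x, v, S):
    # v' = v + 0.5*S*g(x);  x'' = x + S*v';  v'' = v' + 0.5*S*g(x'')
    v = v + 0.5 * S * grad_fn(x)
    x = x + S * v
    v = v + 0.5 * S * grad_fn(x)
    return x, v

rng = np.random.default_rng(0)
S = np.array([0.1, 0.2, 0.3])
x, v = rng.standard_normal(3), rng.standard_normal(3)
x1, v1 = leapfrog_per_variable(x, v, S)

# Same step in transformed coordinates u = v / S: unit step size,
# diagonal mass matrix S**2, i.e. H'(x, u) = -log p(x) + 0.5 * u @ (S**2 * u).
u = v / S + 0.5 * grad_fn(x)
x2 = x + S**2 * u
u = u + 0.5 * grad_fn(x2)
assert np.allclose(x1, x2) and np.allclose(v1, S * u)
```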
def check_offset(self):
"""Check to see if initial position and goal are the same
if they are, offset slightly so that the forcing term is not 0"""
for d in range(self.dmps):
if (self.y0[d] == self.goal[d]):
self.goal[d] += 1e-4
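In the usual DMP formulation the forcing term is scaled by `(goal - y0)`, so an identical start and goal would silence it entirely for that dimension; the `1e-4` nudge keeps it alive. A one-line illustration (hedged; the exact scaling depends on the DMP variant):

```python
y0, goal = 0.5, 0.5
print(goal - y0)            # 0.0    -> forcing term is multiplied away
print((goal + 1e-4) - y0)   # 0.0001 -> nonzero after check_offset()
```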
def convert(model, input_features, output_features):
"""Convert a _imputer model to the protobuf spec.
Parameters
----------
model: Imputer
A trained Imputer model.
input_features: str
Name of the input column.
output_features: str
Name of the output column.
Returns
-------
model_spec: An object of type Model_pb.
Protobuf representation of the model
"""
_INTERMEDIATE_FEATURE_NAME = "__sparse_vector_features__"
n_dimensions = len(model.feature_names_)
input_features = process_or_validate_features(input_features)
# Ensure that the output_features are also solid.
output_features = process_or_validate_features(output_features, n_dimensions)
# The DictVectorizer in the framework outputs a sparse dictionary
# of index to value due to other considerations, but we are expecting
# the output of this to be a dense feature vector. To make that happen,
# put a feature_vectorizer immediately after the dict vectorizer.
pline = Pipeline(input_features, output_features)
# Set the basic model parameters of the dict vectorizer component.
dv_spec = _Model_pb2.Model()
dv_spec.specificationVersion = SPECIFICATION_VERSION
# Set up the dict vectorizer parameters
tr_spec = dv_spec.dictVectorizer
is_str = None
    # Track whether the mapping keys are str or int; mixing the two is invalid.
    for feature_name in model.feature_names_:
        if isinstance(feature_name, _six.string_types):
            if is_str is False:
                raise ValueError("Mapping of DictVectorizer mixes int and str types.")
            tr_spec.stringToIndex.vector.append(feature_name)
            is_str = True
        if isinstance(feature_name, _six.integer_types):
            if is_str is True:
                raise ValueError("Mapping of DictVectorizer mixes int and str types.")
            tr_spec.int64ToIndex.vector.append(feature_name)
            is_str = False
intermediate_features = [(_INTERMEDIATE_FEATURE_NAME,
datatypes.Dictionary(key_type = int))]
# Set the interface for the dict vectorizer with the input and the
# intermediate output
set_transform_interface_params(
dv_spec, input_features, intermediate_features)
pline.add_model(dv_spec)
# Follow the dict vectorizer by a feature_vectorizer to change the sparse
# output layer into a dense vector as expected.
fvec, _num_out_dim = create_feature_vectorizer(intermediate_features,
output_features[0][0], {"__sparse_vector_features__" : n_dimensions})
pline.add_model(fvec)
return _MLModel(pline.spec)
def get_params(self):
"""Get signature and params
"""
params = {
'key': self.get_app_key(),
'uid': self.user_id,
'widget': self.widget_code
}
products_number = len(self.products)
if self.get_api_type() == self.API_GOODS:
if isinstance(self.products, list):
if products_number == 1:
product = self.products[0]
if isinstance(product, Product):
post_trial_product = None
if isinstance(product.get_trial_product(), Product):
post_trial_product = product
product = product.get_trial_product()
params['amount'] = product.get_amount()
params['currencyCode'] = product.get_currency_code()
params['ag_name'] = product.get_name()
params['ag_external_id'] = product.get_id()
params['ag_type'] = product.get_type()
if product.get_type() == Product.TYPE_SUBSCRIPTION:
params['ag_period_length'] = product.get_period_length()
params['ag_period_type'] = product.get_period_type()
                    if product.is_recurring():
                        params['ag_recurring'] = 1
if post_trial_product:
params['ag_trial'] = 1
params['ag_post_trial_external_id'] = post_trial_product.get_id()
params['ag_post_trial_period_length'] = post_trial_product.get_period_length()
params['ag_post_trial_period_type'] = post_trial_product.get_period_type()
params['ag_post_trial_name'] = post_trial_product.get_name()
params['post_trial_amount'] = post_trial_product.get_amount()
params['post_trial_currencyCode'] = post_trial_product.get_currency_code()
else:
self.append_to_errors('Not a Product instance')
else:
self.append_to_errors('Only 1 product is allowed')
elif self.get_api_type() == self.API_CART:
index = 0
for product in self.products:
params['external_ids[' + str(index) + ']'] = product.get_id()
if product.get_amount() > 0:
params['prices[' + str(index) + ']'] = product.get_amount()
if product.get_currency_code() != '' and product.get_currency_code() is not None:
params['currencies[' + str(index) + ']'] = product.get_currency_code()
index += 1
params['sign_version'] = signature_version = str(self.get_default_widget_signature())
if not self.is_empty(self.extra_params, 'sign_version'):
signature_version = params['sign_version'] = str(self.extra_params['sign_version'])
params = self.array_merge(params, self.extra_params)
params['sign'] = self.calculate_signature(params, self.get_secret_key(), int(signature_version))
return params
def pack(self):
'''
Pack this exception into a serializable dictionary that is safe for
transport via msgpack
'''
if six.PY3:
return {'message': six.text_type(self), 'args': self.args}
return dict(message=self.__unicode__(), args=self.args)
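A hedged round-trip sketch showing why `pack()` returns only plain data (the `DemoError` class is hypothetical and mirrors the Python 3 branch above):

```python
import msgpack

class DemoError(Exception):
    def pack(self):
        return {'message': str(self), 'args': self.args}

payload = msgpack.packb(DemoError('disk full').pack(), use_bin_type=True)
print(msgpack.unpackb(payload, raw=False))
# {'message': 'disk full', 'args': ['disk full']}  (the tuple comes back as a list)
```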
def yield_pair_gradients(self, index1, index2):
"""Yields pairs ((s'(r_ij), grad_i v(bar{r}_ij))"""
strength = self.strengths[index1, index2]
distance = self.distances[index1, index2]
yield -6*strength*distance**(-7), np.zeros(3)
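A quick finite-difference check of the yielded derivative (hedged: it assumes the pair potential here is v(r) = strength * r**(-6), which is what the closed form implies):

```python
# Central difference vs. the analytic s'(r) = -6 * strength * r**(-7).
strength, r, h = 2.0, 1.3, 1e-6
numeric = (strength * (r + h)**-6 - strength * (r - h)**-6) / (2 * h)
assert abs(numeric - (-6 * strength * r**-7)) < 1e-4
```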
def _handle_successor_multitargets(self, job, successor, all_successors):
"""
Generate new jobs for all possible successor targets when there are more than one possible concrete value for
successor.ip
:param VFGJob job: The VFGJob instance.
:param SimState successor: The succeeding state.
:param list all_successors: All succeeding states from the same VFGJob.
:return: A list of new succeeding jobs
:rtype: list
"""
new_jobs = [ ]
# Currently we assume a legit jumping target cannot have more than 256 concrete values
# TODO: make it a setting on VFG
MAX_NUMBER_OF_CONCRETE_VALUES = 256
all_possible_ips = successor.solver.eval_upto(successor.ip, MAX_NUMBER_OF_CONCRETE_VALUES + 1)
if len(all_possible_ips) > MAX_NUMBER_OF_CONCRETE_VALUES:
l.warning("IP can be concretized to more than %d values, which means it might be corrupted.",
MAX_NUMBER_OF_CONCRETE_VALUES)
return [ ]
# Call this function to generate a successor for each possible IP
for ip in all_possible_ips:
concrete_successor = successor.copy()
concrete_successor.ip = ip
concrete_jobs = self._handle_successor(job, concrete_successor, all_successors)
if job.is_call_jump: # TODO: take care of syscalls
for new_job in concrete_jobs:
# TODO: correctly fill the return address. The return address can be found from the
# TODO: fakeret successor in the `successors` list
function_analysis_task = FunctionAnalysis(new_job.addr, None)
# log the new job
function_analysis_task.jobs.append(new_job)
# put it onto the stack
self._task_stack.append(function_analysis_task)
# log it in the call_task
job.call_task.register_function_analysis(function_analysis_task)
new_jobs.extend(concrete_jobs)
return new_jobs
def predict(self, measurement, output_format='array'):
"""
Method to predict the class labels for the provided data
:param measurement: the point to classify
:type measurement: pandas.DataFrame
:param output_format: the format to return the scores ('array' or 'str')
:type output_format: string
        :return prediction: the prediction for a given test point
:rtype prediction: np.array
"""
scores = np.array([])
for obs in self.observations:
knn = self.__get_knn_by_observation(obs)
p, ids = self.__get_features_for_observation(data_frame=measurement, observation=obs,
skip_id=3497, last_column_is_id=True)
score = knn.predict(pd.DataFrame(p).T)
scores = np.append(scores, score, axis=0)
if output_format == 'array':
return scores.astype(int)
else:
return np.array_str(scores.astype(int))
def focus_changed(self):
"""Editor focus has changed"""
fwidget = QApplication.focusWidget()
for finfo in self.data:
if fwidget is finfo.editor:
self.refresh()
self.editor_focus_changed.emit()
def cipher(self):
"""Applies the Caesar shift cipher.
Based on the attributes of the object, applies the Caesar shift cipher
to the message attribute. Accepts positive and negative integers as
offsets.
Required attributes:
message
offset
Returns:
String with cipher applied.
"""
# If no offset is selected, pick random one with sufficient distance
# from original.
if self.offset is False:
self.offset = randrange(5, 25)
logging.info("Random offset selected: {0}".format(self.offset))
logging.debug("Offset set: {0}".format(self.offset))
# Cipher
ciphered_message_list = list(self.message)
for i, letter in enumerate(ciphered_message_list):
if letter.isalpha():
# Use default upper and lower case characters if alphabet
# not supplied by user.
if letter.isupper():
alphabet = [character.upper()
for character in self.alphabet]
else:
alphabet = self.alphabet
logging.debug("Letter: {0}".format(letter))
logging.debug("Alphabet: {0}".format(alphabet))
value = alphabet.index(letter)
cipher_value = value + self.offset
if cipher_value > 25 or cipher_value < 0:
cipher_value = cipher_value % 26
logging.debug("Cipher value: {0}".format(cipher_value))
ciphered_message_list[i] = alphabet[cipher_value]
logging.debug("Ciphered letter: {0}".format(letter))
self.message = ''.join(ciphered_message_list)
return self.message
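For reference, the same shift logic in a self-contained form (a sketch assuming the default a-z alphabet rather than a user-supplied one):

```python
def caesar(message, offset):
    out = []
    for ch in message:
        if ch.isalpha():
            base = ord('A') if ch.isupper() else ord('a')
            out.append(chr(base + (ord(ch) - base + offset) % 26))
        else:
            out.append(ch)
    return ''.join(out)

assert caesar('Hello, World!', 3) == 'Khoor, Zruog!'
assert caesar(caesar('attack at dawn', 7), -7) == 'attack at dawn'  # decode = negative offset
```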
def get_smart_contract(self, hex_contract_address: str, is_full: bool = False) -> dict:
"""
This interface is used to get the information of smart contract based on the specified hexadecimal hash value.
:param hex_contract_address: str, a hexadecimal hash value.
        :param is_full: bool, whether to return the full JSON-RPC response instead of only its 'result' field.
:return: the information of smart contract in dictionary form.
"""
if not isinstance(hex_contract_address, str):
raise SDKException(ErrorCode.param_err('a hexadecimal contract address is required.'))
if len(hex_contract_address) != 40:
            raise SDKException(ErrorCode.param_err('the length of the contract address should be 40 hexadecimal characters (20 bytes).'))
payload = self.generate_json_rpc_payload(RpcMethod.GET_SMART_CONTRACT, [hex_contract_address, 1])
response = self.__post(self.__url, payload)
if is_full:
return response
return response['result']
def reverseCommit(self):
"""
Re-insert the previously removed character(s).
"""
# Get the text cursor for the current document.
tc = self.qteWidget.textCursor()
# Mark the previously inserted text and remove it.
tc.setPosition(self.cursorPos0, QtGui.QTextCursor.MoveAnchor)
tc.setPosition(self.cursorPos1, QtGui.QTextCursor.KeepAnchor)
tc.removeSelectedText()
tc.setPosition(self.cursorPos0, QtGui.QTextCursor.MoveAnchor)
# Apply the changes.
self.qteWidget.setTextCursor(tc)
def ConsultarTiposCategoriaEmisor(self, sep="||"):
"Obtener el código y descripción para tipos de categorías de emisor"
ret = self.client.consultarTiposCategoriaEmisor(
authRequest={
'token': self.Token, 'sign': self.Sign,
'cuitRepresentada': self.Cuit, },
)['consultarCategoriasEmisorReturn']
self.__analizar_errores(ret)
array = ret.get('arrayCategoriasEmisor', [])
lista = [it['codigoDescripcionString'] for it in array]
return [(u"%s {codigo} %s {descripcion} %s" % (sep, sep, sep)).format(**it) if sep else it for it in lista]
def _create_base_string(method, base, params):
"""
Returns base string for HMAC-SHA1 signature as specified in:
http://oauth.net/core/1.0a/#rfc.section.9.1.3.
"""
normalized_qs = _normalize_params(params)
return _join_by_ampersand(method, base, normalized_qs)
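A hedged sketch of what the two helpers are assumed to do, following section 9.1.3 of the linked spec (parameters sorted and percent-encoded, then each of the three parts percent-encoded and joined with '&'):

```python
from urllib.parse import quote

def _normalize_params(params):
    # Sort by key and percent-encode keys and values into key=value pairs.
    return '&'.join('%s=%s' % (quote(str(k), safe=''), quote(str(v), safe=''))
                    for k, v in sorted(params.items()))

def _join_by_ampersand(*parts):
    # Percent-encode each part (double-encoding the query string) and join.
    return '&'.join(quote(str(p), safe='') for p in parts)

print(_create_base_string('GET', 'https://api.example.com/r', {'a': '1 b'}))
# GET&https%3A%2F%2Fapi.example.com%2Fr&a%3D1%2520b
```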
def state_definition_to_dict(state_definition: GeneralState) -> AccountState:
"""Convert a state definition to the canonical dict form.
State can either be defined in the canonical form, or as a list of sub states that are then
merged to one. Sub states can either be given as dictionaries themselves, or as tuples where
the last element is the value and all others the keys for this value in the nested state
dictionary. Example:
```
[
("0xaabb", "balance", 3),
("0xaabb", "storage", {
4: 5,
}),
"0xbbcc", {
"balance": 6,
"nonce": 7
}
]
```
"""
if isinstance(state_definition, Mapping):
state_dict = state_definition
elif isinstance(state_definition, Iterable):
state_dicts = [
assoc_in(
{},
state_item[:-1],
state_item[-1]
) if not isinstance(state_item, Mapping) else state_item
for state_item
in state_definition
]
if not is_cleanly_mergable(*state_dicts):
raise ValidationError("Some state item is defined multiple times")
state_dict = deep_merge(*state_dicts)
else:
        raise TypeError("State definition must either be a mapping or a sequence")
seen_keys = set(concat(d.keys() for d in state_dict.values()))
bad_keys = seen_keys - set(["balance", "nonce", "storage", "code"])
if bad_keys:
raise ValidationError(
"State definition contains the following invalid account fields: {}".format(
", ".join(bad_keys)
)
)
return state_dict
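A hedged usage example matching the docstring (assuming `assoc_in` and `deep_merge` behave as the eth-utils/cytoolz helpers of the same names):

```python
state = state_definition_to_dict([
    ("0xaabb", "balance", 3),
    ("0xaabb", "storage", {4: 5}),
    ("0xbbcc", {"balance": 6, "nonce": 7}),
])
# -> {"0xaabb": {"balance": 3, "storage": {4: 5}},
#     "0xbbcc": {"balance": 6, "nonce": 7}}
```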
def get_file(cls, filename=None):
"""
Load settings from an rtv configuration file.
"""
if filename is None:
filename = CONFIG
config = configparser.ConfigParser()
if os.path.exists(filename):
with codecs.open(filename, encoding='utf-8') as fp:
                config.read_file(fp)  # readfp() is deprecated and removed in Python 3.12
return cls._parse_rtv_file(config)
def _AtNonLeaf(self, attr_value, path):
"""Called when at a non-leaf value. Should recurse and yield values."""
try:
if isinstance(attr_value, collections.Mapping):
        # If it's dictionary-like, treat the dict key as the attribute.
sub_obj = attr_value.get(path[1])
if len(path) > 2:
# Expand any additional elements underneath the key.
sub_obj = self.Expand(sub_obj, path[2:])
if isinstance(sub_obj, string_types):
# If it is a string, stop here
yield sub_obj
elif isinstance(sub_obj, collections.Mapping):
# If the result is a dict, return each key/value pair as a new dict.
for k, v in iteritems(sub_obj):
yield {k: v}
else:
for value in sub_obj:
yield value
else:
# If it's an iterable, we recurse on each value.
for sub_obj in attr_value:
for value in self.Expand(sub_obj, path[1:]):
yield value
except TypeError: # This is then not iterable, we recurse with the value
for value in self.Expand(attr_value, path[1:]):
yield value
def get_elemental_abunds(self,cycle,index=None):
"""
returns the elemental abundances for one cycle, either
for the whole star or a specific zone depending upon
the value of 'index'.
Parameters
----------
cycle : string or integer
Model to get the abundances for.
index : integer or list, optional
zone number for which to get elemental abundances. If
None the entire abundance profile is returned. If a 1x2
list, the abundances are returned between indices of
index[0] and index[1].
The default is None.
"""
isoabunds=self.se.get(cycle,'iso_massf')
A=array(self.se.A)
Z=array(self.se.Z)
names=self.se.isos
Zuq=list(set(Z)) # list of unique Zs
Zuq.sort()
        if index is None:
            index = [0, len(isoabunds)]
        if isinstance(index, list):
elemabunds=[]
for zone in range(index[0],index[1]):
percent=int((zone-index[0])*100./(index[1]-index[0]))
sys.stdout.flush()
sys.stdout.write("\rgetting elemental abundances " + "...%d%%" % percent)
elemabunds.append([sum(isoabunds[zone][where(Z==iZ)]) for iZ in Zuq])
else:
elemabunds=[sum(isoabunds[index][where(Z==iZ)]) for iZ in Zuq]
return elemabunds
def p_debugger_statement(self, p):
"""debugger_statement : DEBUGGER SEMI
| DEBUGGER AUTOSEMI
"""
p[0] = self.asttypes.Debugger(p[1])
p[0].setpos(p)
def psql(self, args):
r"""Invoke psql, passing the given command-line arguments.
Typical <args> values: ['-c', <sql_string>] or ['-f', <pathname>].
Connection parameters are taken from self. STDIN, STDOUT,
and STDERR are inherited from the parent.
        WARNING: This method uses the psql(1) program, which ignores SQL
        errors by default. That hides many real errors, making our software
        less reliable. To overcome this flaw, add this line to the head of
        your SQL:
            "\set ON_ERROR_STOP TRUE"
        @return: None. Raises an exception upon error, but *ignores SQL
        errors* unless "\set ON_ERROR_STOP TRUE" is used.
        """
argv = [
PostgresFinder.find_root() / 'psql',
'--quiet',
'-U', self.user,
'-h', self.host,
'-p', self.port,
] + args + [self.db_name]
subprocess.check_call(argv)
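A hedged usage note: instead of editing every SQL file, callers can pass the equivalent psql variable flag (`-v ON_ERROR_STOP=1` is standard psql) so errors abort the run:

```python
# Stop on the first SQL error instead of silently continuing.
self.psql(['-v', 'ON_ERROR_STOP=1', '-f', '/path/to/schema.sql'])
```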
def write_output(self, data, args=None, filename=None, label=None):
"""Write log data to a log file"""
if args:
if not args.outlog:
return 0
        if not filename:
            filename = args.outlog
lastpath = ''
with open(str(filename), 'w') as output_file:
for entry in data['entries']:
if args.label:
if entry['source_path'] == lastpath:
output_file.write(entry['raw_text'] + '\n')
elif args.label == 'fname':
output_file.write('======== ' + \
entry['source_path'].split('/')[-1] + \
' >>>>\n' + entry['raw_text'] + '\n')
elif args.label == 'fpath':
output_file.write('======== ' + \
entry['source_path'] + \
' >>>>\n' + entry['raw_text'] + '\n')
                else:
                    output_file.write(entry['raw_text'] + '\n')
lastpath = entry['source_path']
def _check_default(value, parameter, default_chars):
'''Returns the default if the value is "empty"'''
# not using a set here because it fails when value is unhashable
if value in default_chars:
if parameter.default is inspect.Parameter.empty:
raise ValueError('Value was empty, but no default value is given in view function for parameter: {} ({})'.format(parameter.position, parameter.name))
return parameter.default
return value
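Hypothetical usage: treat '' and None as "not provided" and fall back to the view function's declared default (`parameter.position`, used only in the error path, is assumed to be added by the surrounding framework):

```python
import inspect

def view(page=1):
    return page

param = inspect.signature(view).parameters['page']
assert _check_default('', param, ('', None)) == 1      # empty -> default
assert _check_default('7', param, ('', None)) == '7'   # provided -> unchanged
```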
def update(self, value, *args, **kwargs):
"""
Call this function to inform that an update is available.
This function does NOT call finish when value == maximum.
        :param value: The current index/position of the action. (Should be, but need not be, in the range [min, max])
:param args: extra positional arguments to pass on
:param kwargs: extra keyword arguments to pass on
"""
log.debug('update(value={value}, args={args}, kwargs={kwargs})'.format(value=value, args=args, kwargs=kwargs))
self.on_update(value, *args, **kwargs)
def master_primary_name(self) -> Optional[str]:
"""
Return the name of the primary node of the master instance
"""
master_primary_name = self.master_replica.primaryName
if master_primary_name:
return self.master_replica.getNodeName(master_primary_name)
return None
def ellipse(self, x, y, width, height, color):
"""
See the Processing function ellipse():
https://processing.org/reference/ellipse_.html
"""
self.context.set_source_rgb(*color)
self.context.save()
self.context.translate(self.tx(x + (width / 2.0)), self.ty(y + (height / 2.0)))
self.context.scale(self.tx(width / 2.0), self.ty(height / 2.0))
self.context.arc(0.0, 0.0, 1.0, 0.0, 2 * math.pi)
self.context.fill()
self.context.restore()
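The same translate/scale/arc trick in a standalone pycairo sketch (the `tx`/`ty` sketch-to-device mapping is replaced by an identity mapping for illustration):

```python
import math
import cairo

surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 100, 100)
ctx = cairo.Context(surface)
ctx.set_source_rgb(0.2, 0.4, 0.8)
ctx.save()
ctx.translate(50, 50)   # ellipse center
ctx.scale(40, 20)       # half-width, half-height
ctx.arc(0.0, 0.0, 1.0, 0.0, 2 * math.pi)  # unit circle, scaled into an ellipse
ctx.fill()
ctx.restore()
surface.write_to_png('ellipse.png')
```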
def validate(self, data, schema, **kwargs):
"""Validate data using schema with ``JSONResolver``."""
if not isinstance(schema, dict):
schema = {'$ref': schema}
return validate(
data,
schema,
resolver=self.ref_resolver_cls.from_schema(schema),
types=self.app.config.get('RECORDS_VALIDATION_TYPES', {}),
**kwargs
)
|
[
"def",
"validate",
"(",
"self",
",",
"data",
",",
"schema",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"isinstance",
"(",
"schema",
",",
"dict",
")",
":",
"schema",
"=",
"{",
"'$ref'",
":",
"schema",
"}",
"return",
"validate",
"(",
"data",
",",
"schema",
",",
"resolver",
"=",
"self",
".",
"ref_resolver_cls",
".",
"from_schema",
"(",
"schema",
")",
",",
"types",
"=",
"self",
".",
"app",
".",
"config",
".",
"get",
"(",
"'RECORDS_VALIDATION_TYPES'",
",",
"{",
"}",
")",
",",
"*",
"*",
"kwargs",
")"
] | 36.818182 | 15.454545 |
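The call being wrapped above is jsonschema's validate; a minimal self-contained sketch of the same pattern, with the resolver and custom-types arguments omitted:

from jsonschema import validate

schema = {'type': 'object', 'properties': {'title': {'type': 'string'}}}
validate({'title': 'hello'}, schema)   # passes silently
# validate({'title': 1}, schema) would raise jsonschema.ValidationError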
def user_admin_view(model, login_view="Login", template_dir=None):
"""
    :param model: The User model structure containing other classes
:param login_view: The login view interface
:param template_dir: The directory containing the view pages
:return: UserAdmin
Doc:
User Admin is a view that allows you to admin users.
You must create a Pylot view called `UserAdmin` to activate it
UserAdmin = app.views.user_admin(User, Login)
class UserAdmin(UserAdmin, Pylot):
pass
    The user admin creates some globally available vars under '__.user_admin'
It's also best to add some security access on it
class UserAdmin(UserAdmin, Pylot):
decorators = [login_required]
You can customize the user info page (::get) by creating the directory in your
templates dir, and include the get.html inside of it
ie:
>/admin/templates/UserAdmin/get.html
<div>
{% include "Pylot/UserAdmin/get.html" %}
<div>
<div>Hello {{ __.user_admin.user.name }}<div>
"""
Pylot.context_(COMPONENT_USER_ADMIN=True)
User = model.UserStruct.User
LoginView = login_view
if not template_dir:
template_dir = "Pylot/UserAdmin"
template_page = template_dir + "/%s.html"
class UserAdmin(object):
route_base = "user-admin"
@classmethod
def _options(cls):
return {
"user_role": [("Rol", "Role")], #[(role, role) for i, role in enumerate(.all_roles)],
"user_status": [("Sta", "Stat")] #[(status, status) for i, status in enumerate(User.all_status)]
}
@classmethod
def search_handler(cls, per_page=20):
"""
To initiate a search
"""
page = request.args.get("page", 1)
show_deleted = True if request.args.get("show-deleted") else False
name = request.args.get("name")
email = request.args.get("email")
users = User.all(include_deleted=show_deleted)
users = users.order_by(User.name.asc())
if name:
users = users.filter(User.name.contains(name))
if email:
users = users.filter(User.email.contains(email))
users = users.paginate(page=page, per_page=per_page)
cls.__(user_admin=dict(
options=cls._options(),
users=users,
search_query={
"excluded_deleted": request.args.get("show-deleted"),
"role": request.args.get("role"),
"status": request.args.get("status"),
"name": request.args.get("name"),
"email": request.args.get("email")
}
))
return users
@classmethod
def get_user_handler(cls, id):
"""
Get a user
"""
user = User.get(id, include_deleted=True)
if not user:
abort(404, "User doesn't exist")
cls.__(user_admin=dict(user=user, options=cls._options()))
return user
def index(self):
self.search_handler()
return self.render(view_template=template_page % "index")
def get(self, id):
self.get_user_handler(id)
return self.render(view_template=template_page % "get")
def post(self):
try:
id = request.form.get("id")
user = User.get(id, include_deleted=True)
if not user:
self.error_("Can't change user info. Invalid user")
return redirect(url_for("UserAdmin:index"))
delete_entry = True if request.form.get("delete-entry") else False
if delete_entry:
user.update(status=user.STATUS_SUSPENDED)
user.delete()
self.success_("User DELETED Successfully!")
return redirect(url_for("UserAdmin:get", id=id))
email = request.form.get("email")
password = request.form.get("password")
password2 = request.form.get("password2")
name = request.form.get("name")
role = request.form.get("user_role")
status = request.form.get("user_status")
upd = {}
if email and email != user.email:
LoginView.change_login_handler(user_context=user)
if password and password2:
LoginView.change_password_handler(user_context=user)
if name != user.name:
upd.update({"name": name})
if role and role != user.role:
upd.update({"role": role})
if status and status != user.status:
if user.is_deleted and status == user.STATUS_ACTIVE:
user.delete(False)
upd.update({"status": status})
if upd:
user.update(**upd)
self.success_("User's Info updated successfully!")
except Exception as ex:
self.error_("Error: %s " % ex.message)
return redirect(url_for("UserAdmin:get", id=id))
@route("reset-password", methods=["POST"])
def reset_password(self):
try:
id = request.form.get("id")
user = User.get(id)
if not user:
self.error_("Can't reset password. Invalid user")
return redirect(url_for("User:index"))
password = LoginView.reset_password_handler(user_context=user)
self.success_("User's password reset successfully!")
except Exception as ex:
self.error_("Error: %s " % ex.message)
return redirect(url_for("UserAdmin:get", id=id))
@route("create", methods=["POST"])
def create(self):
try:
account = LoginView.signup_handler()
account.set_role(request.form.get("role", "USER"))
self.success_("User created successfully!")
return redirect(url_for("UserAdmin:get", id=account.id))
except Exception as ex:
self.error_("Error: %s" % ex.message)
return redirect(url_for("UserAdmin:index"))
return UserAdmin
|
[
"def",
"user_admin_view",
"(",
"model",
",",
"login_view",
"=",
"\"Login\"",
",",
"template_dir",
"=",
"None",
")",
":",
"Pylot",
".",
"context_",
"(",
"COMPONENT_USER_ADMIN",
"=",
"True",
")",
"User",
"=",
"model",
".",
"UserStruct",
".",
"User",
"LoginView",
"=",
"login_view",
"if",
"not",
"template_dir",
":",
"template_dir",
"=",
"\"Pylot/UserAdmin\"",
"template_page",
"=",
"template_dir",
"+",
"\"/%s.html\"",
"class",
"UserAdmin",
"(",
"object",
")",
":",
"route_base",
"=",
"\"user-admin\"",
"@",
"classmethod",
"def",
"_options",
"(",
"cls",
")",
":",
"return",
"{",
"\"user_role\"",
":",
"[",
"(",
"\"Rol\"",
",",
"\"Role\"",
")",
"]",
",",
"#[(role, role) for i, role in enumerate(.all_roles)],",
"\"user_status\"",
":",
"[",
"(",
"\"Sta\"",
",",
"\"Stat\"",
")",
"]",
"#[(status, status) for i, status in enumerate(User.all_status)]",
"}",
"@",
"classmethod",
"def",
"search_handler",
"(",
"cls",
",",
"per_page",
"=",
"20",
")",
":",
"\"\"\"\n To initiate a search\n \"\"\"",
"page",
"=",
"request",
".",
"args",
".",
"get",
"(",
"\"page\"",
",",
"1",
")",
"show_deleted",
"=",
"True",
"if",
"request",
".",
"args",
".",
"get",
"(",
"\"show-deleted\"",
")",
"else",
"False",
"name",
"=",
"request",
".",
"args",
".",
"get",
"(",
"\"name\"",
")",
"email",
"=",
"request",
".",
"args",
".",
"get",
"(",
"\"email\"",
")",
"users",
"=",
"User",
".",
"all",
"(",
"include_deleted",
"=",
"show_deleted",
")",
"users",
"=",
"users",
".",
"order_by",
"(",
"User",
".",
"name",
".",
"asc",
"(",
")",
")",
"if",
"name",
":",
"users",
"=",
"users",
".",
"filter",
"(",
"User",
".",
"name",
".",
"contains",
"(",
"name",
")",
")",
"if",
"email",
":",
"users",
"=",
"users",
".",
"filter",
"(",
"User",
".",
"email",
".",
"contains",
"(",
"email",
")",
")",
"users",
"=",
"users",
".",
"paginate",
"(",
"page",
"=",
"page",
",",
"per_page",
"=",
"per_page",
")",
"cls",
".",
"__",
"(",
"user_admin",
"=",
"dict",
"(",
"options",
"=",
"cls",
".",
"_options",
"(",
")",
",",
"users",
"=",
"users",
",",
"search_query",
"=",
"{",
"\"excluded_deleted\"",
":",
"request",
".",
"args",
".",
"get",
"(",
"\"show-deleted\"",
")",
",",
"\"role\"",
":",
"request",
".",
"args",
".",
"get",
"(",
"\"role\"",
")",
",",
"\"status\"",
":",
"request",
".",
"args",
".",
"get",
"(",
"\"status\"",
")",
",",
"\"name\"",
":",
"request",
".",
"args",
".",
"get",
"(",
"\"name\"",
")",
",",
"\"email\"",
":",
"request",
".",
"args",
".",
"get",
"(",
"\"email\"",
")",
"}",
")",
")",
"return",
"users",
"@",
"classmethod",
"def",
"get_user_handler",
"(",
"cls",
",",
"id",
")",
":",
"\"\"\"\n Get a user\n \"\"\"",
"user",
"=",
"User",
".",
"get",
"(",
"id",
",",
"include_deleted",
"=",
"True",
")",
"if",
"not",
"user",
":",
"abort",
"(",
"404",
",",
"\"User doesn't exist\"",
")",
"cls",
".",
"__",
"(",
"user_admin",
"=",
"dict",
"(",
"user",
"=",
"user",
",",
"options",
"=",
"cls",
".",
"_options",
"(",
")",
")",
")",
"return",
"user",
"def",
"index",
"(",
"self",
")",
":",
"self",
".",
"search_handler",
"(",
")",
"return",
"self",
".",
"render",
"(",
"view_template",
"=",
"template_page",
"%",
"\"index\"",
")",
"def",
"get",
"(",
"self",
",",
"id",
")",
":",
"self",
".",
"get_user_handler",
"(",
"id",
")",
"return",
"self",
".",
"render",
"(",
"view_template",
"=",
"template_page",
"%",
"\"get\"",
")",
"def",
"post",
"(",
"self",
")",
":",
"try",
":",
"id",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"id\"",
")",
"user",
"=",
"User",
".",
"get",
"(",
"id",
",",
"include_deleted",
"=",
"True",
")",
"if",
"not",
"user",
":",
"self",
".",
"error_",
"(",
"\"Can't change user info. Invalid user\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"\"UserAdmin:index\"",
")",
")",
"delete_entry",
"=",
"True",
"if",
"request",
".",
"form",
".",
"get",
"(",
"\"delete-entry\"",
")",
"else",
"False",
"if",
"delete_entry",
":",
"user",
".",
"update",
"(",
"status",
"=",
"user",
".",
"STATUS_SUSPENDED",
")",
"user",
".",
"delete",
"(",
")",
"self",
".",
"success_",
"(",
"\"User DELETED Successfully!\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"\"UserAdmin:get\"",
",",
"id",
"=",
"id",
")",
")",
"email",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"email\"",
")",
"password",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"password\"",
")",
"password2",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"password2\"",
")",
"name",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"name\"",
")",
"role",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"user_role\"",
")",
"status",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"user_status\"",
")",
"upd",
"=",
"{",
"}",
"if",
"email",
"and",
"email",
"!=",
"user",
".",
"email",
":",
"LoginView",
".",
"change_login_handler",
"(",
"user_context",
"=",
"user",
")",
"if",
"password",
"and",
"password2",
":",
"LoginView",
".",
"change_password_handler",
"(",
"user_context",
"=",
"user",
")",
"if",
"name",
"!=",
"user",
".",
"name",
":",
"upd",
".",
"update",
"(",
"{",
"\"name\"",
":",
"name",
"}",
")",
"if",
"role",
"and",
"role",
"!=",
"user",
".",
"role",
":",
"upd",
".",
"update",
"(",
"{",
"\"role\"",
":",
"role",
"}",
")",
"if",
"status",
"and",
"status",
"!=",
"user",
".",
"status",
":",
"if",
"user",
".",
"is_deleted",
"and",
"status",
"==",
"user",
".",
"STATUS_ACTIVE",
":",
"user",
".",
"delete",
"(",
"False",
")",
"upd",
".",
"update",
"(",
"{",
"\"status\"",
":",
"status",
"}",
")",
"if",
"upd",
":",
"user",
".",
"update",
"(",
"*",
"*",
"upd",
")",
"self",
".",
"success_",
"(",
"\"User's Info updated successfully!\"",
")",
"except",
"Exception",
"as",
"ex",
":",
"self",
".",
"error_",
"(",
"\"Error: %s \"",
"%",
"ex",
".",
"message",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"\"UserAdmin:get\"",
",",
"id",
"=",
"id",
")",
")",
"@",
"route",
"(",
"\"reset-password\"",
",",
"methods",
"=",
"[",
"\"POST\"",
"]",
")",
"def",
"reset_password",
"(",
"self",
")",
":",
"try",
":",
"id",
"=",
"request",
".",
"form",
".",
"get",
"(",
"\"id\"",
")",
"user",
"=",
"User",
".",
"get",
"(",
"id",
")",
"if",
"not",
"user",
":",
"self",
".",
"error_",
"(",
"\"Can't reset password. Invalid user\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"\"User:index\"",
")",
")",
"password",
"=",
"LoginView",
".",
"reset_password_handler",
"(",
"user_context",
"=",
"user",
")",
"self",
".",
"success_",
"(",
"\"User's password reset successfully!\"",
")",
"except",
"Exception",
"as",
"ex",
":",
"self",
".",
"error_",
"(",
"\"Error: %s \"",
"%",
"ex",
".",
"message",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"\"UserAdmin:get\"",
",",
"id",
"=",
"id",
")",
")",
"@",
"route",
"(",
"\"create\"",
",",
"methods",
"=",
"[",
"\"POST\"",
"]",
")",
"def",
"create",
"(",
"self",
")",
":",
"try",
":",
"account",
"=",
"LoginView",
".",
"signup_handler",
"(",
")",
"account",
".",
"set_role",
"(",
"request",
".",
"form",
".",
"get",
"(",
"\"role\"",
",",
"\"USER\"",
")",
")",
"self",
".",
"success_",
"(",
"\"User created successfully!\"",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"\"UserAdmin:get\"",
",",
"id",
"=",
"account",
".",
"id",
")",
")",
"except",
"Exception",
"as",
"ex",
":",
"self",
".",
"error_",
"(",
"\"Error: %s\"",
"%",
"ex",
".",
"message",
")",
"return",
"redirect",
"(",
"url_for",
"(",
"\"UserAdmin:index\"",
")",
")",
"return",
"UserAdmin"
] | 36.251429 | 20.182857 |
def set_headers(self, headers):
"""Set headers"""
for (header, value) in headers.iteritems():
self.set_header(header, value)
|
[
"def",
"set_headers",
"(",
"self",
",",
"headers",
")",
":",
"for",
"(",
"header",
",",
"value",
")",
"in",
"headers",
".",
"iteritems",
"(",
")",
":",
"self",
".",
"set_header",
"(",
"header",
",",
"value",
")"
] | 37.25 | 5.5 |
def _get_post_data_to_create_dns_entry(self, rtype, name, content, identifier=None):
"""
Build and return the post date that is needed to create a DNS entry.
"""
is_update = identifier is not None
if is_update:
records = self._list_records_internal(identifier=identifier)
assert len(records) == 1, 'ID is not unique or does not exist'
record = records[0]
LOGGER.debug('Create post data to update record: %s', record)
data = {
'id': str(identifier) if is_update else '',
'action': 'save',
'name': name,
'type': rtype,
'content': content,
'prio': str(record['priority']) if is_update else '10',
'ttl': str(record['ttl']) if is_update else '360',
'commit': ''
}
ttl = self._get_lexicon_option('ttl')
if ttl and ttl > 360:
data['ttl'] = str(ttl)
prio = self._get_lexicon_option('priority')
if prio and prio > 0:
data['prio'] = str(prio)
return data
|
[
"def",
"_get_post_data_to_create_dns_entry",
"(",
"self",
",",
"rtype",
",",
"name",
",",
"content",
",",
"identifier",
"=",
"None",
")",
":",
"is_update",
"=",
"identifier",
"is",
"not",
"None",
"if",
"is_update",
":",
"records",
"=",
"self",
".",
"_list_records_internal",
"(",
"identifier",
"=",
"identifier",
")",
"assert",
"len",
"(",
"records",
")",
"==",
"1",
",",
"'ID is not unique or does not exist'",
"record",
"=",
"records",
"[",
"0",
"]",
"LOGGER",
".",
"debug",
"(",
"'Create post data to update record: %s'",
",",
"record",
")",
"data",
"=",
"{",
"'id'",
":",
"str",
"(",
"identifier",
")",
"if",
"is_update",
"else",
"''",
",",
"'action'",
":",
"'save'",
",",
"'name'",
":",
"name",
",",
"'type'",
":",
"rtype",
",",
"'content'",
":",
"content",
",",
"'prio'",
":",
"str",
"(",
"record",
"[",
"'priority'",
"]",
")",
"if",
"is_update",
"else",
"'10'",
",",
"'ttl'",
":",
"str",
"(",
"record",
"[",
"'ttl'",
"]",
")",
"if",
"is_update",
"else",
"'360'",
",",
"'commit'",
":",
"''",
"}",
"ttl",
"=",
"self",
".",
"_get_lexicon_option",
"(",
"'ttl'",
")",
"if",
"ttl",
"and",
"ttl",
">",
"360",
":",
"data",
"[",
"'ttl'",
"]",
"=",
"str",
"(",
"ttl",
")",
"prio",
"=",
"self",
".",
"_get_lexicon_option",
"(",
"'priority'",
")",
"if",
"prio",
"and",
"prio",
">",
"0",
":",
"data",
"[",
"'prio'",
"]",
"=",
"str",
"(",
"prio",
")",
"return",
"data"
] | 36.066667 | 19.4 |
def _process_field_queries(field_dictionary):
"""
    We have a field_dictionary - we want to match the values for an elasticsearch "match" query.
    This is only potentially useful when trying to tune certain search operations.
"""
def field_item(field):
""" format field match as "match" item for elasticsearch query """
return {
"match": {
field: field_dictionary[field]
}
}
return [field_item(field) for field in field_dictionary]
|
[
"def",
"_process_field_queries",
"(",
"field_dictionary",
")",
":",
"def",
"field_item",
"(",
"field",
")",
":",
"\"\"\" format field match as \"match\" item for elasticsearch query \"\"\"",
"return",
"{",
"\"match\"",
":",
"{",
"field",
":",
"field_dictionary",
"[",
"field",
"]",
"}",
"}",
"return",
"[",
"field_item",
"(",
"field",
")",
"for",
"field",
"in",
"field_dictionary",
"]"
] | 35.785714 | 20.071429 |
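The clause shape produced above, computed inline so no elasticsearch client is needed; the field names are invented:

field_dictionary = {"course": "edX/DemoX", "org": "edX"}
clauses = [{"match": {f: field_dictionary[f]}} for f in field_dictionary]
print(clauses)
# [{'match': {'course': 'edX/DemoX'}}, {'match': {'org': 'edX'}}]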
def locales(self, query=None):
"""Fetches all Locales from the Environment (up to the set limit, can be modified in `query`).
# TODO: fix url
API Reference: https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/assets/assets-collection/get-all-assets-of-a-space
:param query: (optional) Dict with API options.
:return: List of :class:`Locale <contentful.locale.Locale>` objects.
:rtype: List of contentful.locale.Locale
Usage:
>>> locales = client.locales()
[<Locale[English (United States)] code='en-US' default=True fallback_code=None optional=False>]
"""
if query is None:
query = {}
return self._get(
self.environment_url('/locales'),
query
)
|
[
"def",
"locales",
"(",
"self",
",",
"query",
"=",
"None",
")",
":",
"if",
"query",
"is",
"None",
":",
"query",
"=",
"{",
"}",
"return",
"self",
".",
"_get",
"(",
"self",
".",
"environment_url",
"(",
"'/locales'",
")",
",",
"query",
")"
] | 36.863636 | 27.636364 |
def _calc_min_width(self, table):
""" Calculate the minimum allowable width for a table """
width = len(table.name)
cap = table.consumed_capacity["__table__"]
width = max(width, 4 + len("%.1f/%d" % (cap["read"], table.read_throughput)))
width = max(width, 4 + len("%.1f/%d" % (cap["write"], table.write_throughput)))
for index_name, cap in iteritems(table.consumed_capacity):
if index_name == "__table__":
continue
index = table.global_indexes[index_name]
width = max(
width,
4 + len(index_name + "%.1f/%d" % (cap["read"], index.read_throughput)),
)
width = max(
width,
4
+ len(index_name + "%.1f/%d" % (cap["write"], index.write_throughput)),
)
return width
|
[
"def",
"_calc_min_width",
"(",
"self",
",",
"table",
")",
":",
"width",
"=",
"len",
"(",
"table",
".",
"name",
")",
"cap",
"=",
"table",
".",
"consumed_capacity",
"[",
"\"__table__\"",
"]",
"width",
"=",
"max",
"(",
"width",
",",
"4",
"+",
"len",
"(",
"\"%.1f/%d\"",
"%",
"(",
"cap",
"[",
"\"read\"",
"]",
",",
"table",
".",
"read_throughput",
")",
")",
")",
"width",
"=",
"max",
"(",
"width",
",",
"4",
"+",
"len",
"(",
"\"%.1f/%d\"",
"%",
"(",
"cap",
"[",
"\"write\"",
"]",
",",
"table",
".",
"write_throughput",
")",
")",
")",
"for",
"index_name",
",",
"cap",
"in",
"iteritems",
"(",
"table",
".",
"consumed_capacity",
")",
":",
"if",
"index_name",
"==",
"\"__table__\"",
":",
"continue",
"index",
"=",
"table",
".",
"global_indexes",
"[",
"index_name",
"]",
"width",
"=",
"max",
"(",
"width",
",",
"4",
"+",
"len",
"(",
"index_name",
"+",
"\"%.1f/%d\"",
"%",
"(",
"cap",
"[",
"\"read\"",
"]",
",",
"index",
".",
"read_throughput",
")",
")",
",",
")",
"width",
"=",
"max",
"(",
"width",
",",
"4",
"+",
"len",
"(",
"index_name",
"+",
"\"%.1f/%d\"",
"%",
"(",
"cap",
"[",
"\"write\"",
"]",
",",
"index",
".",
"write_throughput",
")",
")",
",",
")",
"return",
"width"
] | 43.15 | 21.6 |
def distance(self, x, y):
"""
Computes the Manhattan distance between vectors x and y. Returns float.
"""
if scipy.sparse.issparse(x):
return numpy.sum(numpy.absolute((x-y).toarray().ravel()))
else:
return numpy.sum(numpy.absolute(x-y))
|
[
"def",
"distance",
"(",
"self",
",",
"x",
",",
"y",
")",
":",
"if",
"scipy",
".",
"sparse",
".",
"issparse",
"(",
"x",
")",
":",
"return",
"numpy",
".",
"sum",
"(",
"numpy",
".",
"absolute",
"(",
"(",
"x",
"-",
"y",
")",
".",
"toarray",
"(",
")",
".",
"ravel",
"(",
")",
")",
")",
"else",
":",
"return",
"numpy",
".",
"sum",
"(",
"numpy",
".",
"absolute",
"(",
"x",
"-",
"y",
")",
")"
] | 36.625 | 15.375 |
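A quick numeric check of the Manhattan metric above with plain numpy arrays (the sparse branch densifies and then reduces the same way):

import numpy

x = numpy.array([1.0, 2.0, 3.0])
y = numpy.array([4.0, 0.0, 3.0])
print(numpy.sum(numpy.absolute(x - y)))  # 5.0 = |1-4| + |2-0| + |3-3|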
def glob_in_parents(dir, patterns, upper_limit=None):
"""Recursive version of GLOB which glob sall parent directories
of dir until the first match is found. Returns an empty result if no match
is found"""
assert(isinstance(dir, str))
assert(isinstance(patterns, list))
result = []
absolute_dir = os.path.join(os.getcwd(), dir)
absolute_dir = os.path.normpath(absolute_dir)
while absolute_dir:
new_dir = os.path.split(absolute_dir)[0]
if new_dir == absolute_dir:
break
result = glob([new_dir], patterns)
if result:
break
absolute_dir = new_dir
return result
|
[
"def",
"glob_in_parents",
"(",
"dir",
",",
"patterns",
",",
"upper_limit",
"=",
"None",
")",
":",
"assert",
"(",
"isinstance",
"(",
"dir",
",",
"str",
")",
")",
"assert",
"(",
"isinstance",
"(",
"patterns",
",",
"list",
")",
")",
"result",
"=",
"[",
"]",
"absolute_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"getcwd",
"(",
")",
",",
"dir",
")",
"absolute_dir",
"=",
"os",
".",
"path",
".",
"normpath",
"(",
"absolute_dir",
")",
"while",
"absolute_dir",
":",
"new_dir",
"=",
"os",
".",
"path",
".",
"split",
"(",
"absolute_dir",
")",
"[",
"0",
"]",
"if",
"new_dir",
"==",
"absolute_dir",
":",
"break",
"result",
"=",
"glob",
"(",
"[",
"new_dir",
"]",
",",
"patterns",
")",
"if",
"result",
":",
"break",
"absolute_dir",
"=",
"new_dir",
"return",
"result"
] | 29.227273 | 18.045455 |
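The parent walk used above, isolated: os.path.split climbs one level per iteration and stops when the path no longer changes (the filesystem root).

import os

d = os.path.normpath('/a/b/c')
while True:
    parent = os.path.split(d)[0]
    if parent == d:
        break
    print(parent)  # /a/b, then /a, then /
    d = parent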
def submit_all(self):
"""
:returns: an IterResult object
"""
for args in self.task_args:
self.submit(*args)
return self.get_results()
|
[
"def",
"submit_all",
"(",
"self",
")",
":",
"for",
"args",
"in",
"self",
".",
"task_args",
":",
"self",
".",
"submit",
"(",
"*",
"args",
")",
"return",
"self",
".",
"get_results",
"(",
")"
] | 25.571429 | 6.142857 |
def add_cmd_handler(self, handler_obj):
"""Registers a new command handler object.
All methods on `handler_obj` whose name starts with "cmd_" are
registered as a GTP command. For example, the method cmd_genmove will
be invoked when the engine receives a genmove command.
Args:
handler_obj: the handler object to register.
"""
for field in dir(handler_obj):
if field.startswith("cmd_"):
cmd = field[4:]
fn = getattr(handler_obj, field)
if cmd in self.cmds:
print('Replacing {} with {}'.format(
_handler_name(self.cmds[cmd]), _handler_name(fn)),
file=sys.stderr)
self.cmds[cmd] = fn
|
[
"def",
"add_cmd_handler",
"(",
"self",
",",
"handler_obj",
")",
":",
"for",
"field",
"in",
"dir",
"(",
"handler_obj",
")",
":",
"if",
"field",
".",
"startswith",
"(",
"\"cmd_\"",
")",
":",
"cmd",
"=",
"field",
"[",
"4",
":",
"]",
"fn",
"=",
"getattr",
"(",
"handler_obj",
",",
"field",
")",
"if",
"cmd",
"in",
"self",
".",
"cmds",
":",
"print",
"(",
"'Replacing {} with {}'",
".",
"format",
"(",
"_handler_name",
"(",
"self",
".",
"cmds",
"[",
"cmd",
"]",
")",
",",
"_handler_name",
"(",
"fn",
")",
")",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"self",
".",
"cmds",
"[",
"cmd",
"]",
"=",
"fn"
] | 40.736842 | 15.210526 |
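A minimal sketch of a handler object the registrar above would accept; the cmd_ method names here are invented:

class ExampleHandlers:
    def cmd_name(self):
        return 'example-engine'

    def cmd_version(self):
        return '1.0'

# After engine.add_cmd_handler(ExampleHandlers()), engine.cmds maps
# 'name' and 'version' to the bound methods above.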
def auto_override(memb):
"""Decorator applicable to methods, classes or modules (by explicit call).
If applied on a module, memb must be a module or a module name contained in sys.modules.
See pytypes.set_global_auto_override_decorator to apply this on all modules.
Works like override decorator on type annotated methods that actually have a type
annotated parent method. Has no effect on methods that do not override anything.
In contrast to plain override decorator, auto_override can be applied easily on
every method in a class or module.
In contrast to explicit override decorator, auto_override is not suitable to detect
typos in spelling of a child method's name. It is only useful to assert compatibility
of type information (note that return type is contravariant).
Use pytypes.check_override_at_runtime and pytypes.check_override_at_class_definition_time
to control whether checks happen at class definition time or at "actual runtime".
"""
if type_util._check_as_func(memb):
return override(memb, True)
if isclass(memb):
return auto_override_class(memb)
if ismodule(memb):
return auto_override_module(memb, True)
if memb in sys.modules or memb in _pending_modules:
return auto_override_module(memb, True)
return memb
|
[
"def",
"auto_override",
"(",
"memb",
")",
":",
"if",
"type_util",
".",
"_check_as_func",
"(",
"memb",
")",
":",
"return",
"override",
"(",
"memb",
",",
"True",
")",
"if",
"isclass",
"(",
"memb",
")",
":",
"return",
"auto_override_class",
"(",
"memb",
")",
"if",
"ismodule",
"(",
"memb",
")",
":",
"return",
"auto_override_module",
"(",
"memb",
",",
"True",
")",
"if",
"memb",
"in",
"sys",
".",
"modules",
"or",
"memb",
"in",
"_pending_modules",
":",
"return",
"auto_override_module",
"(",
"memb",
",",
"True",
")",
"return",
"memb"
] | 56.956522 | 24.304348 |
def wait_until_gone(self, timeout=0, *args, **selectors):
"""
        Wait for the object matching *selectors* to disappear within the given timeout.
        Return true if the object *disappears* within the given timeout. Else return false.
"""
return self.device(**selectors).wait.gone(timeout=timeout)
|
[
"def",
"wait_until_gone",
"(",
"self",
",",
"timeout",
"=",
"0",
",",
"*",
"args",
",",
"*",
"*",
"selectors",
")",
":",
"return",
"self",
".",
"device",
"(",
"*",
"*",
"selectors",
")",
".",
"wait",
".",
"gone",
"(",
"timeout",
"=",
"timeout",
")"
] | 43.714286 | 23.428571 |
def reset_formatter(self):
"""Rebuild formatter for all handlers."""
for handler in self.handlers:
formatter = self.get_formatter(handler)
handler.setFormatter(formatter)
|
[
"def",
"reset_formatter",
"(",
"self",
")",
":",
"for",
"handler",
"in",
"self",
".",
"handlers",
":",
"formatter",
"=",
"self",
".",
"get_formatter",
"(",
"handler",
")",
"handler",
".",
"setFormatter",
"(",
"formatter",
")"
] | 41.2 | 6.2 |
def CacheStorage_requestCachedResponse(self, cacheId, requestURL):
"""
Function path: CacheStorage.requestCachedResponse
Domain: CacheStorage
Method name: requestCachedResponse
Parameters:
Required arguments:
		'cacheId' (type: CacheId) -> Id of cache that contains the entry.
'requestURL' (type: string) -> URL spec of the request.
Returns:
'response' (type: CachedResponse) -> Response read from the cache.
Description: Fetches cache entry.
"""
assert isinstance(requestURL, (str,)
), "Argument 'requestURL' must be of type '['str']'. Received type: '%s'" % type(
requestURL)
subdom_funcs = self.synchronous_command('CacheStorage.requestCachedResponse',
cacheId=cacheId, requestURL=requestURL)
return subdom_funcs
|
[
"def",
"CacheStorage_requestCachedResponse",
"(",
"self",
",",
"cacheId",
",",
"requestURL",
")",
":",
"assert",
"isinstance",
"(",
"requestURL",
",",
"(",
"str",
",",
")",
")",
",",
"\"Argument 'requestURL' must be of type '['str']'. Received type: '%s'\"",
"%",
"type",
"(",
"requestURL",
")",
"subdom_funcs",
"=",
"self",
".",
"synchronous_command",
"(",
"'CacheStorage.requestCachedResponse'",
",",
"cacheId",
"=",
"cacheId",
",",
"requestURL",
"=",
"requestURL",
")",
"return",
"subdom_funcs"
] | 36.238095 | 20.142857 |
def add_config(lines):
'''
Add one or more config lines to the switch running config
.. code-block:: bash
salt '*' onyx.cmd add_config 'snmp-server community TESTSTRINGHERE rw'
.. note::
For more than one config added per command, lines should be a list.
'''
if not isinstance(lines, list):
lines = [lines]
try:
enable()
configure_terminal()
for line in lines:
sendline(line)
configure_terminal_exit()
disable()
except TerminalException as e:
log.error(e)
return False
return True
|
[
"def",
"add_config",
"(",
"lines",
")",
":",
"if",
"not",
"isinstance",
"(",
"lines",
",",
"list",
")",
":",
"lines",
"=",
"[",
"lines",
"]",
"try",
":",
"enable",
"(",
")",
"configure_terminal",
"(",
")",
"for",
"line",
"in",
"lines",
":",
"sendline",
"(",
"line",
")",
"configure_terminal_exit",
"(",
")",
"disable",
"(",
")",
"except",
"TerminalException",
"as",
"e",
":",
"log",
".",
"error",
"(",
"e",
")",
"return",
"False",
"return",
"True"
] | 23.52 | 24 |
def overall_state_id(self):
"""Get the service overall state.
The service overall state identifier is the service status including:
- the monitored state
- the acknowledged state
- the downtime state
The overall state is (prioritized):
- a service is not monitored (5)
- a service critical or unreachable (4)
- a service warning or unknown (3)
- a service downtimed (2)
- a service acknowledged (1)
- a service ok (0)
*Note* that services in unknown state are considered as warning, and unreachable ones
are considered as critical!
Also note that the service state is considered only for HARD state type!
"""
overall_state = 0
if not self.monitored:
overall_state = 5
elif self.acknowledged:
overall_state = 1
elif self.downtimed:
overall_state = 2
elif self.state_type == 'HARD':
if self.state == 'WARNING':
overall_state = 3
elif self.state == 'CRITICAL':
overall_state = 4
elif self.state == 'UNKNOWN':
overall_state = 3
elif self.state == 'UNREACHABLE':
overall_state = 4
return overall_state
|
[
"def",
"overall_state_id",
"(",
"self",
")",
":",
"overall_state",
"=",
"0",
"if",
"not",
"self",
".",
"monitored",
":",
"overall_state",
"=",
"5",
"elif",
"self",
".",
"acknowledged",
":",
"overall_state",
"=",
"1",
"elif",
"self",
".",
"downtimed",
":",
"overall_state",
"=",
"2",
"elif",
"self",
".",
"state_type",
"==",
"'HARD'",
":",
"if",
"self",
".",
"state",
"==",
"'WARNING'",
":",
"overall_state",
"=",
"3",
"elif",
"self",
".",
"state",
"==",
"'CRITICAL'",
":",
"overall_state",
"=",
"4",
"elif",
"self",
".",
"state",
"==",
"'UNKNOWN'",
":",
"overall_state",
"=",
"3",
"elif",
"self",
".",
"state",
"==",
"'UNREACHABLE'",
":",
"overall_state",
"=",
"4",
"return",
"overall_state"
] | 32.175 | 14.625 |
def _encode_status(status):
"""Cast status to bytes representation of current Python version.
According to :pep:`3333`, when using Python 3, the response status
and headers must be bytes masquerading as unicode; that is, they
must be of type "str" but are restricted to code points in the
"latin-1" set.
"""
if six.PY2:
return status
if not isinstance(status, str):
raise TypeError('WSGI response status is not of type str.')
return status.encode('ISO-8859-1')
|
[
"def",
"_encode_status",
"(",
"status",
")",
":",
"if",
"six",
".",
"PY2",
":",
"return",
"status",
"if",
"not",
"isinstance",
"(",
"status",
",",
"str",
")",
":",
"raise",
"TypeError",
"(",
"'WSGI response status is not of type str.'",
")",
"return",
"status",
".",
"encode",
"(",
"'ISO-8859-1'",
")"
] | 41.923077 | 18.230769 |
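The Python 3 path of the helper above, demonstrated directly: the status line must survive a latin-1 encoding per PEP 3333.

status = '404 Not Found'
encoded = status.encode('ISO-8859-1')
print(encoded)  # b'404 Not Found'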
def forward(self, input, target):
"""
Calculate the loss
:param input: prediction logits
:param target: target probabilities
:return: loss
"""
n, k = input.shape
losses = input.new_zeros(n)
for i in range(k):
cls_idx = input.new_full((n,), i, dtype=torch.long)
loss = F.cross_entropy(input, cls_idx, reduction="none")
if self.weight is not None:
loss = loss * self.weight[i]
losses += target[:, i].float() * loss
if self.reduction == "mean":
losses = losses.mean()
elif self.reduction == "sum":
losses = losses.sum()
elif self.reduction != "none":
raise ValueError(f"Unrecognized reduction: {self.reduction}")
return losses
|
[
"def",
"forward",
"(",
"self",
",",
"input",
",",
"target",
")",
":",
"n",
",",
"k",
"=",
"input",
".",
"shape",
"losses",
"=",
"input",
".",
"new_zeros",
"(",
"n",
")",
"for",
"i",
"in",
"range",
"(",
"k",
")",
":",
"cls_idx",
"=",
"input",
".",
"new_full",
"(",
"(",
"n",
",",
")",
",",
"i",
",",
"dtype",
"=",
"torch",
".",
"long",
")",
"loss",
"=",
"F",
".",
"cross_entropy",
"(",
"input",
",",
"cls_idx",
",",
"reduction",
"=",
"\"none\"",
")",
"if",
"self",
".",
"weight",
"is",
"not",
"None",
":",
"loss",
"=",
"loss",
"*",
"self",
".",
"weight",
"[",
"i",
"]",
"losses",
"+=",
"target",
"[",
":",
",",
"i",
"]",
".",
"float",
"(",
")",
"*",
"loss",
"if",
"self",
".",
"reduction",
"==",
"\"mean\"",
":",
"losses",
"=",
"losses",
".",
"mean",
"(",
")",
"elif",
"self",
".",
"reduction",
"==",
"\"sum\"",
":",
"losses",
"=",
"losses",
".",
"sum",
"(",
")",
"elif",
"self",
".",
"reduction",
"!=",
"\"none\"",
":",
"raise",
"ValueError",
"(",
"f\"Unrecognized reduction: {self.reduction}\"",
")",
"return",
"losses"
] | 29.851852 | 15.407407 |
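With weight=None, the per-class loop above is algebraically the same as -(target * log_softmax(input)).sum(1); a self-contained check with plain torch:

import torch
import torch.nn.functional as F

logits = torch.randn(4, 3)                    # n=4 samples, k=3 classes
target = F.softmax(torch.randn(4, 3), dim=1)  # rows are probabilities
manual = -(target * F.log_softmax(logits, dim=1)).sum(dim=1)
print(manual.mean())  # matches the class above with reduction="mean"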
def _api_args_item(self, item):
"""Glances API RESTful implementation.
Return the JSON representation of the Glances command line arguments item
HTTP/200 if OK
HTTP/400 if item is not found
        HTTP/404 on other errors
"""
response.content_type = 'application/json; charset=utf-8'
if item not in self.args:
abort(400, "Unknown argument item %s" % item)
try:
# Get the JSON value of the args' dict
# Use vars to convert namespace to dict
# Source: https://docs.python.org/%s/library/functions.html#vars
args_json = json.dumps(vars(self.args)[item])
except Exception as e:
abort(404, "Cannot get args item (%s)" % str(e))
return args_json
|
[
"def",
"_api_args_item",
"(",
"self",
",",
"item",
")",
":",
"response",
".",
"content_type",
"=",
"'application/json; charset=utf-8'",
"if",
"item",
"not",
"in",
"self",
".",
"args",
":",
"abort",
"(",
"400",
",",
"\"Unknown argument item %s\"",
"%",
"item",
")",
"try",
":",
"# Get the JSON value of the args' dict",
"# Use vars to convert namespace to dict",
"# Source: https://docs.python.org/%s/library/functions.html#vars",
"args_json",
"=",
"json",
".",
"dumps",
"(",
"vars",
"(",
"self",
".",
"args",
")",
"[",
"item",
"]",
")",
"except",
"Exception",
"as",
"e",
":",
"abort",
"(",
"404",
",",
"\"Cannot get args item (%s)\"",
"%",
"str",
"(",
"e",
")",
")",
"return",
"args_json"
] | 36.904762 | 18.857143 |
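The vars() trick used above, isolated: an argparse namespace becomes a plain dict, so a single item JSON-encodes cleanly.

import argparse
import json

args = argparse.Namespace(debug=True, port=61208)
print(json.dumps(vars(args)['port']))  # 61208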
def add(self,attrlist,attrvalues):
'''
add an attribute
:parameter dimlist: list of dimensions
:parameter dimvalues: list of values for dimlist
'''
for i,d in enumerate(attrlist):
self[d] = attrvalues[i]
|
[
"def",
"add",
"(",
"self",
",",
"attrlist",
",",
"attrvalues",
")",
":",
"for",
"i",
",",
"d",
"in",
"enumerate",
"(",
"attrlist",
")",
":",
"self",
"[",
"d",
"]",
"=",
"attrvalues",
"[",
"i",
"]"
] | 30.222222 | 15.111111 |
def send(self, item, timeout=-1):
"""
        Send an *item* on this pair. This will block unless our receiver is ready,
either forever or until *timeout* milliseconds.
"""
if not self.ready:
self.pause(timeout=timeout)
if isinstance(item, Exception):
return self.hub.throw_to(self.other.peak, item)
return self.hub.switch_to(self.other.peak, self.other, item)
|
[
"def",
"send",
"(",
"self",
",",
"item",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"if",
"not",
"self",
".",
"ready",
":",
"self",
".",
"pause",
"(",
"timeout",
"=",
"timeout",
")",
"if",
"isinstance",
"(",
"item",
",",
"Exception",
")",
":",
"return",
"self",
".",
"hub",
".",
"throw_to",
"(",
"self",
".",
"other",
".",
"peak",
",",
"item",
")",
"return",
"self",
".",
"hub",
".",
"switch_to",
"(",
"self",
".",
"other",
".",
"peak",
",",
"self",
".",
"other",
",",
"item",
")"
] | 35 | 17 |
def create_from_name_and_dictionary(self, name, datas):
"""Return a populated object Parameter from dictionary datas
"""
parameter = ObjectParameter()
self.set_common_datas(parameter, name, datas)
if "optional" in datas:
parameter.optional = to_boolean(datas["optional"])
if "type" in datas:
parameter.type = str(datas["type"])
if "generic" in datas:
parameter.generic = to_boolean(datas["generic"])
return parameter
|
[
"def",
"create_from_name_and_dictionary",
"(",
"self",
",",
"name",
",",
"datas",
")",
":",
"parameter",
"=",
"ObjectParameter",
"(",
")",
"self",
".",
"set_common_datas",
"(",
"parameter",
",",
"name",
",",
"datas",
")",
"if",
"\"optional\"",
"in",
"datas",
":",
"parameter",
".",
"optional",
"=",
"to_boolean",
"(",
"datas",
"[",
"\"optional\"",
"]",
")",
"if",
"\"type\"",
"in",
"datas",
":",
"parameter",
".",
"type",
"=",
"str",
"(",
"datas",
"[",
"\"type\"",
"]",
")",
"if",
"\"generic\"",
"in",
"datas",
":",
"parameter",
".",
"generic",
"=",
"to_boolean",
"(",
"datas",
"[",
"\"generic\"",
"]",
")",
"return",
"parameter"
] | 36.071429 | 14.857143 |
def BuildServiceStub(self, cls):
"""Constructs the stub class.
Args:
cls: The class that will be constructed.
"""
def _ServiceStubInit(stub, rpc_channel):
stub.rpc_channel = rpc_channel
self.cls = cls
cls.__init__ = _ServiceStubInit
for method in self.descriptor.methods:
setattr(cls, method.name, self._GenerateStubMethod(method))
|
[
"def",
"BuildServiceStub",
"(",
"self",
",",
"cls",
")",
":",
"def",
"_ServiceStubInit",
"(",
"stub",
",",
"rpc_channel",
")",
":",
"stub",
".",
"rpc_channel",
"=",
"rpc_channel",
"self",
".",
"cls",
"=",
"cls",
"cls",
".",
"__init__",
"=",
"_ServiceStubInit",
"for",
"method",
"in",
"self",
".",
"descriptor",
".",
"methods",
":",
"setattr",
"(",
"cls",
",",
"method",
".",
"name",
",",
"self",
".",
"_GenerateStubMethod",
"(",
"method",
")",
")"
] | 28.230769 | 14.384615 |
def store(self, prof_name, prof_type):
"""
Store a profile with the given name and type.
:param str prof_name:
Profile name.
:param str prof_type:
Profile type.
"""
prof_dir = self.__profile_dir(prof_name)
prof_stub = self.__profile_stub(prof_name, prof_type, prof_dir)
if not os.path.exists(prof_dir):
os.makedirs(prof_dir)
prof_ini_path = self.__profile_ini_path(prof_dir)
# Load previous properties
if os.path.exists(prof_ini_path):
prof_ini_file = open(prof_ini_path, "r")
prof_ini = configparser.ConfigParser()
prof_ini.read_file(prof_ini_file)
prof_ini_file.close()
prev_props = prof_ini["properties"]
else:
prev_props = {}
# Prepare and store profile
prof_ini = configparser.ConfigParser()
prof_ini["profile"] = {}
prof_ini["profile"]["type"] = prof_type
prof_ini["properties"] = prof_stub.prepare(prev_props)
prof_ini_file = open(prof_ini_path, "w")
prof_ini.write(prof_ini_file)
prof_ini_file.close()
|
[
"def",
"store",
"(",
"self",
",",
"prof_name",
",",
"prof_type",
")",
":",
"prof_dir",
"=",
"self",
".",
"__profile_dir",
"(",
"prof_name",
")",
"prof_stub",
"=",
"self",
".",
"__profile_stub",
"(",
"prof_name",
",",
"prof_type",
",",
"prof_dir",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"prof_dir",
")",
":",
"os",
".",
"makedirs",
"(",
"prof_dir",
")",
"prof_ini_path",
"=",
"self",
".",
"__profile_ini_path",
"(",
"prof_dir",
")",
"# Load previous properties",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"prof_ini_path",
")",
":",
"prof_ini_file",
"=",
"open",
"(",
"prof_ini_path",
",",
"\"r\"",
")",
"prof_ini",
"=",
"configparser",
".",
"ConfigParser",
"(",
")",
"prof_ini",
".",
"read_file",
"(",
"prof_ini_file",
")",
"prof_ini_file",
".",
"close",
"(",
")",
"prev_props",
"=",
"prof_ini",
"[",
"\"properties\"",
"]",
"else",
":",
"prev_props",
"=",
"{",
"}",
"# Prepare and store profile",
"prof_ini",
"=",
"configparser",
".",
"ConfigParser",
"(",
")",
"prof_ini",
"[",
"\"profile\"",
"]",
"=",
"{",
"}",
"prof_ini",
"[",
"\"profile\"",
"]",
"[",
"\"type\"",
"]",
"=",
"prof_type",
"prof_ini",
"[",
"\"properties\"",
"]",
"=",
"prof_stub",
".",
"prepare",
"(",
"prev_props",
")",
"prof_ini_file",
"=",
"open",
"(",
"prof_ini_path",
",",
"\"w\"",
")",
"prof_ini",
".",
"write",
"(",
"prof_ini_file",
")",
"prof_ini_file",
".",
"close",
"(",
")"
] | 27.457143 | 14.428571 |
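The configparser round trip at the heart of the method above, reduced to a self-contained sketch (paths are throwaway temp files):

import configparser
import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), 'profile.ini')
ini = configparser.ConfigParser()
ini['profile'] = {'type': 'demo'}
ini['properties'] = {'answer': '42'}
with open(path, 'w') as fh:
    ini.write(fh)

back = configparser.ConfigParser()
back.read(path)
print(back['profile']['type'])  # demo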
def add_action(self, dash, dashdash, action_code):
"""Add a specialized option that is the action to execute."""
option = self.add_option(dash, dashdash, action='callback',
callback=self._append_action
)
option.action_code = action_code
|
[
"def",
"add_action",
"(",
"self",
",",
"dash",
",",
"dashdash",
",",
"action_code",
")",
":",
"option",
"=",
"self",
".",
"add_option",
"(",
"dash",
",",
"dashdash",
",",
"action",
"=",
"'callback'",
",",
"callback",
"=",
"self",
".",
"_append_action",
")",
"option",
".",
"action_code",
"=",
"action_code"
] | 46.5 | 10.666667 |
def get_vnetwork_portgroups_input_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
config = get_vnetwork_portgroups
input = ET.SubElement(get_vnetwork_portgroups, "input")
name = ET.SubElement(input, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
[
"def",
"get_vnetwork_portgroups_input_name",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"config",
"=",
"ET",
".",
"Element",
"(",
"\"config\"",
")",
"get_vnetwork_portgroups",
"=",
"ET",
".",
"Element",
"(",
"\"get_vnetwork_portgroups\"",
")",
"config",
"=",
"get_vnetwork_portgroups",
"input",
"=",
"ET",
".",
"SubElement",
"(",
"get_vnetwork_portgroups",
",",
"\"input\"",
")",
"name",
"=",
"ET",
".",
"SubElement",
"(",
"input",
",",
"\"name\"",
")",
"name",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'name'",
")",
"callback",
"=",
"kwargs",
".",
"pop",
"(",
"'callback'",
",",
"self",
".",
"_callback",
")",
"return",
"callback",
"(",
"config",
")"
] | 39.666667 | 11.916667 |
def filter(self, result):
"""
Filter the specified result based on query criteria.
@param result: A potential result.
@type result: L{sxbase.SchemaObject}
@return: True if result should be excluded.
@rtype: boolean
"""
if result is None:
return True
reject = result in self.history
if reject:
log.debug('result %s, rejected by\n%s', Repr(result), self)
return reject
|
[
"def",
"filter",
"(",
"self",
",",
"result",
")",
":",
"if",
"result",
"is",
"None",
":",
"return",
"True",
"reject",
"=",
"result",
"in",
"self",
".",
"history",
"if",
"reject",
":",
"log",
".",
"debug",
"(",
"'result %s, rejected by\\n%s'",
",",
"Repr",
"(",
"result",
")",
",",
"self",
")",
"return",
"reject"
] | 33.214286 | 12.357143 |
def calculate_embedding_output_shapes(operator):
'''
Allowed input/output patterns are
1. [N, 1] ---> [N, C]
2. [N, 1, 1, 1] ---> [N, C, 1, 1]
'''
check_input_and_output_numbers(operator, input_count_range=1, output_count_range=1)
check_input_and_output_types(operator, good_input_types=[Int64Type, Int64TensorType])
output = operator.outputs[0]
input_shape = operator.inputs[0].type.shape
if input_shape[1] != 1 or (len(input_shape) > 2 and (input_shape[2] != 1 or input_shape[3] != 1)):
raise RuntimeError('If input is a 4-D tensor, its shape must be [N, 1, 1, 1]')
params = operator.raw_operator.embedding
if len(input_shape) == 4:
output_shape = [input_shape[0], params.outputChannels, 1, 1]
elif len(input_shape) == 2:
output_shape = [input_shape[0], params.outputChannels]
else:
raise RuntimeError('Input must be a 2-D or a 4-D tensor')
output.type.shape = output_shape
|
[
"def",
"calculate_embedding_output_shapes",
"(",
"operator",
")",
":",
"check_input_and_output_numbers",
"(",
"operator",
",",
"input_count_range",
"=",
"1",
",",
"output_count_range",
"=",
"1",
")",
"check_input_and_output_types",
"(",
"operator",
",",
"good_input_types",
"=",
"[",
"Int64Type",
",",
"Int64TensorType",
"]",
")",
"output",
"=",
"operator",
".",
"outputs",
"[",
"0",
"]",
"input_shape",
"=",
"operator",
".",
"inputs",
"[",
"0",
"]",
".",
"type",
".",
"shape",
"if",
"input_shape",
"[",
"1",
"]",
"!=",
"1",
"or",
"(",
"len",
"(",
"input_shape",
")",
">",
"2",
"and",
"(",
"input_shape",
"[",
"2",
"]",
"!=",
"1",
"or",
"input_shape",
"[",
"3",
"]",
"!=",
"1",
")",
")",
":",
"raise",
"RuntimeError",
"(",
"'If input is a 4-D tensor, its shape must be [N, 1, 1, 1]'",
")",
"params",
"=",
"operator",
".",
"raw_operator",
".",
"embedding",
"if",
"len",
"(",
"input_shape",
")",
"==",
"4",
":",
"output_shape",
"=",
"[",
"input_shape",
"[",
"0",
"]",
",",
"params",
".",
"outputChannels",
",",
"1",
",",
"1",
"]",
"elif",
"len",
"(",
"input_shape",
")",
"==",
"2",
":",
"output_shape",
"=",
"[",
"input_shape",
"[",
"0",
"]",
",",
"params",
".",
"outputChannels",
"]",
"else",
":",
"raise",
"RuntimeError",
"(",
"'Input must be a 2-D or a 4-D tensor'",
")",
"output",
".",
"type",
".",
"shape",
"=",
"output_shape"
] | 38.24 | 25.68 |
def apply_actions(self, name_of_action, actions):
"""Start to execute an action or a group of actions.
This method takes a bunch of actions and runs them on your
Tahoma box.
:param name_of_action: the label/name for the action
:param actions: an array of Action objects
:return: the execution identifier **************
what if it fails
:rtype: string
raises ValueError in case of protocol issues
:Seealso:
- get_events
- get_current_executions
"""
header = BASE_HEADERS.copy()
header['Cookie'] = self.__cookie
actions_serialized = []
for action in actions:
actions_serialized.append(action.serialize())
data = {"label": name_of_action, "actions": actions_serialized}
json_data = json.dumps(data, indent=None, sort_keys=True)
request = requests.post(
BASE_URL + "apply",
headers=header, data=json_data,
timeout=10)
if request.status_code != 200:
self.__logged_in = False
self.login()
            return self.apply_actions(name_of_action, actions)
try:
result = request.json()
except ValueError as error:
raise Exception(
"Not a valid result for applying an " +
"action, protocol error: " + request.status_code +
' - ' + request.reason + " (" + error + ")")
if 'execId' not in result.keys():
raise Exception("Could not run actions, missing execId.")
return result['execId']
|
[
"def",
"apply_actions",
"(",
"self",
",",
"name_of_action",
",",
"actions",
")",
":",
"header",
"=",
"BASE_HEADERS",
".",
"copy",
"(",
")",
"header",
"[",
"'Cookie'",
"]",
"=",
"self",
".",
"__cookie",
"actions_serialized",
"=",
"[",
"]",
"for",
"action",
"in",
"actions",
":",
"actions_serialized",
".",
"append",
"(",
"action",
".",
"serialize",
"(",
")",
")",
"data",
"=",
"{",
"\"label\"",
":",
"name_of_action",
",",
"\"actions\"",
":",
"actions_serialized",
"}",
"json_data",
"=",
"json",
".",
"dumps",
"(",
"data",
",",
"indent",
"=",
"None",
",",
"sort_keys",
"=",
"True",
")",
"request",
"=",
"requests",
".",
"post",
"(",
"BASE_URL",
"+",
"\"apply\"",
",",
"headers",
"=",
"header",
",",
"data",
"=",
"json_data",
",",
"timeout",
"=",
"10",
")",
"if",
"request",
".",
"status_code",
"!=",
"200",
":",
"self",
".",
"__logged_in",
"=",
"False",
"self",
".",
"login",
"(",
")",
"self",
".",
"apply_actions",
"(",
"name_of_action",
",",
"actions",
")",
"return",
"try",
":",
"result",
"=",
"request",
".",
"json",
"(",
")",
"except",
"ValueError",
"as",
"error",
":",
"raise",
"Exception",
"(",
"\"Not a valid result for applying an \"",
"+",
"\"action, protocol error: \"",
"+",
"request",
".",
"status_code",
"+",
"' - '",
"+",
"request",
".",
"reason",
"+",
"\" (\"",
"+",
"error",
"+",
"\")\"",
")",
"if",
"'execId'",
"not",
"in",
"result",
".",
"keys",
"(",
")",
":",
"raise",
"Exception",
"(",
"\"Could not run actions, missing execId.\"",
")",
"return",
"result",
"[",
"'execId'",
"]"
] | 30.207547 | 20.037736 |
def plot_welch_peaks(f, S, peak_loc=None, title=''):
'''Plot welch PSD with peaks as scatter points
Args
----
f: ndarray
Array of frequencies produced with PSD
S: ndarray
Array of powers produced with PSD
peak_loc: ndarray
Indices of peak locations in signal
title: str
Main title for plot
'''
plt.plot(f, S, linewidth=_linewidth)
plt.title(title)
    plt.xlabel('Frequency (Hz)')
plt.ylabel('"Power" (g**2 Hz**−1)')
if peak_loc is not None:
plt.scatter(f[peak_loc], S[peak_loc], label='peaks')
plt.legend(loc='upper right')
plt.show()
return None
|
[
"def",
"plot_welch_peaks",
"(",
"f",
",",
"S",
",",
"peak_loc",
"=",
"None",
",",
"title",
"=",
"''",
")",
":",
"plt",
".",
"plot",
"(",
"f",
",",
"S",
",",
"linewidth",
"=",
"_linewidth",
")",
"plt",
".",
"title",
"(",
"title",
")",
"plt",
".",
"xlabel",
"(",
"'Fequency (Hz)'",
")",
"plt",
".",
"ylabel",
"(",
"'\"Power\" (g**2 Hz**−1)')",
"",
"if",
"peak_loc",
"is",
"not",
"None",
":",
"plt",
".",
"scatter",
"(",
"f",
"[",
"peak_loc",
"]",
",",
"S",
"[",
"peak_loc",
"]",
",",
"label",
"=",
"'peaks'",
")",
"plt",
".",
"legend",
"(",
"loc",
"=",
"'upper right'",
")",
"plt",
".",
"show",
"(",
")",
"return",
"None"
] | 24.192308 | 19.807692 |
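An end-to-end sketch feeding the plotter above: scipy's Welch estimate produces f and S, and find_peaks supplies peak_loc. The signal here is synthetic.

import numpy as np
from scipy.signal import find_peaks, welch

fs = 100.0
t = np.arange(0, 10, 1 / fs)
sig = np.sin(2 * np.pi * 5 * t) + 0.5 * np.random.randn(t.size)
f, S = welch(sig, fs=fs, nperseg=256)
peak_loc, _ = find_peaks(S)
# plot_welch_peaks(f, S, peak_loc=peak_loc, title='Welch PSD')  # from above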
def prove(x,t,kw,y):
"""
Computes public key P*kw where <P> = G1.
x, t, and y are ignored. They are included only for API compatibility with
other Pythia PRF implementations.
"""
# Verify the key type and compute the pubkey
assertScalarType(kw)
p = generatorG2() * kw
return (p,None,None)
|
[
"def",
"prove",
"(",
"x",
",",
"t",
",",
"kw",
",",
"y",
")",
":",
"# Verify the key type and compute the pubkey",
"assertScalarType",
"(",
"kw",
")",
"p",
"=",
"generatorG2",
"(",
")",
"*",
"kw",
"return",
"(",
"p",
",",
"None",
",",
"None",
")"
] | 31.7 | 12.1 |
def Then2(self, f, arg1, *args, **kwargs):
"""
`Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
"""
args = (arg1,) + args
return self.ThenAt(2, f, *args, **kwargs)
|
[
"def",
"Then2",
"(",
"self",
",",
"f",
",",
"arg1",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"arg1",
",",
")",
"+",
"args",
"return",
"self",
".",
"ThenAt",
"(",
"2",
",",
"f",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 42.5 | 15.833333 |
def get_checkerboard_matrix(kernel_width, kernel_type="default", gaussian_param=0.1):
"""
example matrix for width = 2
-1 -1 1 1
-1 -1 1 1
1 1 -1 -1
1 1 -1 -1
    :param kernel_width: width of each checker block
    :param kernel_type: one of "default", "bottom_right", "top_left", "gaussian"
    :param gaussian_param: parameter forwarded to get_gaussian_kernel
    :return:
"""
if kernel_type is "gaussian":
return get_gaussian_kernel(kernel_width, gaussian_param)
if kernel_type is "default":
return np.vstack((
np.hstack((
-1 * np.ones((kernel_width, kernel_width)), np.ones((kernel_width, kernel_width))
)),
np.hstack((
np.ones((kernel_width, kernel_width)), -1 * np.ones((kernel_width, kernel_width))
))
))
elif kernel_type is "bottom_right":
return np.vstack((
np.hstack((
np.ones((kernel_width, kernel_width)), np.ones((kernel_width, kernel_width))
)),
np.hstack((
np.ones((kernel_width, kernel_width)), -1 * np.ones((kernel_width, kernel_width))
))
))
elif kernel_type is "top_left":
return np.vstack((
np.hstack((
-1 * np.ones((kernel_width, kernel_width)), np.ones((kernel_width, kernel_width))
)),
np.hstack((
np.ones((kernel_width, kernel_width)), np.ones((kernel_width, kernel_width))
))
))
|
[
"def",
"get_checkerboard_matrix",
"(",
"kernel_width",
",",
"kernel_type",
"=",
"\"default\"",
",",
"gaussian_param",
"=",
"0.1",
")",
":",
"if",
"kernel_type",
"is",
"\"gaussian\"",
":",
"return",
"get_gaussian_kernel",
"(",
"kernel_width",
",",
"gaussian_param",
")",
"if",
"kernel_type",
"is",
"\"default\"",
":",
"return",
"np",
".",
"vstack",
"(",
"(",
"np",
".",
"hstack",
"(",
"(",
"-",
"1",
"*",
"np",
".",
"ones",
"(",
"(",
"kernel_width",
",",
"kernel_width",
")",
")",
",",
"np",
".",
"ones",
"(",
"(",
"kernel_width",
",",
"kernel_width",
")",
")",
")",
")",
",",
"np",
".",
"hstack",
"(",
"(",
"np",
".",
"ones",
"(",
"(",
"kernel_width",
",",
"kernel_width",
")",
")",
",",
"-",
"1",
"*",
"np",
".",
"ones",
"(",
"(",
"kernel_width",
",",
"kernel_width",
")",
")",
")",
")",
")",
")",
"elif",
"kernel_type",
"is",
"\"bottom_right\"",
":",
"return",
"np",
".",
"vstack",
"(",
"(",
"np",
".",
"hstack",
"(",
"(",
"np",
".",
"ones",
"(",
"(",
"kernel_width",
",",
"kernel_width",
")",
")",
",",
"np",
".",
"ones",
"(",
"(",
"kernel_width",
",",
"kernel_width",
")",
")",
")",
")",
",",
"np",
".",
"hstack",
"(",
"(",
"np",
".",
"ones",
"(",
"(",
"kernel_width",
",",
"kernel_width",
")",
")",
",",
"-",
"1",
"*",
"np",
".",
"ones",
"(",
"(",
"kernel_width",
",",
"kernel_width",
")",
")",
")",
")",
")",
")",
"elif",
"kernel_type",
"is",
"\"top_left\"",
":",
"return",
"np",
".",
"vstack",
"(",
"(",
"np",
".",
"hstack",
"(",
"(",
"-",
"1",
"*",
"np",
".",
"ones",
"(",
"(",
"kernel_width",
",",
"kernel_width",
")",
")",
",",
"np",
".",
"ones",
"(",
"(",
"kernel_width",
",",
"kernel_width",
")",
")",
")",
")",
",",
"np",
".",
"hstack",
"(",
"(",
"np",
".",
"ones",
"(",
"(",
"kernel_width",
",",
"kernel_width",
")",
")",
",",
"np",
".",
"ones",
"(",
"(",
"kernel_width",
",",
"kernel_width",
")",
")",
")",
")",
")",
")"
] | 29.234043 | 26.425532 |
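The width-2 "default" kernel from the docstring above, built directly with the same vstack/hstack construction:

import numpy as np

w = 2
m = np.vstack((
    np.hstack((-1 * np.ones((w, w)), np.ones((w, w)))),
    np.hstack((np.ones((w, w)), -1 * np.ones((w, w)))),
))
print(m)
# [[-1. -1.  1.  1.]
#  [-1. -1.  1.  1.]
#  [ 1.  1. -1. -1.]
#  [ 1.  1. -1. -1.]]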
def or_(*validation_func # type: ValidationFuncs
):
# type: (...) -> Callable
"""
An 'or' validator: returns `True` if at least one of the provided validators returns `True`. All exceptions will be
silently caught. In case of failure, a global `AllValidatorsFailed` failure will be raised, together with details
about all validation results.
:param validation_func: the base validation function or list of base validation functions to use. A callable, a
tuple(callable, help_msg_str), a tuple(callable, failure_type), or a list of several such elements. Nested lists
are supported and indicate an implicit `and_` (such as the main list). Tuples indicate an implicit
`_failure_raiser`. [mini_lambda](https://smarie.github.io/python-mini-lambda/) expressions can be used instead
of callables, they will be transformed to functions automatically.
:return:
"""
validation_func = _process_validation_function_s(list(validation_func), auto_and_wrapper=False)
if len(validation_func) == 1:
return validation_func[0] # simplification for single validator case
else:
def or_v_(x):
for validator in validation_func:
# noinspection PyBroadException
try:
res = validator(x)
if result_is_success(res):
# we can return : one validator was happy
return True
except Exception:
# catch all silently
pass
# no validator accepted: gather details and raise
raise AllValidatorsFailed(validation_func, x)
or_v_.__name__ = 'or({})'.format(get_callable_names(validation_func))
return or_v_
|
[
"def",
"or_",
"(",
"*",
"validation_func",
"# type: ValidationFuncs",
")",
":",
"# type: (...) -> Callable",
"validation_func",
"=",
"_process_validation_function_s",
"(",
"list",
"(",
"validation_func",
")",
",",
"auto_and_wrapper",
"=",
"False",
")",
"if",
"len",
"(",
"validation_func",
")",
"==",
"1",
":",
"return",
"validation_func",
"[",
"0",
"]",
"# simplification for single validator case",
"else",
":",
"def",
"or_v_",
"(",
"x",
")",
":",
"for",
"validator",
"in",
"validation_func",
":",
"# noinspection PyBroadException",
"try",
":",
"res",
"=",
"validator",
"(",
"x",
")",
"if",
"result_is_success",
"(",
"res",
")",
":",
"# we can return : one validator was happy",
"return",
"True",
"except",
"Exception",
":",
"# catch all silently",
"pass",
"# no validator accepted: gather details and raise",
"raise",
"AllValidatorsFailed",
"(",
"validation_func",
",",
"x",
")",
"or_v_",
".",
"__name__",
"=",
"'or({})'",
".",
"format",
"(",
"get_callable_names",
"(",
"validation_func",
")",
")",
"return",
"or_v_"
] | 45.973684 | 28.921053 |
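A hypothetical use of the combinator above, assuming or_ and AllValidatorsFailed are importable from the surrounding validation library:

def is_int(x):
    return isinstance(x, int)

def is_str(x):
    return isinstance(x, str)

# v = or_(is_int, is_str)
# v(3) and v('a') both return True; v(1.5) raises AllValidatorsFailed
# carrying the details of every failed base validator.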
def send(self, chat_id, msg_type, **kwargs):
"""
        Push a message to an application group chat.
        For details see: https://work.weixin.qq.com/api/doc#90000/90135/90248
        :param chat_id: group chat id
        :param msg_type: message type; one of text/image/voice/video/file/textcard/news/mpnews/markdown
        :param kwargs: extra parameters for the specific message type
:return:
"""
data = {
'chatid': chat_id,
'safe': kwargs.get('safe') or 0
}
data.update(self._build_msg_content(msg_type, **kwargs))
return self._post('appchat/send', data=data)
|
[
"def",
"send",
"(",
"self",
",",
"chat_id",
",",
"msg_type",
",",
"*",
"*",
"kwargs",
")",
":",
"data",
"=",
"{",
"'chatid'",
":",
"chat_id",
",",
"'safe'",
":",
"kwargs",
".",
"get",
"(",
"'safe'",
")",
"or",
"0",
"}",
"data",
".",
"update",
"(",
"self",
".",
"_build_msg_content",
"(",
"msg_type",
",",
"*",
"*",
"kwargs",
")",
")",
"return",
"self",
".",
"_post",
"(",
"'appchat/send'",
",",
"data",
"=",
"data",
")"
] | 31.058824 | 19.529412 |
def get_site_collection(oqparam):
"""
Returns a SiteCollection instance by looking at the points and the
site model defined by the configuration parameters.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
"""
mesh = get_mesh(oqparam)
req_site_params = get_gsim_lt(oqparam).req_site_params
if oqparam.inputs.get('site_model'):
sm = get_site_model(oqparam)
try:
# in the future we could have elevation in the site model
depth = sm['depth']
except ValueError:
# this is the normal case
depth = None
sitecol = site.SiteCollection.from_points(
sm['lon'], sm['lat'], depth, sm, req_site_params)
if oqparam.region_grid_spacing:
logging.info('Reducing the grid sites to the site '
'parameters within the grid spacing')
sitecol, params, _ = geo.utils.assoc(
sm, sitecol, oqparam.region_grid_spacing * 1.414, 'filter')
sitecol.make_complete()
else:
params = sm
for name in req_site_params:
if name in ('vs30measured', 'backarc') \
and name not in params.dtype.names:
sitecol._set(name, 0) # the default
else:
sitecol._set(name, params[name])
elif mesh is None and oqparam.ground_motion_fields:
raise InvalidFile('You are missing sites.csv or site_model.csv in %s'
% oqparam.inputs['job_ini'])
elif mesh is None:
# a None sitecol is okay when computing the ruptures only
return
else: # use the default site params
sitecol = site.SiteCollection.from_points(
mesh.lons, mesh.lats, mesh.depths, oqparam, req_site_params)
ss = os.environ.get('OQ_SAMPLE_SITES')
if ss:
# debugging tip to reduce the size of a calculation
# OQ_SAMPLE_SITES=.1 oq engine --run job.ini
# will run a computation with 10 times less sites
sitecol.array = numpy.array(random_filter(sitecol.array, float(ss)))
sitecol.make_complete()
return sitecol
|
[
"def",
"get_site_collection",
"(",
"oqparam",
")",
":",
"mesh",
"=",
"get_mesh",
"(",
"oqparam",
")",
"req_site_params",
"=",
"get_gsim_lt",
"(",
"oqparam",
")",
".",
"req_site_params",
"if",
"oqparam",
".",
"inputs",
".",
"get",
"(",
"'site_model'",
")",
":",
"sm",
"=",
"get_site_model",
"(",
"oqparam",
")",
"try",
":",
"# in the future we could have elevation in the site model",
"depth",
"=",
"sm",
"[",
"'depth'",
"]",
"except",
"ValueError",
":",
"# this is the normal case",
"depth",
"=",
"None",
"sitecol",
"=",
"site",
".",
"SiteCollection",
".",
"from_points",
"(",
"sm",
"[",
"'lon'",
"]",
",",
"sm",
"[",
"'lat'",
"]",
",",
"depth",
",",
"sm",
",",
"req_site_params",
")",
"if",
"oqparam",
".",
"region_grid_spacing",
":",
"logging",
".",
"info",
"(",
"'Reducing the grid sites to the site '",
"'parameters within the grid spacing'",
")",
"sitecol",
",",
"params",
",",
"_",
"=",
"geo",
".",
"utils",
".",
"assoc",
"(",
"sm",
",",
"sitecol",
",",
"oqparam",
".",
"region_grid_spacing",
"*",
"1.414",
",",
"'filter'",
")",
"sitecol",
".",
"make_complete",
"(",
")",
"else",
":",
"params",
"=",
"sm",
"for",
"name",
"in",
"req_site_params",
":",
"if",
"name",
"in",
"(",
"'vs30measured'",
",",
"'backarc'",
")",
"and",
"name",
"not",
"in",
"params",
".",
"dtype",
".",
"names",
":",
"sitecol",
".",
"_set",
"(",
"name",
",",
"0",
")",
"# the default",
"else",
":",
"sitecol",
".",
"_set",
"(",
"name",
",",
"params",
"[",
"name",
"]",
")",
"elif",
"mesh",
"is",
"None",
"and",
"oqparam",
".",
"ground_motion_fields",
":",
"raise",
"InvalidFile",
"(",
"'You are missing sites.csv or site_model.csv in %s'",
"%",
"oqparam",
".",
"inputs",
"[",
"'job_ini'",
"]",
")",
"elif",
"mesh",
"is",
"None",
":",
"# a None sitecol is okay when computing the ruptures only",
"return",
"else",
":",
"# use the default site params",
"sitecol",
"=",
"site",
".",
"SiteCollection",
".",
"from_points",
"(",
"mesh",
".",
"lons",
",",
"mesh",
".",
"lats",
",",
"mesh",
".",
"depths",
",",
"oqparam",
",",
"req_site_params",
")",
"ss",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'OQ_SAMPLE_SITES'",
")",
"if",
"ss",
":",
"# debugging tip to reduce the size of a calculation",
"# OQ_SAMPLE_SITES=.1 oq engine --run job.ini",
"# will run a computation with 10 times less sites",
"sitecol",
".",
"array",
"=",
"numpy",
".",
"array",
"(",
"random_filter",
"(",
"sitecol",
".",
"array",
",",
"float",
"(",
"ss",
")",
")",
")",
"sitecol",
".",
"make_complete",
"(",
")",
"return",
"sitecol"
] | 42.039216 | 16.509804 |
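A hedged usage sketch for `get_site_collection`, assuming the OpenQuake engine is installed; `job.ini` is a hypothetical calculation file defining sites or a site model:

```python
from openquake.commonlib import readinput

oqparam = readinput.get_oqparam('job.ini')        # parse the job configuration
sitecol = readinput.get_site_collection(oqparam)  # may be None for rupture-only runs
if sitecol is not None:
    print(len(sitecol), 'sites')
```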
def translate(translationAmt):
"""Create a translation matrix."""
if not isinstance(translationAmt, Vector3):
raise ValueError("translationAmt must be a Vector3")
ma4 = Matrix4((1, 0, 0, translationAmt.x),
(0, 1, 0, translationAmt.y),
(0, 0, 1, translationAmt.z),
(0, 0, 0, 1))
return ma4
|
[
"def",
"translate",
"(",
"translationAmt",
")",
":",
"if",
"not",
"isinstance",
"(",
"translationAmt",
",",
"Vector3",
")",
":",
"raise",
"ValueError",
"(",
"\"translationAmt must be a Vector3\"",
")",
"ma4",
"=",
"Matrix4",
"(",
"(",
"1",
",",
"0",
",",
"0",
",",
"translationAmt",
".",
"x",
")",
",",
"(",
"0",
",",
"1",
",",
"0",
",",
"translationAmt",
".",
"y",
")",
",",
"(",
"0",
",",
"0",
",",
"1",
",",
"translationAmt",
".",
"z",
")",
",",
"(",
"0",
",",
"0",
",",
"0",
",",
"1",
")",
")",
"return",
"ma4"
] | 39 | 14.2 |
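The 4x4 homogeneous translation matrix built by `translate` can be checked standalone with NumPy (`Matrix4`/`Vector3` are the library's own types, so plain arrays stand in for them here):

```python
import numpy as np

t = np.array([2.0, -1.0, 3.0])        # translation amounts (x, y, z)
m = np.array([[1, 0, 0, t[0]],
              [0, 1, 0, t[1]],
              [0, 0, 1, t[2]],
              [0, 0, 0, 1]], dtype=float)
p = np.array([1.0, 1.0, 1.0, 1.0])    # a point in homogeneous coordinates
print(m @ p)                          # -> [3. 0. 4. 1.]
```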
def single_gate_params(gate, params=None):
"""Apply a single qubit gate to the qubit.
Args:
gate(str): the single qubit gate name
params(list): the operation parameters op['params']
Returns:
tuple: a tuple of U gate parameters (theta, phi, lam)
Raises:
QiskitError: if the gate name is not valid
"""
if gate in ('U', 'u3'):
return params[0], params[1], params[2]
elif gate == 'u2':
return np.pi / 2, params[0], params[1]
elif gate == 'u1':
return 0, 0, params[0]
elif gate == 'id':
return 0, 0, 0
raise QiskitError('Gate is not among the valid types: %s' % gate)
|
[
"def",
"single_gate_params",
"(",
"gate",
",",
"params",
"=",
"None",
")",
":",
"if",
"gate",
"in",
"(",
"'U'",
",",
"'u3'",
")",
":",
"return",
"params",
"[",
"0",
"]",
",",
"params",
"[",
"1",
"]",
",",
"params",
"[",
"2",
"]",
"elif",
"gate",
"==",
"'u2'",
":",
"return",
"np",
".",
"pi",
"/",
"2",
",",
"params",
"[",
"0",
"]",
",",
"params",
"[",
"1",
"]",
"elif",
"gate",
"==",
"'u1'",
":",
"return",
"0",
",",
"0",
",",
"params",
"[",
"0",
"]",
"elif",
"gate",
"==",
"'id'",
":",
"return",
"0",
",",
"0",
",",
"0",
"raise",
"QiskitError",
"(",
"'Gate is not among the valid types: %s'",
"%",
"gate",
")"
] | 32.4 | 16.05 |
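Assuming `single_gate_params` from above is in scope, the u2/u1 reductions to generic U-gate parameters can be checked directly:

```python
# u2(phi, lam) == U(pi/2, phi, lam); u1(lam) == U(0, 0, lam).
print(single_gate_params('u2', [0.1, 0.2]))   # (1.5707..., 0.1, 0.2)
print(single_gate_params('u1', [0.3]))        # (0, 0, 0.3)
print(single_gate_params('id'))               # (0, 0, 0)
```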
def _maybe_download_corpus(tmp_dir, vocab_type):
"""Download and unpack the corpus.
Args:
tmp_dir: directory containing dataset.
vocab_type: which vocabulary are we using.
Returns:
The list of names of files.
"""
if vocab_type == text_problems.VocabType.CHARACTER:
dataset_url = ("https://s3.amazonaws.com/research.metamind.io/wikitext"
"/wikitext-103-raw-v1.zip")
dir_name = "wikitext-103-raw"
else:
dataset_url = ("https://s3.amazonaws.com/research.metamind.io/wikitext"
"/wikitext-103-v1.zip")
dir_name = "wikitext-103"
fname = os.path.basename(dataset_url)
compressed_filepath = generator_utils.maybe_download(tmp_dir, fname,
dataset_url)
zip_ref = zipfile.ZipFile(compressed_filepath, "r")
zip_ref.extractall(tmp_dir)
zip_ref.close()
files = os.path.join(tmp_dir, dir_name, "*")
train_file, valid_file, test_file = None, None, None
for f in tf.gfile.Glob(files):
fname = os.path.basename(f)
if "train" in fname:
train_file = f
elif "valid" in fname:
valid_file = f
elif "test" in fname:
test_file = f
assert train_file, "Training file not found"
assert valid_file, "Validation file not found"
assert test_file, "Testing file not found"
return train_file, valid_file, test_file
|
[
"def",
"_maybe_download_corpus",
"(",
"tmp_dir",
",",
"vocab_type",
")",
":",
"if",
"vocab_type",
"==",
"text_problems",
".",
"VocabType",
".",
"CHARACTER",
":",
"dataset_url",
"=",
"(",
"\"https://s3.amazonaws.com/research.metamind.io/wikitext\"",
"\"/wikitext-103-raw-v1.zip\"",
")",
"dir_name",
"=",
"\"wikitext-103-raw\"",
"else",
":",
"dataset_url",
"=",
"(",
"\"https://s3.amazonaws.com/research.metamind.io/wikitext\"",
"\"/wikitext-103-v1.zip\"",
")",
"dir_name",
"=",
"\"wikitext-103\"",
"fname",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"dataset_url",
")",
"compressed_filepath",
"=",
"generator_utils",
".",
"maybe_download",
"(",
"tmp_dir",
",",
"fname",
",",
"dataset_url",
")",
"zip_ref",
"=",
"zipfile",
".",
"ZipFile",
"(",
"compressed_filepath",
",",
"\"r\"",
")",
"zip_ref",
".",
"extractall",
"(",
"tmp_dir",
")",
"zip_ref",
".",
"close",
"(",
")",
"files",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tmp_dir",
",",
"dir_name",
",",
"\"*\"",
")",
"train_file",
",",
"valid_file",
",",
"test_file",
"=",
"None",
",",
"None",
",",
"None",
"for",
"f",
"in",
"tf",
".",
"gfile",
".",
"Glob",
"(",
"files",
")",
":",
"fname",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"f",
")",
"if",
"\"train\"",
"in",
"fname",
":",
"train_file",
"=",
"f",
"elif",
"\"valid\"",
"in",
"fname",
":",
"valid_file",
"=",
"f",
"elif",
"\"test\"",
"in",
"fname",
":",
"test_file",
"=",
"f",
"assert",
"train_file",
",",
"\"Training file not found\"",
"assert",
"valid_file",
",",
"\"Validation file not found\"",
"assert",
"test_file",
",",
"\"Testing file not found\"",
"return",
"train_file",
",",
"valid_file",
",",
"test_file"
] | 31.093023 | 18.093023 |
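The split-by-filename logic at the end of `_maybe_download_corpus` can be exercised standalone:

```python
import os

files = ['wiki.train.tokens', 'wiki.valid.tokens', 'wiki.test.tokens']
splits = {}
for f in files:
    name = os.path.basename(f)
    for key in ('train', 'valid', 'test'):
        if key in name:
            splits[key] = f
print(splits)  # one entry per split
```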
def generate(env):
"""Add Builders and construction variables for swig to an Environment."""
c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
c_file.suffix['.i'] = swigSuffixEmitter
cxx_file.suffix['.i'] = swigSuffixEmitter
c_file.add_action('.i', SwigAction)
c_file.add_emitter('.i', _swigEmitter)
cxx_file.add_action('.i', SwigAction)
cxx_file.add_emitter('.i', _swigEmitter)
java_file = SCons.Tool.CreateJavaFileBuilder(env)
java_file.suffix['.i'] = swigSuffixEmitter
java_file.add_action('.i', SwigAction)
java_file.add_emitter('.i', _swigEmitter)
if 'SWIG' not in env:
env['SWIG'] = env.Detect(swigs) or swigs[0]
env['SWIGVERSION'] = _get_swig_version(env, env['SWIG'])
env['SWIGFLAGS'] = SCons.Util.CLVar('')
env['SWIGDIRECTORSUFFIX'] = '_wrap.h'
env['SWIGCFILESUFFIX'] = '_wrap$CFILESUFFIX'
env['SWIGCXXFILESUFFIX'] = '_wrap$CXXFILESUFFIX'
env['_SWIGOUTDIR'] = r'${"-outdir \"%s\"" % SWIGOUTDIR}'
env['SWIGPATH'] = []
env['SWIGINCPREFIX'] = '-I'
env['SWIGINCSUFFIX'] = ''
env['_SWIGINCFLAGS'] = '$( ${_concat(SWIGINCPREFIX, SWIGPATH, SWIGINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'
env['SWIGCOM'] = '$SWIG -o $TARGET ${_SWIGOUTDIR} ${_SWIGINCFLAGS} $SWIGFLAGS $SOURCES'
|
[
"def",
"generate",
"(",
"env",
")",
":",
"c_file",
",",
"cxx_file",
"=",
"SCons",
".",
"Tool",
".",
"createCFileBuilders",
"(",
"env",
")",
"c_file",
".",
"suffix",
"[",
"'.i'",
"]",
"=",
"swigSuffixEmitter",
"cxx_file",
".",
"suffix",
"[",
"'.i'",
"]",
"=",
"swigSuffixEmitter",
"c_file",
".",
"add_action",
"(",
"'.i'",
",",
"SwigAction",
")",
"c_file",
".",
"add_emitter",
"(",
"'.i'",
",",
"_swigEmitter",
")",
"cxx_file",
".",
"add_action",
"(",
"'.i'",
",",
"SwigAction",
")",
"cxx_file",
".",
"add_emitter",
"(",
"'.i'",
",",
"_swigEmitter",
")",
"java_file",
"=",
"SCons",
".",
"Tool",
".",
"CreateJavaFileBuilder",
"(",
"env",
")",
"java_file",
".",
"suffix",
"[",
"'.i'",
"]",
"=",
"swigSuffixEmitter",
"java_file",
".",
"add_action",
"(",
"'.i'",
",",
"SwigAction",
")",
"java_file",
".",
"add_emitter",
"(",
"'.i'",
",",
"_swigEmitter",
")",
"if",
"'SWIG'",
"not",
"in",
"env",
":",
"env",
"[",
"'SWIG'",
"]",
"=",
"env",
".",
"Detect",
"(",
"swigs",
")",
"or",
"swigs",
"[",
"0",
"]",
"env",
"[",
"'SWIGVERSION'",
"]",
"=",
"_get_swig_version",
"(",
"env",
",",
"env",
"[",
"'SWIG'",
"]",
")",
"env",
"[",
"'SWIGFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"''",
")",
"env",
"[",
"'SWIGDIRECTORSUFFIX'",
"]",
"=",
"'_wrap.h'",
"env",
"[",
"'SWIGCFILESUFFIX'",
"]",
"=",
"'_wrap$CFILESUFFIX'",
"env",
"[",
"'SWIGCXXFILESUFFIX'",
"]",
"=",
"'_wrap$CXXFILESUFFIX'",
"env",
"[",
"'_SWIGOUTDIR'",
"]",
"=",
"r'${\"-outdir \\\"%s\\\"\" % SWIGOUTDIR}'",
"env",
"[",
"'SWIGPATH'",
"]",
"=",
"[",
"]",
"env",
"[",
"'SWIGINCPREFIX'",
"]",
"=",
"'-I'",
"env",
"[",
"'SWIGINCSUFFIX'",
"]",
"=",
"''",
"env",
"[",
"'_SWIGINCFLAGS'",
"]",
"=",
"'$( ${_concat(SWIGINCPREFIX, SWIGPATH, SWIGINCSUFFIX, __env__, RDirs, TARGET, SOURCE)} $)'",
"env",
"[",
"'SWIGCOM'",
"]",
"=",
"'$SWIG -o $TARGET ${_SWIGOUTDIR} ${_SWIGINCFLAGS} $SWIGFLAGS $SOURCES'"
] | 41.1875 | 18.59375 |
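A hedged `SConstruct` sketch using the tool configured by `generate`, assuming SCons and swig are installed and on PATH; the interface and source file names are hypothetical:

```python
# SConstruct
env = Environment(tools=['default', 'swig'])
env.Append(SWIGFLAGS=['-python'], SWIGPATH=['include'])
env.SharedLibrary('example', ['example.i', 'example_impl.c'])
```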
def search(self, project, text=''):
"""
Search in your Taiga.io instance
:param project: the project id
:param text: the query of your search
"""
result = self.raw_request.get(
'search', query={'project': project, 'text': text}
)
result = result.json()
search_result = SearchResult()
search_result.tasks = self.tasks.parse_list(result['tasks'])
search_result.issues = self.issues.parse_list(result['issues'])
search_result.user_stories = self.user_stories.parse_list(
result['userstories']
)
search_result.wikipages = self.wikipages.parse_list(
result['wikipages']
)
return search_result
|
[
"def",
"search",
"(",
"self",
",",
"project",
",",
"text",
"=",
"''",
")",
":",
"result",
"=",
"self",
".",
"raw_request",
".",
"get",
"(",
"'search'",
",",
"query",
"=",
"{",
"'project'",
":",
"project",
",",
"'text'",
":",
"text",
"}",
")",
"result",
"=",
"result",
".",
"json",
"(",
")",
"search_result",
"=",
"SearchResult",
"(",
")",
"search_result",
".",
"tasks",
"=",
"self",
".",
"tasks",
".",
"parse_list",
"(",
"result",
"[",
"'tasks'",
"]",
")",
"search_result",
".",
"issues",
"=",
"self",
".",
"issues",
".",
"parse_list",
"(",
"result",
"[",
"'issues'",
"]",
")",
"search_result",
".",
"user_stories",
"=",
"self",
".",
"user_stories",
".",
"parse_list",
"(",
"result",
"[",
"'userstories'",
"]",
")",
"search_result",
".",
"wikipages",
"=",
"self",
".",
"wikipages",
".",
"parse_list",
"(",
"result",
"[",
"'wikipages'",
"]",
")",
"return",
"search_result"
] | 34.857143 | 14.952381 |
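A hedged usage sketch for `search`, assuming python-taiga is installed; the host, credentials, and project id are hypothetical:

```python
from taiga import TaigaAPI

api = TaigaAPI(host='https://taiga.example.com')
api.auth(username='user', password='secret')
result = api.search(project=1, text='login bug')
for story in result.user_stories:
    print(story)
```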
def on_entry_click(self, event):
"""
        Callback invoked whenever the entry widget is clicked.
"""
        if event.widget.config('fg')[4] == 'grey':
            event.widget.delete(0, "end")  # delete all the text in the entry
            event.widget.insert(0, '')  #Insert blank for user input
event.widget.config(fg = 'black')
|
[
"def",
"on_entry_click",
"(",
"self",
",",
"event",
")",
":",
"if",
"event",
".",
"widget",
".",
"config",
"(",
"'fg'",
")",
"[",
"4",
"]",
"==",
"'grey'",
":",
"event",
".",
"widget",
".",
"delete",
"(",
"0",
",",
"\"end\"",
")",
"# delete all the text in the entry",
"event",
".",
"widget",
".",
"insert",
"(",
"0",
",",
"''",
")",
"#Insert blank for user input",
"event",
".",
"widget",
".",
"config",
"(",
"fg",
"=",
"'black'",
")"
] | 38.5 | 9.75 |
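The same grey-placeholder pattern as `on_entry_click` in a standalone Tkinter demo (using `cget`, which returns the current option value directly instead of indexing `config(...)[4]`):

```python
import tkinter as tk

root = tk.Tk()
entry = tk.Entry(root, fg='grey')
entry.insert(0, 'Search...')

def on_entry_click(event):
    if event.widget.cget('fg') == 'grey':
        event.widget.delete(0, 'end')   # clear the placeholder
        event.widget.config(fg='black')

entry.bind('<FocusIn>', on_entry_click)
entry.pack()
# root.mainloop()  # uncomment to run interactively
```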
def impulse_response(self, impulse_length=30):
"""
Get the impulse response corresponding to our model.
Returns
-------
psi : array_like(float)
psi[j] is the response at lag j of the impulse response.
We take psi[0] as unity.
"""
from scipy.signal import dimpulse
sys = self.ma_poly, self.ar_poly, 1
times, psi = dimpulse(sys, n=impulse_length)
psi = psi[0].flatten() # Simplify return value into flat array
return psi
|
[
"def",
"impulse_response",
"(",
"self",
",",
"impulse_length",
"=",
"30",
")",
":",
"from",
"scipy",
".",
"signal",
"import",
"dimpulse",
"sys",
"=",
"self",
".",
"ma_poly",
",",
"self",
".",
"ar_poly",
",",
"1",
"times",
",",
"psi",
"=",
"dimpulse",
"(",
"sys",
",",
"n",
"=",
"impulse_length",
")",
"psi",
"=",
"psi",
"[",
"0",
"]",
".",
"flatten",
"(",
")",
"# Simplify return value into flat array",
"return",
"psi"
] | 30.470588 | 18 |
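A standalone check of the underlying `dimpulse` call for an AR(1) process y_t = 0.5 y_{t-1} + eps_t; padding the MA polynomial to the AR polynomial's length keeps psi[0] = 1 under scipy's descending-power convention:

```python
from scipy.signal import dimpulse

sys = ([1.0, 0.0], [1.0, -0.5], 1)   # (ma_poly padded, ar_poly, dt)
times, psi = dimpulse(sys, n=5)
print(psi[0].flatten())              # [1.  0.5  0.25  0.125  0.0625]
```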
def add_pv(self, device):
"""
Initializes a device as a physical volume and adds it to the volume group::
from lvm2py import *
lvm = LVM()
vg = lvm.get_vg("myvg", "w")
vg.add_pv("/dev/sdbX")
*Args:*
* device (str): An existing device.
*Raises:*
* ValueError, CommitError, HandleError
.. note::
The VolumeGroup instance must be in write mode, otherwise CommitError
is raised.
"""
if not os.path.exists(device):
raise ValueError("%s does not exist." % device)
self.open()
ext = lvm_vg_extend(self.handle, device)
if ext != 0:
self.close()
raise CommitError("Failed to extend Volume Group.")
self._commit()
self.close()
return PhysicalVolume(self, name=device)
|
[
"def",
"add_pv",
"(",
"self",
",",
"device",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"device",
")",
":",
"raise",
"ValueError",
"(",
"\"%s does not exist.\"",
"%",
"device",
")",
"self",
".",
"open",
"(",
")",
"ext",
"=",
"lvm_vg_extend",
"(",
"self",
".",
"handle",
",",
"device",
")",
"if",
"ext",
"!=",
"0",
":",
"self",
".",
"close",
"(",
")",
"raise",
"CommitError",
"(",
"\"Failed to extend Volume Group.\"",
")",
"self",
".",
"_commit",
"(",
")",
"self",
".",
"close",
"(",
")",
"return",
"PhysicalVolume",
"(",
"self",
",",
"name",
"=",
"device",
")"
] | 26.484848 | 21.757576 |
def async_or_fail(self, **options):
"""
Attempt to call self.apply_async, but if that fails with an exception,
we fake the task completion using the exception as the result. This
allows us to seamlessly handle errors on task creation the same way we
handle errors when a task runs, simplifying the user interface.
"""
args = options.pop("args", None)
kwargs = options.pop("kwargs", None)
possible_broker_errors = self._get_possible_broker_errors_tuple()
try:
return self.apply_async(args, kwargs, **options)
except possible_broker_errors as e:
return self.simulate_async_error(e)
|
[
"def",
"async_or_fail",
"(",
"self",
",",
"*",
"*",
"options",
")",
":",
"args",
"=",
"options",
".",
"pop",
"(",
"\"args\"",
",",
"None",
")",
"kwargs",
"=",
"options",
".",
"pop",
"(",
"\"kwargs\"",
",",
"None",
")",
"possible_broker_errors",
"=",
"self",
".",
"_get_possible_broker_errors_tuple",
"(",
")",
"try",
":",
"return",
"self",
".",
"apply_async",
"(",
"args",
",",
"kwargs",
",",
"*",
"*",
"options",
")",
"except",
"possible_broker_errors",
"as",
"e",
":",
"return",
"self",
".",
"simulate_async_error",
"(",
"e",
")"
] | 48.428571 | 17.285714 |
def dist(self, src, tar, probs=None):
"""Return the NCD between two strings using arithmetic coding.
Parameters
----------
src : str
Source string for comparison
tar : str
Target string for comparison
probs : dict
A dictionary trained with :py:meth:`Arithmetic.train`
Returns
-------
float
Compression distance
Examples
--------
>>> cmp = NCDarith()
>>> cmp.dist('cat', 'hat')
0.5454545454545454
>>> cmp.dist('Niall', 'Neil')
0.6875
>>> cmp.dist('aluminum', 'Catalan')
0.8275862068965517
>>> cmp.dist('ATCG', 'TAGC')
0.6923076923076923
"""
if src == tar:
return 0.0
if probs is None:
# lacking a reasonable dictionary, train on the strings themselves
self._coder.train(src + tar)
else:
self._coder.set_probs(probs)
src_comp = self._coder.encode(src)[1]
tar_comp = self._coder.encode(tar)[1]
concat_comp = self._coder.encode(src + tar)[1]
concat_comp2 = self._coder.encode(tar + src)[1]
return (
min(concat_comp, concat_comp2) - min(src_comp, tar_comp)
) / max(src_comp, tar_comp)
|
[
"def",
"dist",
"(",
"self",
",",
"src",
",",
"tar",
",",
"probs",
"=",
"None",
")",
":",
"if",
"src",
"==",
"tar",
":",
"return",
"0.0",
"if",
"probs",
"is",
"None",
":",
"# lacking a reasonable dictionary, train on the strings themselves",
"self",
".",
"_coder",
".",
"train",
"(",
"src",
"+",
"tar",
")",
"else",
":",
"self",
".",
"_coder",
".",
"set_probs",
"(",
"probs",
")",
"src_comp",
"=",
"self",
".",
"_coder",
".",
"encode",
"(",
"src",
")",
"[",
"1",
"]",
"tar_comp",
"=",
"self",
".",
"_coder",
".",
"encode",
"(",
"tar",
")",
"[",
"1",
"]",
"concat_comp",
"=",
"self",
".",
"_coder",
".",
"encode",
"(",
"src",
"+",
"tar",
")",
"[",
"1",
"]",
"concat_comp2",
"=",
"self",
".",
"_coder",
".",
"encode",
"(",
"tar",
"+",
"src",
")",
"[",
"1",
"]",
"return",
"(",
"min",
"(",
"concat_comp",
",",
"concat_comp2",
")",
"-",
"min",
"(",
"src_comp",
",",
"tar_comp",
")",
")",
"/",
"max",
"(",
"src_comp",
",",
"tar_comp",
")"
] | 27.404255 | 18.276596 |
def to_btc(ccy, value, api_code=None):
"""Call the 'tobtc' method and convert x value in the provided currency to BTC.
:param str ccy: currency code
:param float value: value to convert
:param str api_code: Blockchain.info API code
:return: the value in BTC
"""
res = 'tobtc?currency={0}&value={1}'.format(ccy, value)
if api_code is not None:
res += '&api_code=' + api_code
return float(util.call_api(res))
|
[
"def",
"to_btc",
"(",
"ccy",
",",
"value",
",",
"api_code",
"=",
"None",
")",
":",
"res",
"=",
"'tobtc?currency={0}&value={1}'",
".",
"format",
"(",
"ccy",
",",
"value",
")",
"if",
"api_code",
"is",
"not",
"None",
":",
"res",
"+=",
"'&api_code='",
"+",
"api_code",
"return",
"float",
"(",
"util",
".",
"call_api",
"(",
"res",
")",
")"
] | 34.461538 | 10.615385 |
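A hedged usage sketch, assuming the `blockchain` package (the Blockchain.info client this function belongs to) is installed and network access is available:

```python
from blockchain import exchangerates

btc = exchangerates.to_btc('USD', 100)  # BTC equivalent of 100 USD right now
print(btc)
```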
def satoshi_to_currency_cached(num, currency):
"""Converts a given number of satoshi to another currency as a formatted
string rounded down to the proper number of decimal places. Results are
cached using a decorator for 60 seconds by default. See :ref:`cache times`.
:param num: The number of satoshi.
:type num: ``int``
:param currency: One of the :ref:`supported currencies`.
:type currency: ``str``
:rtype: ``str``
"""
return '{:f}'.format(
Decimal(
num / Decimal(currency_to_satoshi_cached(1, currency))
).quantize(
Decimal('0.' + '0' * CURRENCY_PRECISION[currency]),
rounding=ROUND_DOWN
).normalize()
)
|
[
"def",
"satoshi_to_currency_cached",
"(",
"num",
",",
"currency",
")",
":",
"return",
"'{:f}'",
".",
"format",
"(",
"Decimal",
"(",
"num",
"/",
"Decimal",
"(",
"currency_to_satoshi_cached",
"(",
"1",
",",
"currency",
")",
")",
")",
".",
"quantize",
"(",
"Decimal",
"(",
"'0.'",
"+",
"'0'",
"*",
"CURRENCY_PRECISION",
"[",
"currency",
"]",
")",
",",
"rounding",
"=",
"ROUND_DOWN",
")",
".",
"normalize",
"(",
")",
")"
] | 36.578947 | 19.263158 |
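The quantize/round-down arithmetic can be checked standalone with a hypothetical rate of 1 BTC = 26000.00 USD (currency precision 2):

```python
from decimal import Decimal, ROUND_DOWN

num = 150_000                                      # satoshi
rate = Decimal('26000.00')                         # hypothetical USD per BTC
value = Decimal(num) / Decimal(100_000_000) * rate
print(value.quantize(Decimal('0.01'), rounding=ROUND_DOWN))  # 39.00
```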
def get_solarposition(self, times, pressure=None, temperature=12,
**kwargs):
"""
Uses the :py:func:`solarposition.get_solarposition` function
to calculate the solar zenith, azimuth, etc. at this location.
Parameters
----------
times : DatetimeIndex
pressure : None, float, or array-like, default None
If None, pressure will be calculated using
:py:func:`atmosphere.alt2pres` and ``self.altitude``.
temperature : None, float, or array-like, default 12
kwargs
passed to :py:func:`solarposition.get_solarposition`
Returns
-------
solar_position : DataFrame
Columns depend on the ``method`` kwarg, but always include
``zenith`` and ``azimuth``.
"""
if pressure is None:
pressure = atmosphere.alt2pres(self.altitude)
return solarposition.get_solarposition(times, latitude=self.latitude,
longitude=self.longitude,
altitude=self.altitude,
pressure=pressure,
temperature=temperature,
**kwargs)
|
[
"def",
"get_solarposition",
"(",
"self",
",",
"times",
",",
"pressure",
"=",
"None",
",",
"temperature",
"=",
"12",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"pressure",
"is",
"None",
":",
"pressure",
"=",
"atmosphere",
".",
"alt2pres",
"(",
"self",
".",
"altitude",
")",
"return",
"solarposition",
".",
"get_solarposition",
"(",
"times",
",",
"latitude",
"=",
"self",
".",
"latitude",
",",
"longitude",
"=",
"self",
".",
"longitude",
",",
"altitude",
"=",
"self",
".",
"altitude",
",",
"pressure",
"=",
"pressure",
",",
"temperature",
"=",
"temperature",
",",
"*",
"*",
"kwargs",
")"
] | 40.96875 | 22.40625 |
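A hedged usage sketch, assuming pvlib is installed; the coordinates and times are hypothetical:

```python
import pandas as pd
from pvlib.location import Location

loc = Location(32.2, -110.9, tz='US/Arizona', altitude=700)
times = pd.date_range('2019-06-01 10:00', periods=3, freq='1H',
                      tz='US/Arizona')
solpos = loc.get_solarposition(times)
print(solpos[['zenith', 'azimuth']])
```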
def p_InDecrement(p):
'''
InDecrement : INDECREMENT Expression
| Expression INDECREMENT
'''
from .helper import isString
if isString(p[1]):
p[0] = InDecrement(p[1], p[2], False)
else:
p[0] = InDecrement(p[2], p[1], True)
|
[
"def",
"p_InDecrement",
"(",
"p",
")",
":",
"from",
".",
"helper",
"import",
"isString",
"if",
"isString",
"(",
"p",
"[",
"1",
"]",
")",
":",
"p",
"[",
"0",
"]",
"=",
"InDecrement",
"(",
"p",
"[",
"1",
"]",
",",
"p",
"[",
"2",
"]",
",",
"False",
")",
"else",
":",
"p",
"[",
"0",
"]",
"=",
"InDecrement",
"(",
"p",
"[",
"2",
"]",
",",
"p",
"[",
"1",
"]",
",",
"True",
")"
] | 26.7 | 15.1 |
def get_filedata(self, condition=None, page_size=1000):
"""Return a generator over all results matching the provided condition
:param condition: An :class:`.Expression` which defines the condition
which must be matched on the filedata that will be retrieved from
file data store. If a condition is unspecified, the following condition
will be used ``fd_path == '~/'``. This condition will match all file
        data in this account's "home" directory (a sensible root).
:type condition: :class:`.Expression` or None
:param int page_size: The number of results to fetch in a single page. Regardless
of the size specified, :meth:`.get_filedata` will continue to fetch pages
and yield results until all items have been fetched.
:return: Generator yielding :class:`.FileDataObject` instances matching the
provided conditions.
"""
condition = validate_type(condition, type(None), Expression, *six.string_types)
page_size = validate_type(page_size, *six.integer_types)
if condition is None:
condition = (fd_path == "~/") # home directory
params = {"embed": "true", "condition": condition.compile()}
for fd_json in self._conn.iter_json_pages("/ws/FileData", page_size=page_size, **params):
yield FileDataObject.from_json(self, fd_json)
|
[
"def",
"get_filedata",
"(",
"self",
",",
"condition",
"=",
"None",
",",
"page_size",
"=",
"1000",
")",
":",
"condition",
"=",
"validate_type",
"(",
"condition",
",",
"type",
"(",
"None",
")",
",",
"Expression",
",",
"*",
"six",
".",
"string_types",
")",
"page_size",
"=",
"validate_type",
"(",
"page_size",
",",
"*",
"six",
".",
"integer_types",
")",
"if",
"condition",
"is",
"None",
":",
"condition",
"=",
"(",
"fd_path",
"==",
"\"~/\"",
")",
"# home directory",
"params",
"=",
"{",
"\"embed\"",
":",
"\"true\"",
",",
"\"condition\"",
":",
"condition",
".",
"compile",
"(",
")",
"}",
"for",
"fd_json",
"in",
"self",
".",
"_conn",
".",
"iter_json_pages",
"(",
"\"/ws/FileData\"",
",",
"page_size",
"=",
"page_size",
",",
"*",
"*",
"params",
")",
":",
"yield",
"FileDataObject",
".",
"from_json",
"(",
"self",
",",
"fd_json",
")"
] | 55.96 | 29.92 |
def iter_org_issues(self, name, filter='', state='', labels='', sort='',
direction='', since=None, number=-1, etag=None):
"""Iterate over the organnization's issues if the authenticated user
belongs to it.
:param str name: (required), name of the organization
:param str filter: accepted values:
('assigned', 'created', 'mentioned', 'subscribed')
api-default: 'assigned'
:param str state: accepted values: ('open', 'closed')
api-default: 'open'
:param str labels: comma-separated list of label names, e.g.,
'bug,ui,@high'
:param str sort: accepted values: ('created', 'updated', 'comments')
api-default: created
:param str direction: accepted values: ('asc', 'desc')
api-default: desc
:param since: (optional), Only issues after this date will
be returned. This can be a `datetime` or an ISO8601 formatted
date string, e.g., 2012-05-20T23:10:27Z
:type since: datetime or string
:param int number: (optional), number of issues to return. Default:
-1, returns all available issues
:param str etag: (optional), ETag from a previous request to the same
endpoint
:returns: generator of :class:`Issue <github3.issues.Issue>`
"""
url = self._build_url('orgs', name, 'issues')
# issue_params will handle the since parameter
params = issue_params(filter, state, labels, sort, direction, since)
return self._iter(int(number), url, Issue, params, etag)
|
[
"def",
"iter_org_issues",
"(",
"self",
",",
"name",
",",
"filter",
"=",
"''",
",",
"state",
"=",
"''",
",",
"labels",
"=",
"''",
",",
"sort",
"=",
"''",
",",
"direction",
"=",
"''",
",",
"since",
"=",
"None",
",",
"number",
"=",
"-",
"1",
",",
"etag",
"=",
"None",
")",
":",
"url",
"=",
"self",
".",
"_build_url",
"(",
"'orgs'",
",",
"name",
",",
"'issues'",
")",
"# issue_params will handle the since parameter",
"params",
"=",
"issue_params",
"(",
"filter",
",",
"state",
",",
"labels",
",",
"sort",
",",
"direction",
",",
"since",
")",
"return",
"self",
".",
"_iter",
"(",
"int",
"(",
"number",
")",
",",
"url",
",",
"Issue",
",",
"params",
",",
"etag",
")"
] | 51.612903 | 19.516129 |
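A hedged usage sketch for the legacy github3.py 0.x API shown above; the organization name and credentials are hypothetical:

```python
import github3

gh = github3.login('user', password='secret')
for issue in gh.iter_org_issues('myorg', state='open', sort='updated',
                                number=10):
    print(issue.title)
```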
def operations_contain_expected_statuses(operations, expected_statuses):
"""
Checks whether the operation list has an operation with the
expected status, then returns true
If it encounters operations in FAILED or ABORTED state
throw :class:`airflow.exceptions.AirflowException`.
:param operations: (Required) List of transfer operations to check.
:type operations: list[dict]
:param expected_statuses: (Required) status that is expected
See:
https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferOperations#Status
:type expected_statuses: set[str]
:return: If there is an operation with the expected state
in the operation list, returns true,
:raises: airflow.exceptions.AirflowException If it encounters operations
with a state in the list,
:rtype: bool
"""
expected_statuses = (
{expected_statuses} if isinstance(expected_statuses, six.string_types) else set(expected_statuses)
)
if len(operations) == 0:
return False
current_statuses = {operation[METADATA][STATUS] for operation in operations}
if len(current_statuses - set(expected_statuses)) != len(current_statuses):
return True
if len(NEGATIVE_STATUSES - current_statuses) != len(NEGATIVE_STATUSES):
raise AirflowException(
'An unexpected operation status was encountered. Expected: {}'.format(
", ".join(expected_statuses)
)
)
return False
|
[
"def",
"operations_contain_expected_statuses",
"(",
"operations",
",",
"expected_statuses",
")",
":",
"expected_statuses",
"=",
"(",
"{",
"expected_statuses",
"}",
"if",
"isinstance",
"(",
"expected_statuses",
",",
"six",
".",
"string_types",
")",
"else",
"set",
"(",
"expected_statuses",
")",
")",
"if",
"len",
"(",
"operations",
")",
"==",
"0",
":",
"return",
"False",
"current_statuses",
"=",
"{",
"operation",
"[",
"METADATA",
"]",
"[",
"STATUS",
"]",
"for",
"operation",
"in",
"operations",
"}",
"if",
"len",
"(",
"current_statuses",
"-",
"set",
"(",
"expected_statuses",
")",
")",
"!=",
"len",
"(",
"current_statuses",
")",
":",
"return",
"True",
"if",
"len",
"(",
"NEGATIVE_STATUSES",
"-",
"current_statuses",
")",
"!=",
"len",
"(",
"NEGATIVE_STATUSES",
")",
":",
"raise",
"AirflowException",
"(",
"'An unexpected operation status was encountered. Expected: {}'",
".",
"format",
"(",
"\", \"",
".",
"join",
"(",
"expected_statuses",
")",
")",
")",
"return",
"False"
] | 43.351351 | 24.864865 |
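Assuming the function above is importable and that `METADATA == 'metadata'` and `STATUS == 'status'` (as in the surrounding Airflow GCP transfer module), its behavior can be checked with plain dicts:

```python
operations = [
    {'metadata': {'status': 'IN_PROGRESS'}},
    {'metadata': {'status': 'SUCCESS'}},
]
print(operations_contain_expected_statuses(operations, 'SUCCESS'))  # True
print(operations_contain_expected_statuses([], 'SUCCESS'))          # False
```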
def count_quota_handler_factory(count_quota_field):
""" Creates handler that will recalculate count_quota on creation/deletion """
def recalculate_count_quota(sender, instance, **kwargs):
signal = kwargs['signal']
if signal == signals.post_save and kwargs.get('created'):
count_quota_field.add_usage(instance, delta=1)
elif signal == signals.post_delete:
count_quota_field.add_usage(instance, delta=-1, fail_silently=True)
return recalculate_count_quota
|
[
"def",
"count_quota_handler_factory",
"(",
"count_quota_field",
")",
":",
"def",
"recalculate_count_quota",
"(",
"sender",
",",
"instance",
",",
"*",
"*",
"kwargs",
")",
":",
"signal",
"=",
"kwargs",
"[",
"'signal'",
"]",
"if",
"signal",
"==",
"signals",
".",
"post_save",
"and",
"kwargs",
".",
"get",
"(",
"'created'",
")",
":",
"count_quota_field",
".",
"add_usage",
"(",
"instance",
",",
"delta",
"=",
"1",
")",
"elif",
"signal",
"==",
"signals",
".",
"post_delete",
":",
"count_quota_field",
".",
"add_usage",
"(",
"instance",
",",
"delta",
"=",
"-",
"1",
",",
"fail_silently",
"=",
"True",
")",
"return",
"recalculate_count_quota"
] | 45.909091 | 19 |
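A hedged wiring sketch; `MyModel` and `count_quota_field` are hypothetical stand-ins for a quota-tracked Django model and its counter field. `weak=False` keeps the locally created closure alive:

```python
from django.db.models import signals

handler = count_quota_handler_factory(count_quota_field)
signals.post_save.connect(handler, sender=MyModel, weak=False)
signals.post_delete.connect(handler, sender=MyModel, weak=False)
```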
def get(code):
"""
Return an object that corresponds to the given EPSG code.
Currently supported object types are:
- :class:`GeodeticCRS`
- :class:`ProjectedCRS`
- :class:`CartesianCS`
- :class:`UOM`
For example::
>>> print(get(27700))
<ProjectedCRS: 27700, OSGB 1936 / British National Grid>
>>> print(get('4400-cs'))
<CartesianCS: Cartesian 2D CS. Axes: easting, northi..>
>>> print(get(5973))
<CompoundCRS: 5973, ETRS89 / UTM zone 33 + NN2000 height>
"""
instance = _cache.get(code)
if instance is None:
url = '{prefix}{code}.gml?download'.format(prefix=EPSG_IO_URL,
code=code)
xml = requests.get(url).content
root = ET.fromstring(xml)
class_for_tag = {
GML_NS + 'CartesianCS': CartesianCS,
GML_NS + 'GeodeticCRS': GeodeticCRS,
GML_NS + 'ProjectedCRS': ProjectedCRS,
GML_NS + 'CompoundCRS': CompoundCRS,
GML_NS + 'BaseUnit': UOM,
}
if root.tag in class_for_tag:
instance = class_for_tag[root.tag](root)
else:
raise ValueError('Unsupported code type: {}'.format(root.tag))
_cache[code] = instance
return instance
|
[
"def",
"get",
"(",
"code",
")",
":",
"instance",
"=",
"_cache",
".",
"get",
"(",
"code",
")",
"if",
"instance",
"is",
"None",
":",
"url",
"=",
"'{prefix}{code}.gml?download'",
".",
"format",
"(",
"prefix",
"=",
"EPSG_IO_URL",
",",
"code",
"=",
"code",
")",
"xml",
"=",
"requests",
".",
"get",
"(",
"url",
")",
".",
"content",
"root",
"=",
"ET",
".",
"fromstring",
"(",
"xml",
")",
"class_for_tag",
"=",
"{",
"GML_NS",
"+",
"'CartesianCS'",
":",
"CartesianCS",
",",
"GML_NS",
"+",
"'GeodeticCRS'",
":",
"GeodeticCRS",
",",
"GML_NS",
"+",
"'ProjectedCRS'",
":",
"ProjectedCRS",
",",
"GML_NS",
"+",
"'CompoundCRS'",
":",
"CompoundCRS",
",",
"GML_NS",
"+",
"'BaseUnit'",
":",
"UOM",
",",
"}",
"if",
"root",
".",
"tag",
"in",
"class_for_tag",
":",
"instance",
"=",
"class_for_tag",
"[",
"root",
".",
"tag",
"]",
"(",
"root",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Unsupported code type: {}'",
".",
"format",
"(",
"root",
".",
"tag",
")",
")",
"_cache",
"[",
"code",
"]",
"=",
"instance",
"return",
"instance"
] | 33.102564 | 16.74359 |
def supply_and_demand(
lcm, choosers, alternatives, alt_segmenter, price_col,
base_multiplier=None, clip_change_low=0.75, clip_change_high=1.25,
iterations=5, multiplier_func=None):
"""
Adjust real estate prices to compensate for supply and demand effects.
Parameters
----------
lcm : LocationChoiceModel
Used to calculate the probability of agents choosing among
alternatives. Must be fully configured and fitted.
choosers : pandas.DataFrame
alternatives : pandas.DataFrame
alt_segmenter : str, array, or pandas.Series
Will be used to segment alternatives and probabilities to do
comparisons of supply and demand by submarket.
If a string, it is expected to be the name of a column
in `alternatives`. If a Series it should have the same index
as `alternatives`.
price_col : str
The name of the column in `alternatives` that corresponds to price.
This column is what is adjusted by this model.
base_multiplier : pandas.Series, optional
A series describing a starting multiplier for submarket prices.
Index should be submarket IDs.
clip_change_low : float, optional
The minimum amount by which to multiply prices each iteration.
clip_change_high : float, optional
The maximum amount by which to multiply prices each iteration.
iterations : int, optional
Number of times to update prices based on supply/demand comparisons.
multiplier_func : function (returns Series, boolean)
A function which takes separate demand and supply Series
and returns a tuple where the first item is a Series with the
ratio of new price to old price (all indexes should be the same) -
by default the ratio of demand to supply is the ratio of the new
price to the old price. The second return value is a
boolean which when True tells this module to stop looping (that
convergence has been satisfied)
Returns
-------
new_prices : pandas.Series
Equivalent of the `price_col` in `alternatives`.
submarkets_ratios : pandas.Series
Price adjustment ratio for each submarket. If `base_multiplier` is
        given this will be a cumulative multiplier including the
`base_multiplier` and the multipliers calculated for this year.
"""
logger.debug('start: calculating supply and demand price adjustment')
# copy alternatives so we don't modify the user's original
alternatives = alternatives.copy()
# if alt_segmenter is a string, get the actual column for segmenting demand
if isinstance(alt_segmenter, str):
alt_segmenter = alternatives[alt_segmenter]
    elif isinstance(alt_segmenter, np.ndarray):
alt_segmenter = pd.Series(alt_segmenter, index=alternatives.index)
choosers, alternatives = lcm.apply_predict_filters(choosers, alternatives)
alt_segmenter = alt_segmenter.loc[alternatives.index]
# check base ratio and apply it to prices if given
if base_multiplier is not None:
bm = base_multiplier.loc[alt_segmenter]
bm.index = alt_segmenter.index
alternatives[price_col] = alternatives[price_col] * bm
base_multiplier = base_multiplier.copy()
for _ in range(iterations):
alts_muliplier, submarkets_multiplier, finished = _calculate_adjustment(
lcm, choosers, alternatives, alt_segmenter,
clip_change_low, clip_change_high, multiplier_func=multiplier_func)
alternatives[price_col] = alternatives[price_col] * alts_muliplier
# might need to initialize this for holding cumulative multiplier
if base_multiplier is None:
base_multiplier = pd.Series(
np.ones(len(submarkets_multiplier)),
index=submarkets_multiplier.index)
base_multiplier *= submarkets_multiplier
if finished:
break
logger.debug('finish: calculating supply and demand price adjustment')
return alternatives[price_col], base_multiplier
|
[
"def",
"supply_and_demand",
"(",
"lcm",
",",
"choosers",
",",
"alternatives",
",",
"alt_segmenter",
",",
"price_col",
",",
"base_multiplier",
"=",
"None",
",",
"clip_change_low",
"=",
"0.75",
",",
"clip_change_high",
"=",
"1.25",
",",
"iterations",
"=",
"5",
",",
"multiplier_func",
"=",
"None",
")",
":",
"logger",
".",
"debug",
"(",
"'start: calculating supply and demand price adjustment'",
")",
"# copy alternatives so we don't modify the user's original",
"alternatives",
"=",
"alternatives",
".",
"copy",
"(",
")",
"# if alt_segmenter is a string, get the actual column for segmenting demand",
"if",
"isinstance",
"(",
"alt_segmenter",
",",
"str",
")",
":",
"alt_segmenter",
"=",
"alternatives",
"[",
"alt_segmenter",
"]",
"elif",
"isinstance",
"(",
"alt_segmenter",
",",
"np",
".",
"array",
")",
":",
"alt_segmenter",
"=",
"pd",
".",
"Series",
"(",
"alt_segmenter",
",",
"index",
"=",
"alternatives",
".",
"index",
")",
"choosers",
",",
"alternatives",
"=",
"lcm",
".",
"apply_predict_filters",
"(",
"choosers",
",",
"alternatives",
")",
"alt_segmenter",
"=",
"alt_segmenter",
".",
"loc",
"[",
"alternatives",
".",
"index",
"]",
"# check base ratio and apply it to prices if given",
"if",
"base_multiplier",
"is",
"not",
"None",
":",
"bm",
"=",
"base_multiplier",
".",
"loc",
"[",
"alt_segmenter",
"]",
"bm",
".",
"index",
"=",
"alt_segmenter",
".",
"index",
"alternatives",
"[",
"price_col",
"]",
"=",
"alternatives",
"[",
"price_col",
"]",
"*",
"bm",
"base_multiplier",
"=",
"base_multiplier",
".",
"copy",
"(",
")",
"for",
"_",
"in",
"range",
"(",
"iterations",
")",
":",
"alts_muliplier",
",",
"submarkets_multiplier",
",",
"finished",
"=",
"_calculate_adjustment",
"(",
"lcm",
",",
"choosers",
",",
"alternatives",
",",
"alt_segmenter",
",",
"clip_change_low",
",",
"clip_change_high",
",",
"multiplier_func",
"=",
"multiplier_func",
")",
"alternatives",
"[",
"price_col",
"]",
"=",
"alternatives",
"[",
"price_col",
"]",
"*",
"alts_muliplier",
"# might need to initialize this for holding cumulative multiplier",
"if",
"base_multiplier",
"is",
"None",
":",
"base_multiplier",
"=",
"pd",
".",
"Series",
"(",
"np",
".",
"ones",
"(",
"len",
"(",
"submarkets_multiplier",
")",
")",
",",
"index",
"=",
"submarkets_multiplier",
".",
"index",
")",
"base_multiplier",
"*=",
"submarkets_multiplier",
"if",
"finished",
":",
"break",
"logger",
".",
"debug",
"(",
"'finish: calculating supply and demand price adjustment'",
")",
"return",
"alternatives",
"[",
"price_col",
"]",
",",
"base_multiplier"
] | 44.5 | 21.388889 |
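The default price adjustment described in the docstring (demand/supply ratio per submarket, clipped to the configured bounds) can be illustrated standalone:

```python
import pandas as pd

demand = pd.Series([140.0, 60.0], index=['downtown', 'suburb'])
supply = pd.Series([100.0, 100.0], index=['downtown', 'suburb'])
multiplier = (demand / supply).clip(0.75, 1.25)
print(multiplier)  # downtown 1.25 (capped), suburb 0.75 (floored)
```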
def get_groups(self):
"""Get groups via provisioning API.
If you get back an error 999, then the provisioning API is not enabled.
:returns: list of groups
:raises: HTTPResponseError in case an HTTP error status was returned
"""
res = self._make_ocs_request(
'GET',
self.OCS_SERVICE_CLOUD,
'groups'
)
if res.status_code == 200:
tree = ET.fromstring(res.content)
groups = [x.text for x in tree.findall('data/groups/element')]
return groups
raise HTTPResponseError(res)
|
[
"def",
"get_groups",
"(",
"self",
")",
":",
"res",
"=",
"self",
".",
"_make_ocs_request",
"(",
"'GET'",
",",
"self",
".",
"OCS_SERVICE_CLOUD",
",",
"'groups'",
")",
"if",
"res",
".",
"status_code",
"==",
"200",
":",
"tree",
"=",
"ET",
".",
"fromstring",
"(",
"res",
".",
"content",
")",
"groups",
"=",
"[",
"x",
".",
"text",
"for",
"x",
"in",
"tree",
".",
"findall",
"(",
"'data/groups/element'",
")",
"]",
"return",
"groups",
"raise",
"HTTPResponseError",
"(",
"res",
")"
] | 28.333333 | 21.285714 |
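A hedged usage sketch, assuming pyocclient is installed and the target server has the provisioning API enabled; the URL and credentials are hypothetical:

```python
import owncloud

oc = owncloud.Client('https://cloud.example.com')
oc.login('admin', 'secret')
print(oc.get_groups())
```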
def quick_response(self, status_code):
""" Quickly construct response using a status code """
translator = Translator(environ=self.environ)
if status_code == 404:
self.status(404)
self.message(translator.trans('http_messages.404'))
elif status_code == 401:
self.status(401)
self.message(translator.trans('http_messages.401'))
elif status_code == 400:
self.status(400)
self.message(translator.trans('http_messages.400'))
elif status_code == 200:
self.status(200)
self.message(translator.trans('http_messages.200'))
|
[
"def",
"quick_response",
"(",
"self",
",",
"status_code",
")",
":",
"translator",
"=",
"Translator",
"(",
"environ",
"=",
"self",
".",
"environ",
")",
"if",
"status_code",
"==",
"404",
":",
"self",
".",
"status",
"(",
"404",
")",
"self",
".",
"message",
"(",
"translator",
".",
"trans",
"(",
"'http_messages.404'",
")",
")",
"elif",
"status_code",
"==",
"401",
":",
"self",
".",
"status",
"(",
"401",
")",
"self",
".",
"message",
"(",
"translator",
".",
"trans",
"(",
"'http_messages.401'",
")",
")",
"elif",
"status_code",
"==",
"400",
":",
"self",
".",
"status",
"(",
"400",
")",
"self",
".",
"message",
"(",
"translator",
".",
"trans",
"(",
"'http_messages.400'",
")",
")",
"elif",
"status_code",
"==",
"200",
":",
"self",
".",
"status",
"(",
"200",
")",
"self",
".",
"message",
"(",
"translator",
".",
"trans",
"(",
"'http_messages.200'",
")",
")"
] | 43.8 | 12.266667 |
def get_kwargs_index(target) -> int:
"""
Returns the index of the "**kwargs" parameter if such a parameter exists in
the function arguments or -1 otherwise.
:param target:
The target function for which the kwargs index should be determined
:return:
The keyword arguments index if it exists or -1 if not
"""
code = target.__code__
if not bool(code.co_flags & inspect.CO_VARKEYWORDS):
return -1
return (
code.co_argcount +
code.co_kwonlyargcount +
(1 if code.co_flags & inspect.CO_VARARGS else 0)
)
|
[
"def",
"get_kwargs_index",
"(",
"target",
")",
"->",
"int",
":",
"code",
"=",
"target",
".",
"__code__",
"if",
"not",
"bool",
"(",
"code",
".",
"co_flags",
"&",
"inspect",
".",
"CO_VARKEYWORDS",
")",
":",
"return",
"-",
"1",
"return",
"(",
"code",
".",
"co_argcount",
"+",
"code",
".",
"co_kwonlyargcount",
"+",
"(",
"1",
"if",
"code",
".",
"co_flags",
"&",
"inspect",
".",
"CO_VARARGS",
"else",
"0",
")",
")"
] | 27.047619 | 22.190476 |
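Assuming `get_kwargs_index` from above is in scope, the index arithmetic is easy to verify:

```python
def f(a, b, *args, **kwargs):
    pass

def g(a, b=1):
    pass

print(get_kwargs_index(f))  # 3 -> a, b, the *args slot, then **kwargs
print(get_kwargs_index(g))  # -1 -> no **kwargs parameter
```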
def get_topic_keyword_dictionary():
"""
Opens the topic-keyword map resource file and returns the corresponding python dictionary.
- Input: - file_path: The path pointing to the topic-keyword map resource file.
- Output: - topic_set: A topic to keyword python dictionary.
"""
topic_keyword_dictionary = dict()
file_row_gen = get_file_row_generator(get_package_path() + "/twitter/res/topics/topic_keyword_mapping" + ".txt",
",",
"utf-8")
for file_row in file_row_gen:
topic_keyword_dictionary[file_row[0]] = set([keyword for keyword in file_row[1:]])
return topic_keyword_dictionary
|
[
"def",
"get_topic_keyword_dictionary",
"(",
")",
":",
"topic_keyword_dictionary",
"=",
"dict",
"(",
")",
"file_row_gen",
"=",
"get_file_row_generator",
"(",
"get_package_path",
"(",
")",
"+",
"\"/twitter/res/topics/topic_keyword_mapping\"",
"+",
"\".txt\"",
",",
"\",\"",
",",
"\"utf-8\"",
")",
"for",
"file_row",
"in",
"file_row_gen",
":",
"topic_keyword_dictionary",
"[",
"file_row",
"[",
"0",
"]",
"]",
"=",
"set",
"(",
"[",
"keyword",
"for",
"keyword",
"in",
"file_row",
"[",
"1",
":",
"]",
"]",
")",
"return",
"topic_keyword_dictionary"
] | 43.625 | 25.25 |
def jpegrescan(ext_args):
"""Run the EXTERNAL program jpegrescan."""
args = copy.copy(_JPEGRESCAN_ARGS)
if Settings.jpegrescan_multithread:
args += ['-t']
if Settings.destroy_metadata:
args += ['-s']
args += [ext_args.old_filename, ext_args.new_filename]
extern.run_ext(args)
return _JPEG_FORMAT
|
[
"def",
"jpegrescan",
"(",
"ext_args",
")",
":",
"args",
"=",
"copy",
".",
"copy",
"(",
"_JPEGRESCAN_ARGS",
")",
"if",
"Settings",
".",
"jpegrescan_multithread",
":",
"args",
"+=",
"[",
"'-t'",
"]",
"if",
"Settings",
".",
"destroy_metadata",
":",
"args",
"+=",
"[",
"'-s'",
"]",
"args",
"+=",
"[",
"ext_args",
".",
"old_filename",
",",
"ext_args",
".",
"new_filename",
"]",
"extern",
".",
"run_ext",
"(",
"args",
")",
"return",
"_JPEG_FORMAT"
] | 33 | 11.2 |
def get_image(self, image_id_or_slug):
"""
Return a Image by its ID/Slug.
"""
return Image.get_object(
api_token=self.token,
image_id_or_slug=image_id_or_slug,
)
|
[
"def",
"get_image",
"(",
"self",
",",
"image_id_or_slug",
")",
":",
"return",
"Image",
".",
"get_object",
"(",
"api_token",
"=",
"self",
".",
"token",
",",
"image_id_or_slug",
"=",
"image_id_or_slug",
",",
")"
] | 27.75 | 7 |
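A hedged usage sketch, assuming python-digitalocean is installed; the token and slug are hypothetical:

```python
import digitalocean

manager = digitalocean.Manager(token='YOUR_API_TOKEN')
image = manager.get_image('ubuntu-20-04-x64')
print(image.id, image.name)
```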
def _get_sentences_dict(self):
"""
Returns sentence objects
:return: order dict of sentences
:rtype: collections.OrderedDict
"""
if self._sentences_dict is None:
sentences = [Sentence(element) for element in self._xml.xpath('/root/document/sentences/sentence')]
self._sentences_dict = OrderedDict([(s.id, s) for s in sentences])
return self._sentences_dict
|
[
"def",
"_get_sentences_dict",
"(",
"self",
")",
":",
"if",
"self",
".",
"_sentences_dict",
"is",
"None",
":",
"sentences",
"=",
"[",
"Sentence",
"(",
"element",
")",
"for",
"element",
"in",
"self",
".",
"_xml",
".",
"xpath",
"(",
"'/root/document/sentences/sentence'",
")",
"]",
"self",
".",
"_sentences_dict",
"=",
"OrderedDict",
"(",
"[",
"(",
"s",
".",
"id",
",",
"s",
")",
"for",
"s",
"in",
"sentences",
"]",
")",
"return",
"self",
".",
"_sentences_dict"
] | 35.583333 | 17.75 |
def delete_library_value(self, key: str) -> None:
"""Delete the library value for the given key.
Please consult the developer documentation for a list of valid keys.
.. versionadded:: 1.0
Scriptable: Yes
"""
desc = Metadata.session_key_map.get(key)
if desc is not None:
field_id = desc['path'][-1]
setattr(ApplicationData.get_session_metadata_model(), field_id, None)
return
raise KeyError()
|
[
"def",
"delete_library_value",
"(",
"self",
",",
"key",
":",
"str",
")",
"->",
"None",
":",
"desc",
"=",
"Metadata",
".",
"session_key_map",
".",
"get",
"(",
"key",
")",
"if",
"desc",
"is",
"not",
"None",
":",
"field_id",
"=",
"desc",
"[",
"'path'",
"]",
"[",
"-",
"1",
"]",
"setattr",
"(",
"ApplicationData",
".",
"get_session_metadata_model",
"(",
")",
",",
"field_id",
",",
"None",
")",
"return",
"raise",
"KeyError",
"(",
")"
] | 32 | 19.533333 |
def _sample_item(self, **kwargs):
"""Sample an item from the pool according to the instrumental
distribution
"""
t = self.t_
# Update instrumental distribution
self._calc_inst_pmf()
if self.record_inst_hist:
inst_pmf = self._inst_pmf[:,t]
else:
inst_pmf = self._inst_pmf
# Sample label and record weight
loc, stratum_idx = self.strata.sample(pmf = inst_pmf)
weight = self.strata.weights_[stratum_idx]/inst_pmf[stratum_idx]
return loc, weight, {'stratum': stratum_idx}
|
[
"def",
"_sample_item",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"t",
"=",
"self",
".",
"t_",
"# Update instrumental distribution",
"self",
".",
"_calc_inst_pmf",
"(",
")",
"if",
"self",
".",
"record_inst_hist",
":",
"inst_pmf",
"=",
"self",
".",
"_inst_pmf",
"[",
":",
",",
"t",
"]",
"else",
":",
"inst_pmf",
"=",
"self",
".",
"_inst_pmf",
"# Sample label and record weight",
"loc",
",",
"stratum_idx",
"=",
"self",
".",
"strata",
".",
"sample",
"(",
"pmf",
"=",
"inst_pmf",
")",
"weight",
"=",
"self",
".",
"strata",
".",
"weights_",
"[",
"stratum_idx",
"]",
"/",
"inst_pmf",
"[",
"stratum_idx",
"]",
"return",
"loc",
",",
"weight",
",",
"{",
"'stratum'",
":",
"stratum_idx",
"}"
] | 30.157895 | 17.105263 |
def control_loop():
'''Main loop, updating the capture agent state.
'''
set_service_status(Service.AGENTSTATE, ServiceStatus.BUSY)
notify.notify('READY=1')
notify.notify('STATUS=Running')
while not terminate():
notify.notify('WATCHDOG=1')
update_agent_state()
next_update = timestamp() + config()['agent']['update_frequency']
while not terminate() and timestamp() < next_update:
time.sleep(0.1)
logger.info('Shutting down agentstate service')
set_service_status(Service.AGENTSTATE, ServiceStatus.STOPPED)
|
[
"def",
"control_loop",
"(",
")",
":",
"set_service_status",
"(",
"Service",
".",
"AGENTSTATE",
",",
"ServiceStatus",
".",
"BUSY",
")",
"notify",
".",
"notify",
"(",
"'READY=1'",
")",
"notify",
".",
"notify",
"(",
"'STATUS=Running'",
")",
"while",
"not",
"terminate",
"(",
")",
":",
"notify",
".",
"notify",
"(",
"'WATCHDOG=1'",
")",
"update_agent_state",
"(",
")",
"next_update",
"=",
"timestamp",
"(",
")",
"+",
"config",
"(",
")",
"[",
"'agent'",
"]",
"[",
"'update_frequency'",
"]",
"while",
"not",
"terminate",
"(",
")",
"and",
"timestamp",
"(",
")",
"<",
"next_update",
":",
"time",
".",
"sleep",
"(",
"0.1",
")",
"logger",
".",
"info",
"(",
"'Shutting down agentstate service'",
")",
"set_service_status",
"(",
"Service",
".",
"AGENTSTATE",
",",
"ServiceStatus",
".",
"STOPPED",
")"
] | 35.4375 | 19.8125 |
def round_up(self, num):
"""Determine the length to use for this waveform by rounding.
Parameters
----------
num : int
Proposed size of waveform in seconds
Returns
-------
size: int
The rounded size to use for the waveform buffer in seconds. This
            is calculated using an internal `increment` attribute, which determines
the discreteness of the rounding.
"""
inc = self.increment
size = np.ceil(num / self.sample_rate / inc) * self.sample_rate * inc
return size
|
[
"def",
"round_up",
"(",
"self",
",",
"num",
")",
":",
"inc",
"=",
"self",
".",
"increment",
"size",
"=",
"np",
".",
"ceil",
"(",
"num",
"/",
"self",
".",
"sample_rate",
"/",
"inc",
")",
"*",
"self",
".",
"sample_rate",
"*",
"inc",
"return",
"size"
] | 31.722222 | 21.611111 |
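The rounding rule can be checked standalone; with `sample_rate = 4096` Hz and `increment = 8` s (hypothetical values), 70 s of data is padded up to the next 8 s boundary:

```python
import numpy as np

sample_rate, increment = 4096, 8
num = 70 * sample_rate
size = np.ceil(num / sample_rate / increment) * sample_rate * increment
print(size / sample_rate)  # 72.0 seconds
```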
def render(self, name, value, attrs=None, multi=False, renderer=None):
"""
Django <= 1.10 variant.
"""
DJANGO_111_OR_UP = (VERSION[0] == 1 and VERSION[1] >= 11) or (
VERSION[0] >= 2
)
if DJANGO_111_OR_UP:
return super(DynamicRawIDWidget, self).render(
name, value, attrs, renderer=renderer
)
if attrs is None:
attrs = {}
related_url = reverse(
'admin:{0}_{1}_changelist'.format(
self.rel.to._meta.app_label,
self.rel.to._meta.object_name.lower(),
),
current_app=self.admin_site.name,
)
params = self.url_parameters()
if params:
url = u'?' + u'&'.join(
[u'{0}={1}'.format(k, v) for k, v in params.items()]
)
else:
url = u''
if "class" not in attrs:
attrs[
'class'
] = (
'vForeignKeyRawIdAdminField'
) # The JavaScript looks for this hook.
app_name = self.rel.to._meta.app_label.strip()
model_name = self.rel.to._meta.object_name.lower().strip()
hidden_input = super(widgets.ForeignKeyRawIdWidget, self).render(
name, value, attrs
)
extra_context = {
'hidden_input': hidden_input,
'name': name,
'app_name': app_name,
'model_name': model_name,
'related_url': related_url,
'url': url,
}
return render_to_string(
'dynamic_raw_id/admin/widgets/dynamic_raw_id_field.html',
extra_context,
)
|
[
"def",
"render",
"(",
"self",
",",
"name",
",",
"value",
",",
"attrs",
"=",
"None",
",",
"multi",
"=",
"False",
",",
"renderer",
"=",
"None",
")",
":",
"DJANGO_111_OR_UP",
"=",
"(",
"VERSION",
"[",
"0",
"]",
"==",
"1",
"and",
"VERSION",
"[",
"1",
"]",
">=",
"11",
")",
"or",
"(",
"VERSION",
"[",
"0",
"]",
">=",
"2",
")",
"if",
"DJANGO_111_OR_UP",
":",
"return",
"super",
"(",
"DynamicRawIDWidget",
",",
"self",
")",
".",
"render",
"(",
"name",
",",
"value",
",",
"attrs",
",",
"renderer",
"=",
"renderer",
")",
"if",
"attrs",
"is",
"None",
":",
"attrs",
"=",
"{",
"}",
"related_url",
"=",
"reverse",
"(",
"'admin:{0}_{1}_changelist'",
".",
"format",
"(",
"self",
".",
"rel",
".",
"to",
".",
"_meta",
".",
"app_label",
",",
"self",
".",
"rel",
".",
"to",
".",
"_meta",
".",
"object_name",
".",
"lower",
"(",
")",
",",
")",
",",
"current_app",
"=",
"self",
".",
"admin_site",
".",
"name",
",",
")",
"params",
"=",
"self",
".",
"url_parameters",
"(",
")",
"if",
"params",
":",
"url",
"=",
"u'?'",
"+",
"u'&'",
".",
"join",
"(",
"[",
"u'{0}={1}'",
".",
"format",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"params",
".",
"items",
"(",
")",
"]",
")",
"else",
":",
"url",
"=",
"u''",
"if",
"\"class\"",
"not",
"in",
"attrs",
":",
"attrs",
"[",
"'class'",
"]",
"=",
"(",
"'vForeignKeyRawIdAdminField'",
")",
"# The JavaScript looks for this hook.",
"app_name",
"=",
"self",
".",
"rel",
".",
"to",
".",
"_meta",
".",
"app_label",
".",
"strip",
"(",
")",
"model_name",
"=",
"self",
".",
"rel",
".",
"to",
".",
"_meta",
".",
"object_name",
".",
"lower",
"(",
")",
".",
"strip",
"(",
")",
"hidden_input",
"=",
"super",
"(",
"widgets",
".",
"ForeignKeyRawIdWidget",
",",
"self",
")",
".",
"render",
"(",
"name",
",",
"value",
",",
"attrs",
")",
"extra_context",
"=",
"{",
"'hidden_input'",
":",
"hidden_input",
",",
"'name'",
":",
"name",
",",
"'app_name'",
":",
"app_name",
",",
"'model_name'",
":",
"model_name",
",",
"'related_url'",
":",
"related_url",
",",
"'url'",
":",
"url",
",",
"}",
"return",
"render_to_string",
"(",
"'dynamic_raw_id/admin/widgets/dynamic_raw_id_field.html'",
",",
"extra_context",
",",
")"
] | 30.777778 | 18.037037 |
def increment_extension_daily_stat(self, publisher_name, extension_name, version, stat_type):
"""IncrementExtensionDailyStat.
[Preview API] Increments a daily statistic associated with the extension
:param str publisher_name: Name of the publisher
:param str extension_name: Name of the extension
:param str version: Version of the extension
:param str stat_type: Type of stat to increment
"""
route_values = {}
if publisher_name is not None:
route_values['publisherName'] = self._serialize.url('publisher_name', publisher_name, 'str')
if extension_name is not None:
route_values['extensionName'] = self._serialize.url('extension_name', extension_name, 'str')
if version is not None:
route_values['version'] = self._serialize.url('version', version, 'str')
query_parameters = {}
if stat_type is not None:
query_parameters['statType'] = self._serialize.query('stat_type', stat_type, 'str')
self._send(http_method='POST',
location_id='4fa7adb6-ca65-4075-a232-5f28323288ea',
version='5.0-preview.1',
route_values=route_values,
query_parameters=query_parameters)
|
[
"def",
"increment_extension_daily_stat",
"(",
"self",
",",
"publisher_name",
",",
"extension_name",
",",
"version",
",",
"stat_type",
")",
":",
"route_values",
"=",
"{",
"}",
"if",
"publisher_name",
"is",
"not",
"None",
":",
"route_values",
"[",
"'publisherName'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'publisher_name'",
",",
"publisher_name",
",",
"'str'",
")",
"if",
"extension_name",
"is",
"not",
"None",
":",
"route_values",
"[",
"'extensionName'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'extension_name'",
",",
"extension_name",
",",
"'str'",
")",
"if",
"version",
"is",
"not",
"None",
":",
"route_values",
"[",
"'version'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'version'",
",",
"version",
",",
"'str'",
")",
"query_parameters",
"=",
"{",
"}",
"if",
"stat_type",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'statType'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'stat_type'",
",",
"stat_type",
",",
"'str'",
")",
"self",
".",
"_send",
"(",
"http_method",
"=",
"'POST'",
",",
"location_id",
"=",
"'4fa7adb6-ca65-4075-a232-5f28323288ea'",
",",
"version",
"=",
"'5.0-preview.1'",
",",
"route_values",
"=",
"route_values",
",",
"query_parameters",
"=",
"query_parameters",
")"
] | 55.304348 | 20.782609 |
def listdir(search_base, followlinks=False, filter='*',
relpath=False, bestprefix=False, system=NIST):
"""This is a generator which recurses the directory tree
`search_base`, yielding 2-tuples of:
* The absolute/relative path to a discovered file
* A bitmath instance representing the "apparent size" of the file.
- `search_base` - The directory to begin walking down.
- `followlinks` - Whether or not to follow symbolic links to directories
- `filter` - A glob (see :py:mod:`fnmatch`) to filter results with
(default: ``*``, everything)
- `relpath` - ``True`` to return the relative path from `pwd` or
``False`` (default) to return the fully qualified path
- ``bestprefix`` - set to ``False`` to get ``bitmath.Byte``
instances back instead.
- `system` - Provide a preferred unit system by setting `system`
to either ``bitmath.NIST`` (default) or ``bitmath.SI``.
.. note:: This function does NOT return tuples for directory entities.
.. note:: Symlinks to **files** are followed automatically
"""
for root, dirs, files in os.walk(search_base, followlinks=followlinks):
for name in fnmatch.filter(files, filter):
_path = os.path.join(root, name)
if relpath:
# RELATIVE path
_return_path = os.path.relpath(_path, '.')
else:
# REAL path
_return_path = os.path.realpath(_path)
if followlinks:
yield (_return_path, getsize(_path, bestprefix=bestprefix, system=system))
else:
if os.path.isdir(_path) or os.path.islink(_path):
pass
else:
yield (_return_path, getsize(_path, bestprefix=bestprefix, system=system))
|
[
"def",
"listdir",
"(",
"search_base",
",",
"followlinks",
"=",
"False",
",",
"filter",
"=",
"'*'",
",",
"relpath",
"=",
"False",
",",
"bestprefix",
"=",
"False",
",",
"system",
"=",
"NIST",
")",
":",
"for",
"root",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"search_base",
",",
"followlinks",
"=",
"followlinks",
")",
":",
"for",
"name",
"in",
"fnmatch",
".",
"filter",
"(",
"files",
",",
"filter",
")",
":",
"_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"name",
")",
"if",
"relpath",
":",
"# RELATIVE path",
"_return_path",
"=",
"os",
".",
"path",
".",
"relpath",
"(",
"_path",
",",
"'.'",
")",
"else",
":",
"# REAL path",
"_return_path",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"_path",
")",
"if",
"followlinks",
":",
"yield",
"(",
"_return_path",
",",
"getsize",
"(",
"_path",
",",
"bestprefix",
"=",
"bestprefix",
",",
"system",
"=",
"system",
")",
")",
"else",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"_path",
")",
"or",
"os",
".",
"path",
".",
"islink",
"(",
"_path",
")",
":",
"pass",
"else",
":",
"yield",
"(",
"_return_path",
",",
"getsize",
"(",
"_path",
",",
"bestprefix",
"=",
"bestprefix",
",",
"system",
"=",
"system",
")",
")"
] | 43 | 22.536585 |
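A hedged usage sketch, assuming the bitmath package is installed:

```python
import bitmath

for path, size in bitmath.listdir('.', filter='*.py', relpath=True,
                                  bestprefix=True):
    print(path, size)
```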
def connect_async(self, connection_id, connection_string, callback, retries=4):
"""Connect to a device by its connection_string
This function asynchronously connects to a device by its BLE address passed in the
connection_string parameter and calls callback when finished. Callback is called
on either success or failure with the signature:
callback(conn_id: int, result: bool, value: None)
The optional retries argument specifies how many times we should retry the connection
        if the connection fails due to an early disconnect. Early disconnects are expected BLE failure
modes in busy environments where the slave device misses the connection packet and the master
therefore fails immediately. Retrying a few times should succeed in this case.
Args:
connection_string (string): A BLE address is XX:YY:ZZ:AA:BB:CC format
connection_id (int): A unique integer set by the caller for referring to this connection
once created
callback (callable): A callback function called when the connection has succeeded or
failed
retries (int): The number of attempts to connect to this device that can end in early disconnect
before we give up and report that we could not connect. A retry count of 0 will mean that
we fail as soon as we receive the first early disconnect.
"""
        context = {}
        context['connection_id'] = connection_id
        context['callback'] = callback
        context['retries'] = retries
        context['connection_string'] = connection_string

        # Don't scan while we attempt to connect to this device
        if self.scanning:
            self.stop_scan()

        with self.count_lock:
            self.connecting_count += 1

        self._command_task.async_command(['_connect', connection_string],
                                         self._on_connection_finished, context)
|
[
"def",
"connect_async",
"(",
"self",
",",
"connection_id",
",",
"connection_string",
",",
"callback",
",",
"retries",
"=",
"4",
")",
":",
"context",
"=",
"{",
"}",
"context",
"[",
"'connection_id'",
"]",
"=",
"connection_id",
"context",
"[",
"'callback'",
"]",
"=",
"callback",
"context",
"[",
"'retries'",
"]",
"=",
"retries",
"context",
"[",
"'connection_string'",
"]",
"=",
"connection_string",
"# Don't scan while we attempt to connect to this device",
"if",
"self",
".",
"scanning",
":",
"self",
".",
"stop_scan",
"(",
")",
"with",
"self",
".",
"count_lock",
":",
"self",
".",
"connecting_count",
"+=",
"1",
"self",
".",
"_command_task",
".",
"async_command",
"(",
"[",
"'_connect'",
",",
"connection_string",
"]",
",",
"self",
".",
"_on_connection_finished",
",",
"context",
")"
] | 50.794872 | 31.820513 |
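A hedged sketch of driving ``connect_async``, given an ``adapter`` instance of the class that defines it (construction is omitted because it is hardware-dependent); the BLE address is illustrative:

```python
import threading

done = threading.Event()

def on_connect(conn_id, result, value):
    # Callback signature from the docstring; value is always None here.
    print("connection %d %s" % (conn_id, "succeeded" if result else "failed"))
    done.set()

adapter.connect_async(1, "AA:BB:CC:DD:EE:FF", on_connect, retries=4)
done.wait(timeout=10.0)  # block until the callback fires or we give up
```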
def search_repositories(query, sort=None, order=None, per_page=None,
                        text_match=False, number=-1, etag=None):
    """Find repositories via various criteria.

    .. warning::

        You will only be able to make 5 calls with this or other search
        functions. To raise the rate-limit on this set of endpoints, create
        an authenticated :class:`GitHub <github3.github.GitHub>` Session
        with ``login``.

    The query can contain any combination of the following supported
    qualifiers:

    - ``in`` Qualifies which fields are searched. With this qualifier you
      can restrict the search to just the repository name, description,
      readme, or any combination of these.
    - ``size`` Finds repositories that match a certain size (in
      kilobytes).
    - ``forks`` Filters repositories based on the number of forks, and/or
      whether forked repositories should be included in the results at
      all.
    - ``created`` or ``pushed`` Filters repositories based on times of
      creation, or when they were last updated. Format: ``YYYY-MM-DD``.
      Examples: ``created:<2011``, ``pushed:<2013-02``,
      ``pushed:>=2013-03-06``
    - ``user`` or ``repo`` Limits searches to a specific user or
      repository.
    - ``language`` Searches repositories based on the language they're
      written in.
    - ``stars`` Searches repositories based on the number of stars.

    For more information about these qualifiers, see: http://git.io/4Z8AkA

    :param str query: (required), a valid query as described above, e.g.,
        ``tetris language:assembly``
    :param str sort: (optional), how the results should be sorted;
        options: ``stars``, ``forks``, ``updated``; default: best match
    :param str order: (optional), the direction of the sorted results,
        options: ``asc``, ``desc``; default: ``desc``
    :param int per_page: (optional)
    :param bool text_match: (optional), if True, return matching search
        terms. See http://git.io/4ct1eQ for more information
    :param int number: (optional), number of repositories to return.
        Default: -1, returns all available repositories
    :param str etag: (optional), previous ETag header value
    :return: generator of :class:`Repository <github3.repos.Repository>`
    """
    return gh.search_repositories(query, sort, order, per_page, text_match,
                                  number, etag)
|
[
"def",
"search_repositories",
"(",
"query",
",",
"sort",
"=",
"None",
",",
"order",
"=",
"None",
",",
"per_page",
"=",
"None",
",",
"text_match",
"=",
"False",
",",
"number",
"=",
"-",
"1",
",",
"etag",
"=",
"None",
")",
":",
"return",
"gh",
".",
"search_repositories",
"(",
"query",
",",
"sort",
",",
"order",
",",
"per_page",
",",
"text_match",
",",
"number",
",",
"etag",
")"
] | 47.76 | 25.54 |
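A minimal sketch following the docstring above; the query string is the docstring's own example, and the token value is a placeholder for a real personal access token:

```python
import github3

# An authenticated session raises the search rate limit, per the warning.
gh = github3.login(token='<token>')
for repo in gh.search_repositories('tetris language:assembly',
                                   sort='stars', order='desc', number=5):
    print(repo)
```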
from typing import Any, Optional

def SoS_exec(script: str, _dict: Optional[dict] = None,
             return_result: bool = True) -> Any:
    '''Execute a script and return the value of its trailing expression,
    or None when `return_result` is False or the script does not end in
    an expression.'''
    if _dict is None:
        _dict = env.sos_dict.dict()
    if not return_result:
        exec(
            compile(script, filename=stmtHash.hash(script), mode='exec'), _dict)
        return None
    try:
        stmts = list(ast.iter_child_nodes(ast.parse(script)))
        if not stmts:
            return
        if isinstance(stmts[-1], ast.Expr):
            # the last one is an expression and we will try to return the results
            # so we first execute the previous statements
            if len(stmts) > 1:
                exec(
                    compile(
                        ast.Module(body=stmts[:-1]),
                        filename=stmtHash.hash(script),
                        mode="exec"), _dict)
            # then we eval the last one
            res = eval(
                compile(
                    ast.Expression(body=stmts[-1].value),
                    filename=stmtHash.hash(script),
                    mode="eval"), _dict)
        else:
            # otherwise we just execute the entire code
            exec(
                compile(script, filename=stmtHash.hash(script), mode='exec'),
                _dict)
            res = None
    except SyntaxError as e:
        raise SyntaxError(f"Invalid code {script}: {e}")
    # if check_readonly:
    #     env.sos_dict.check_readonly_vars()
    return res
|
[
"def",
"SoS_exec",
"(",
"script",
":",
"str",
",",
"_dict",
":",
"dict",
"=",
"None",
",",
"return_result",
":",
"bool",
"=",
"True",
")",
"->",
"None",
":",
"if",
"_dict",
"is",
"None",
":",
"_dict",
"=",
"env",
".",
"sos_dict",
".",
"dict",
"(",
")",
"if",
"not",
"return_result",
":",
"exec",
"(",
"compile",
"(",
"script",
",",
"filename",
"=",
"stmtHash",
".",
"hash",
"(",
"script",
")",
",",
"mode",
"=",
"'exec'",
")",
",",
"_dict",
")",
"return",
"None",
"try",
":",
"stmts",
"=",
"list",
"(",
"ast",
".",
"iter_child_nodes",
"(",
"ast",
".",
"parse",
"(",
"script",
")",
")",
")",
"if",
"not",
"stmts",
":",
"return",
"if",
"isinstance",
"(",
"stmts",
"[",
"-",
"1",
"]",
",",
"ast",
".",
"Expr",
")",
":",
"# the last one is an expression and we will try to return the results",
"# so we first execute the previous statements",
"if",
"len",
"(",
"stmts",
")",
">",
"1",
":",
"exec",
"(",
"compile",
"(",
"ast",
".",
"Module",
"(",
"body",
"=",
"stmts",
"[",
":",
"-",
"1",
"]",
")",
",",
"filename",
"=",
"stmtHash",
".",
"hash",
"(",
"script",
")",
",",
"mode",
"=",
"\"exec\"",
")",
",",
"_dict",
")",
"# then we eval the last one",
"res",
"=",
"eval",
"(",
"compile",
"(",
"ast",
".",
"Expression",
"(",
"body",
"=",
"stmts",
"[",
"-",
"1",
"]",
".",
"value",
")",
",",
"filename",
"=",
"stmtHash",
".",
"hash",
"(",
"script",
")",
",",
"mode",
"=",
"\"eval\"",
")",
",",
"_dict",
")",
"else",
":",
"# otherwise we just execute the entire code",
"exec",
"(",
"compile",
"(",
"script",
",",
"filename",
"=",
"stmtHash",
".",
"hash",
"(",
"script",
")",
",",
"mode",
"=",
"'exec'",
")",
",",
"_dict",
")",
"res",
"=",
"None",
"except",
"SyntaxError",
"as",
"e",
":",
"raise",
"SyntaxError",
"(",
"f\"Invalid code {script}: {e}\"",
")",
"# if check_readonly:",
"# env.sos_dict.check_readonly_vars()",
"return",
"res"
] | 34.309524 | 18.357143 |
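The interesting move in ``SoS_exec`` is splitting the parsed module so that everything except a trailing expression is compiled in ``exec`` mode, while the trailing expression is compiled in ``eval`` mode for its value. A standalone sketch of that pattern, assuming Python 3.8+ (where ``compile`` requires ``type_ignores`` on ``ast.Module``):

```python
import ast

def exec_with_result(source, scope):
    tree = ast.parse(source)
    if tree.body and isinstance(tree.body[-1], ast.Expr):
        # Run every statement except the trailing expression...
        head = ast.Module(body=tree.body[:-1], type_ignores=[])
        exec(compile(head, '<sketch>', 'exec'), scope)
        # ...then evaluate the trailing expression for its value.
        tail = ast.Expression(body=tree.body[-1].value)
        return eval(compile(tail, '<sketch>', 'eval'), scope)
    exec(compile(tree, '<sketch>', 'exec'), scope)
    return None

ns = {}
assert exec_with_result("a = 2\na + 40", ns) == 42
```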