repo
stringlengths 1
29
| path
stringlengths 24
332
| code
stringlengths 39
579k
|
---|---|---|
buildbot
|
buildbot//interfaces.pyclass:IBuilderControl/ping
|
def ping():
    """Probe the worker's liveness.

    Returns a Deferred firing with True when the worker answers or False
    when it does not.  As a side effect, an event describing the ping
    result is added to this builder's column in the waterfall display.
    Because the check rides on the underlying TCP connection's timeout,
    a failure may take a long time to be reported.
    """
|
vcdriver
|
vcdriver//vm.pyclass:VirtualMachine/_get_snapshots_by_name
|
@classmethod
def _get_snapshots_by_name(cls, snapshots, name):
    """Recursively collect the snapshots whose name matches `name`.

    :param snapshots: iterable of snapshot objects to search
    :param name: snapshot name to match exactly
    :return: list of matching snapshots, including matching descendants
    """
    matches = []
    for snap in snapshots:
        if snap.name == name:
            matches.append(snap)
        # Descend into this snapshot's children as well.
        matches.extend(cls._get_snapshots_by_name(snap.childSnapshotList, name))
    return matches
|
sine.properties-0.1.1
|
sine.properties-0.1.1//sine/properties/core.pyfile:/sine/properties/core.py:function:identifyKeyValue/identifyKeyValue
|
def identifyKeyValue(string):
    """Locate the key-end and value-start positions in a properties line.

    The data must begin immediately with the key (no leading whitespace,
    since whitespace is read as a separator and would yield an empty key).
    Returns ``(keyEnd, valueStart)``.

    @example
    identifyKeyValue('key=value') -> 3, 4
    identifyKeyValue('key = value') -> 3, 6
    """
    limit = len(string)
    keyEnd = 0
    valueStart = limit
    hasSep = False
    precedingBackslash = False
    # Scan forward for the first unescaped separator ('=', ':' or whitespace).
    while keyEnd < limit:
        c = string[keyEnd]
        if (c == '=' or c == ':') and not precedingBackslash:
            valueStart = keyEnd + 1
            hasSep = True
            break
        elif (c == ' ' or c == '\t' or c == '\x0c') and not precedingBackslash:
            # Whitespace separator: an explicit '=' / ':' may still follow.
            valueStart = keyEnd + 1
            break
        if c == '\\':
            # Consecutive backslashes toggle the escape state.
            precedingBackslash = not precedingBackslash
        else:
            precedingBackslash = False
        keyEnd += 1
    # Skip whitespace after the key, consuming at most one '=' / ':'.
    while valueStart < limit:
        c = string[valueStart]
        if c != ' ' and c != '\t' and c != '\x0c':
            if not hasSep and (c == '=' or c == ':'):
                hasSep = True
            else:
                break
        valueStart += 1
    return keyEnd, valueStart
|
sndict
|
sndict//structurednesteddict.pyclass:StructuredNestedDict/_check_key_list
|
@staticmethod
def _check_key_list(key_list):
    """Validate key_list, raising KeyError when it is empty."""
    if not key_list:
        raise KeyError('key_list cannot be empty')
|
statsmodels-0.11.1
|
statsmodels-0.11.1//statsmodels/sandbox/distributions/estimators.pyfile:/statsmodels/sandbox/distributions/estimators.py:function:momentcondquant/momentcondquant
|
def momentcondquant(distfn, params, mom2, quantile=None, shape=None):
    """Moment conditions for estimating distribution parameters by matching
    quantiles; defines as many moment conditions as quantiles.

    Parameters
    ----------
    distfn : distribution
        Must provide a ``cdf`` method (scipy.stats style).
    params : sequence
        Distribution parameters, forwarded unchanged to ``distfn.cdf``.
    mom2 : unused
        Present for a common moment-condition signature; not referenced.
    quantile : tuple (pq, xq)
        Probabilities and corresponding empirical quantiles.  Despite the
        default, ``None`` is not actually supported here: unpacking a
        ``None`` value below would raise TypeError.
    shape : optional
        Overwritten from ``params`` when three parameters are given.

    Returns
    -------
    difference : ndarray
        Difference between the theoretical cdf evaluated at the empirical
        quantiles and the target probabilities.

    Notes
    -----
    This can be used for method of moments or for generalized method of
    moments.
    """
    # NOTE(review): loc/scale/shape unpacked here are never used afterwards;
    # only ``*params`` is forwarded to cdf.  Presumably kept for clarity of
    # the expected parameter layouts -- confirm before removing.
    if len(params) == 2:
        loc, scale = params
    elif len(params) == 3:
        shape, loc, scale = params
    else:
        pass
    pq, xq = quantile
    cdfdiff = distfn.cdf(xq, *params) - pq
    return cdfdiff
|
pyshtrih-2.0.4
|
pyshtrih-2.0.4//pyshtrih/misc.pyfile:/pyshtrih/misc.py:function:decode/decode
|
def decode(text):
    """Decode text received from the fiscal registrar (cp1251 bytes -> str)."""
    return str(text, 'cp1251')
|
cnfformula
|
cnfformula//cmdline.pyclass:GraphHelper/setup_command_line
|
@staticmethod
def setup_command_line(parser):
    """Set up command line options for getting graphs.

    Abstract hook: concrete graph-input helpers must override it.
    """
    raise NotImplementedError('Graph Input helper must be subclassed')
|
pyboto3-1.4.4
|
pyboto3-1.4.4//pyboto3/applicationdiscoveryservice.pyfile:/pyboto3/applicationdiscoveryservice.py:function:get_discovery_summary/get_discovery_summary
|
def get_discovery_summary():
    """Retrieve a short summary of discovered assets.

    Mirrors ``client.get_discovery_summary()`` of the AWS Application
    Discovery Service API.

    :rtype: dict
    :return: A mapping with top-level counters (``servers``,
        ``applications``, ``serversMappedToApplications``,
        ``serversMappedtoTags``) plus ``agentSummary`` and
        ``connectorSummary`` sub-dicts of agent/connector health counts
        (active, healthy, blackListed, shutdown, unhealthy, total,
        unknown).

    Stub only: pyboto3 modules exist for IDE auto-completion and carry
    no implementation.
    """
    pass
|
eric-ide-20.5
|
eric-ide-20.5//eric6/WebBrowser/Tools/Scripts.pyfile:/eric6/WebBrowser/Tools/Scripts.py:function:setCss/setCss
|
def setCss(css):
    """
    Function generating a script to set a given CSS style sheet.
    @param css style sheet
    @type str
    @return script to set the style sheet
    @rtype str
    """
    # Escape single quotes and newlines so the sheet survives embedding
    # inside the single-quoted JavaScript string literal below.
    escaped = css.replace("'", "\\'").replace('\n', '\\n')
    script = """
(function() {{
    var css = document.createElement('style');
    css.setAttribute('type', 'text/css');
    css.appendChild(document.createTextNode('{0}'));
    document.getElementsByTagName('head')[0].appendChild(css);
}})()"""
    return script.format(escaped)
|
hurry.workflow-3.0.2
|
hurry.workflow-3.0.2//src/hurry/workflow/interfaces.pyclass:IWorkflowInfo/hasVersion
|
def hasVersion(state):
    """Return true if a version exists in the given workflow state."""
|
obsplus-0.1.0
|
obsplus-0.1.0//versioneer.pyfile:/versioneer.py:function:render_git_describe/render_git_describe
|
def render_git_describe(pieces):
    """Render version pieces as TAG[-DISTANCE-gHEX][-dirty].

    Mimics ``git describe --tags --dirty --always``.
    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces['closest-tag']
    if tag:
        out = tag
        distance = pieces['distance']
        if distance:
            out += '-%d-g%s' % (distance, pieces['short'])
    else:
        # No tag reachable: fall back to the bare commit hash.
        out = pieces['short']
    if pieces['dirty']:
        out += '-dirty'
    return out
|
doker
|
doker//rst2pdf/extensions/sample.pyfile:/rst2pdf/extensions/sample.py:function:install/install
|
def install(createpdf, options):
    """Extension entry point called by rst2pdf.

    ``createpdf`` exposes the createpdf module globals as attributes and
    ``options`` carries the parsed command-line options.  The hook is
    optional, but when present it must have exactly this signature.
    """
|
optopsy
|
optopsy//filters.pyfile:/filters.py:function:entry_spread_price/entry_spread_price
|
def entry_spread_price(data, value, _idx):
    """Filter spreads by their net entry price.

    For example, a min/max of $0.10 to $0.20 keeps only spreads whose
    net price falls within that range.  (Not implemented.)
    """
    pass
|
neutron-15.0.2
|
neutron-15.0.2//neutron/agent/linux/dhcp.pyclass:DhcpBase/get_isolated_subnets
|
@classmethod
def get_isolated_subnets(cls, network):
    """Return a dict mapping each subnet to whether it is isolated.

    Abstract: concrete DHCP drivers must override this.
    """
    raise NotImplementedError()
|
regraph
|
regraph//primitives.pyfile:/primitives.py:function:add_edge/add_edge
|
def add_edge(graph, s, t, attrs=None, **attr):
    """Add an edge to a graph.

    Parameters
    ----------
    graph : regraph.Graph
    s : hashable, source node id.
    t : hashable, target node id.
    attrs : dict
        Edge attributes.

    Raises
    ------
    ReGraphError
        If `attrs` is not a dictionary
    GraphError
        If either one of the nodes does not exist in the graph or an
        edge between `s` and `t` already exists.
    """
    # Thin delegation; note the extra **attr keywords are accepted but
    # not forwarded.
    graph.add_edge(s, t, attrs)
|
conclave
|
conclave//conclave_client.pyfile:/conclave_client.py:function:make_payload/make_payload
|
def make_payload(method, params):
    """Build the JSON-RPC payload dict.

    :param method: Name of the RPC method to invoke
    :param params: Parameters of the method; omitted from the payload
        when empty
    :return: Full payload
    """
    body = {'method': method}
    if params:
        body['params'] = params
    return body
|
flow
|
flow//learner.pyclass:DefaultLearner/_get_accumulators
|
@staticmethod
def _get_accumulators(outputs: dict):
    """Build a dictionary with one empty-list accumulator per step output.

    The outputs of each learning step are stored in these lists.

    :param outputs: A dictionary containing the learning step outputs.
    """
    return {output_name: [] for output_name in outputs}
|
ts3ekkoclient
|
ts3ekkoclient//bot.pyclass:EkkoBot/color_message
|
@staticmethod
def color_message(text, color='#1433b1'):
    """Wrap the provided text in BB-Code color formatting.

    :param text: text which should be colored.
    :param color: the color which should be put on the text.
    :return: color BB-code formatted string.
    """
    return '[color={}]{}[/color]'.format(color, text)
|
hipack-13
|
hipack-13//hipack.pyfile:/hipack.py:function:cast/cast
|
def cast(annotations, bytestring, value):
    """Default "cast" hook: return the parsed value unchanged."""
    return value
|
paynlsdk2
|
paynlsdk2//client/validate.pyclass:Validate/pay_server_ip_request
|
@staticmethod
def pay_server_ip_request():
    """
    Get a Pay server IP validation request instance.

    :return: Request instance
    :rtype: paynlsdk2.api.validate.payserverip.Request
    """
    from paynlsdk2.api.validate.payserverip import Request
    return Request()
|
Paste-3.4.0
|
Paste-3.4.0//paste/urlparser.pyclass:URLParser/register_constructor
|
def register_constructor(cls, extension, constructor):
    """
    Register a function as a constructor.  Registered constructors
    apply to all instances of `URLParser`.

    The extension should have a leading ``.``, or the special
    extensions ``dir`` (for directories) and ``*`` (a catch-all).

    `constructor` must be a callable that takes two arguments:
    ``environ`` and ``filename``, and returns a WSGI application.
    """
    d = cls.global_constructors
    # Refuse duplicate registrations.  NOTE: being an assert, this guard
    # disappears under ``python -O``.
    assert extension not in d, 'A constructor already exists for the extension %r (%r) when attemption to register constructor %r' % (
        extension, d[extension], constructor)
    d[extension] = constructor
|
elfi
|
elfi//methods/utils.pyfile:/methods/utils.py:function:sample_object_to_dict/sample_object_to_dict
|
def sample_object_to_dict(data, elem, skip=''):
    """Copy a Sample object's attributes into a dict for JSON serialization.

    Parameters
    ----------
    data : dict, required
        Stores collected data for json
    elem : object, required
        Sample object whose ``__dict__`` is harvested
    skip : str, optional
        Extra attribute name to skip; ``outputs`` is always skipped
        (e.g. when already processed previously).
    """
    for attr, value in elem.__dict__.items():
        if attr in ('outputs', skip):
            continue
        if attr == 'meta':
            # Flatten the meta mapping into the top level.
            data.update(value)
        else:
            data[attr] = value
|
lively_lights-0.1.1
|
lively_lights-0.1.1//lively_lights/types.pyfile:/lively_lights/types.py:function:saturation/saturation
|
def saturation(value):
    """Saturation of the light: 254 is the most saturated (colored) and 0
    the least saturated (white).  Coerces to int and validates the range."""
    result = int(value)
    if not 0 <= result <= 254:
        raise ValueError('Minimum saturation is 0, to the maximum 254')
    return result
|
khorosjx-2.5.3
|
khorosjx-2.5.3//khorosjx/utils/core_utils.pyfile:/khorosjx/utils/core_utils.py:function:__get_invite_dataset/__get_invite_dataset
|
def __get_invite_dataset(_query_uri):
    """Identify the appropriate invite dataset for a query URI."""
    if 'invites/event' in _query_uri:
        return 'event_invite'
    return 'invite'
|
miasm-0.1.3
|
miasm-0.1.3//miasm/expression/expression.pyfile:/miasm/expression/expression.py:function:_expr_compute_of/_expr_compute_of
|
def _expr_compute_of(op1, op2):
    """
    Compute the overflow flag of @op1 - @op2 (x86 OF semantics).

    @op1: Expression
    @op2: Expression
    """
    diff = op1 - op2
    # Overflow iff the operands' signs differ and the result's sign
    # differs from op1's: msb((op1 ^ diff) & (op1 ^ op2)).
    return ((op1 ^ diff) & (op1 ^ op2)).msb()
|
dsub-0.3.7
|
dsub-0.3.7//dsub/providers/google_base.pyfile:/dsub/providers/google_base.py:function:get_operation_full_job_id/get_operation_full_job_id
|
def get_operation_full_job_id(op):
    """Return 'job-id' or 'job-id.task-id' for the given operation."""
    job_id = op.get_field('job-id')
    task_id = op.get_field('task-id')
    return '%s.%s' % (job_id, task_id) if task_id else job_id
|
grimoire-elk-0.63.0
|
grimoire-elk-0.63.0//grimoire_elk/raw/elastic.pyclass:ElasticOcean/get_perceval_params_from_url
|
@classmethod
def get_perceval_params_from_url(cls, url):
    """Return the perceval backend params for a data-source URL."""
    params = [url]
    return params
|
whynot_estimators
|
whynot_estimators//utils.pyfile:/utils.py:function:extract/extract
|
def extract(listvector, argname):
    """Retrieve argument `argname` from an R listvector by name."""
    names = list(listvector.names)
    if argname not in names:
        raise ValueError(f'{argname} not found.')
    return listvector[names.index(argname)]
|
dorthrithil-networkx-1.11
|
dorthrithil-networkx-1.11//networkx/classes/function.pyfile:/networkx/classes/function.py:function:is_empty/is_empty
|
def is_empty(G):
    """Return ``True`` if ``G`` has no edges.

    Parameters
    ----------
    G : graph
        A NetworkX graph.

    Returns
    -------
    bool
        ``True`` if ``G`` has no edges, and ``False`` otherwise.

    Notes
    -----
    An empty graph can have nodes but no edges; with zero nodes it is
    the null graph.  This is an O(n) scan of the adjacency mapping,
    where n is the number of nodes.
    """
    for neighbors in G.adj.values():
        if neighbors:
            return False
    return True
|
selinon
|
selinon//system.pyclass:System/_dump_foreach_function_name
|
@staticmethod
def _dump_foreach_function_name(flow_name, idx):
    """Create a foreach function name for a dump.

    :param flow_name: flow name
    :type flow_name: str
    :param idx: index of condition within the flow (must be non-negative)
    :type idx: int
    :return: condition function representation
    """
    assert idx >= 0
    return f'_foreach_{flow_name}_{idx}'
|
ANYstructure-1.0.2
|
ANYstructure-1.0.2//ANYstructure/example_data.pyfile:/ANYstructure/example_data.py:function:get_grid_coord_from_points_coords/get_grid_coord_from_points_coords
|
def get_grid_coord_from_points_coords(point_coord):
    """
    Convert canvas point coordinates to grid coordinates (row, col).
    The value does not change with the slider.

    :param point_coord: (x, y) coordinate pair
    :return: (row, col)
    """
    # Canvas origin used for the row axis.
    origo_x, origo_y = 50, 670
    x, y = point_coord[0], point_coord[1]
    return origo_y - y * 10, x * 10
|
dtale-1.8.11
|
dtale-1.8.11//dtale/utils.pyfile:/dtale/utils.py:function:get_float_arg/get_float_arg
|
def get_float_arg(r, name, default=None):
    """
    Retrieve argument from :attr:`flask:flask.request` and convert to float.

    :param r: :attr:`flask:flask.request`
    :param name: argument name
    :type: str
    :param default: default value if parameter is non-existent, defaults to None
    :return: float argument value, or `default` when missing or unparseable
    """
    val = r.args.get(name)
    if val is None or val == '':
        return default
    try:
        return float(val)
    except (TypeError, ValueError):
        # Only conversion failures fall back to the default; the previous
        # ``except BaseException`` also swallowed KeyboardInterrupt and
        # SystemExit.
        return default
|
zuul
|
zuul//driver/github/githubconnection.pyfile:/driver/github/githubconnection.py:function:_status_as_tuple/_status_as_tuple
|
def _status_as_tuple(status):
    """Translate a status dict into a (user, context, state) tuple."""
    creator = status.get('creator')
    user = creator.get('login') if creator else 'Unknown'
    return user, status.get('context'), status.get('state')
|
google-cloud-bigquery-1.24.0
|
google-cloud-bigquery-1.24.0//google/cloud/bigquery/_helpers.pyfile:/google/cloud/bigquery/_helpers.py:function:_get_sub_prop/_get_sub_prop
|
def _get_sub_prop(container, keys, default=None):
    """Get a nested value from a dictionary.

    Works like ``dict.get(key)`` but follows a sequence of keys, one per
    nesting level.

    Arguments:
        container (Dict):
            A dictionary which may contain other dictionaries as values.
        keys (Iterable):
            Keys to follow, outermost first.
        default (object):
            (Optional) Value returned when any key is missing.
            Defaults to ``None``.

    Examples:
        >>> _get_sub_prop({'key': 'value'}, ['key'])
        'value'
        >>> _get_sub_prop({'nothere': 123}, ['key'], default='not found')
        'not found'
        >>> _get_sub_prop({'key': {'subkey': 'value'}}, ['key', 'subkey'])
        'value'

    Returns:
        object: The value if present or the default.
    """
    node = container
    for key in keys:
        if key not in node:
            return default
        node = node[key]
    return node
|
iotlabcli
|
iotlabcli//parser/profile.pyfile:/parser/profile.py:function:_add_profile/_add_profile
|
def _add_profile(api, profile, json_out=False):
    """Add a user profile; with `json_out` just return the profile dict."""
    return profile if json_out else api.add_profile(profile)
|
mbdb-0.6
|
mbdb-0.6//mbdb/sqlparser.pyfile:/mbdb/sqlparser.py:function:p_update_values/p_update_values
|
def p_update_values(p):
    """update_values : identifier '=' value
                     | update_values ',' identifier '=' value"""
    # PLY grammar action: the docstring above IS the grammar rule, not
    # documentation -- do not edit it casually.
    if len(p) == 4:
        # First production: start a fresh {identifier: value} mapping.
        p[0] = {p[1]: p[3]}
    elif isinstance(p[1], dict):
        # Recursive production: extend the accumulated mapping in place.
        p[1][p[3]] = p[5]
        p[0] = p[1]
    else:
        p[0] = None
|
spyne-2.12.16
|
spyne-2.12.16//spyne/util/django.pyfile:/spyne/util/django.py:function:strip_regex_metachars/strip_regex_metachars
|
def strip_regex_metachars(pattern):
    """Strip the ``^`` and ``$`` anchors from the pattern's beginning and end.

    According to http://www.w3.org/TR/xmlschema-0/#regexAppendix the
    XMLSchema expression language does not contain the metacharacters
    ^ and $.

    :returns: stripped pattern string
    """
    begin = 1 if pattern.startswith('^') else 0
    end = len(pattern) - 1 if pattern.endswith('$') else len(pattern)
    return pattern[begin:end]
|
pya2l-0.0.1
|
pya2l-0.0.1//pya2l/parser/grammar/parser.pyclass:A2lParser/p_number_string_value_list
|
@staticmethod
def p_number_string_value_list(p):
    """number_string_value_list : number_string_value
                                | number_string_value number_string_value_list"""
    # PLY grammar action (docstring is the grammar rule).  The recursive
    # production prepends the new value to the already-built list.
    try:
        p[0] = [p[1]] + p[2]
    except IndexError:
        # Single-element production: p[2] does not exist.
        p[0] = [p[1]]
|
fake-bpy-module-2.80-20200428
|
fake-bpy-module-2.80-20200428//bpy/ops/view3d.pyfile:/bpy/ops/view3d.py:function:object_mode_pie_or_toggle/object_mode_pie_or_toggle
|
def object_mode_pie_or_toggle():
    """Undocumented contribute <https://developer.blender.org/T51061>

    Stub generated for IDE completion; performs no action here.
    """
    pass
|
pyiw-1.0.0
|
pyiw-1.0.0//pyiw/scheme.pyclass:Scheme/find
|
@classmethod
def find(cls, interface, name):
    """
    Return the :class:`Scheme` matching `interface` and `name`, or
    `None` when no such scheme exists.
    """
    matches = cls.where(lambda s: s.interface == interface and s.name == name)
    if matches:
        return matches[0]
    return None
|
mookoo-0.1.0
|
mookoo-0.1.0//bottle.pyclass:BaseTemplate/global_config
|
@classmethod
def global_config(cls, key, *args):
    """Read (no extra args) or set (one arg) a global setting stored in
    ``class.settings``."""
    if not args:
        return cls.settings[key]
    # Copy-on-write so subclasses never mutate an inherited settings dict.
    cls.settings = cls.settings.copy()
    cls.settings[key] = args[0]
|
psf_utils-0.6.0
|
psf_utils-0.6.0//psf_utils/parse.pyfile:/psf_utils/parse.py:function:p_prop/p_prop
|
def p_prop(p):
    """prop : PROP '(' named_values ')'"""
    # PLY grammar action (docstring is the rule): collapse the parsed
    # name/value pairs into a dict.
    p[0] = dict(p[3])
|
mercurial-5.4
|
mercurial-5.4//mercurial/thirdparty/zope/interface/common/sequence.pyclass:IExtendedReadSequence/index
|
def index(item, *args):
    """Return the first index of `item`.

    `L.index(value, [start, [stop]])` -> integer
    """
|
sm2
|
sm2//tsa/vector_ar/plotting.pyfile:/tsa/vector_ar/plotting.py:function:plot_mts/plot_mts
|
def plot_mts(Y, names=None, index=None):
    """
    Plot multiple time series, one stacked subplot per column of `Y`.

    :param Y: 2-D array-like of shape (nobs, k); each column is a series
    :param names: optional sequence of k subplot titles
    :param index: optional x-axis values shared by all series
    :return: the created matplotlib Figure
    """
    import matplotlib.pyplot as plt
    k = Y.shape[1]
    # One subplot per series, stacked vertically.
    rows, cols = k, 1
    fig = plt.figure(figsize=(10, 10))
    for j in range(k):
        ts = Y[:, (j)]
        ax = fig.add_subplot(rows, cols, j + 1)
        if index is not None:
            ax.plot(index, ts)
        else:
            ax.plot(ts)
        if names is not None:
            ax.set_title(names[j])
    return fig
|
ask_sdk_model
|
ask_sdk_model//services/timer_management/operation.pyclass:Operation/get_real_child_model
|
@classmethod
def get_real_child_model(cls, data):
    """Resolve the concrete subclass named by the payload's discriminator."""
    discriminator = data[cls.json_discriminator_key]
    return cls.discriminator_value_class_map.get(discriminator)
|
resistics-0.0.6
|
resistics-0.0.6//resistics/common/print.pyfile:/resistics/common/print.py:function:breakPrint/breakPrint
|
def breakPrint() -> None:
    """Print a visual break to make terminal output easier to read."""
    rule = '---------------------------------------------------'
    print(rule)
    print('')
    print(rule)
|
firestore-0.0.8
|
firestore-0.0.8//firestore/containers/collection.pyclass:Collection/get_json_schema
|
@classmethod
def get_json_schema(cls):
    """
    Get a json schema of this document with datatypes and required
    status.

    Returns the cached ``__schema__`` when truthy; otherwise implicitly
    returns ``None``.  Note ``__schema__`` is name-mangled to the
    defining class.
    """
    if cls.__schema__:
        return cls.__schema__
|
sonicparanoid-1.3.2
|
sonicparanoid-1.3.2//sonicparanoid/sys_tools.pyfile:/sonicparanoid/sys_tools.py:function:evalCpuNeeds/evalCpuNeeds
|
def evalCpuNeeds(totLn, algorithm=None, debug=False):
    """
    Estimate the number of threads (cores) needed for `totLn` input lines.

    For the supported algorithms an estimation is given but should be
    updated when possible.
    NOTE: very experimental and based on very few samples.

    :param totLn: total number of lines to be processed
    :param algorithm: one of 'bwa', 'map_pct_calc', or None (uses 'avg')
    :param debug: print diagnostic information
    :return: estimated core count, capped at the machine's core count
    """
    import multiprocessing
    maxCores = multiprocessing.cpu_count()
    # Standard core-count buckets the estimate is rounded up to.
    buckets = [2, 4, 8, 12, 16, 24, 32, 48, 64, 96, 128]
    if debug:
        print('\n evalCpuNeeds START:')
        print('TOT LINES ::\t%s' % str(totLn))
        print('ALGORITHM ::\t%s' % str(algorithm))
    # Lines-per-core estimates for each supported algorithm.
    chunkDict = {'bwa': 720000, 'map_pct_calc': 400000, 'avg': 560000}
    if algorithm is None:  # identity test (was the non-idiomatic ``== None``)
        algorithm = 'avg'
    chunkSize = chunkDict[algorithm]
    if chunkSize >= totLn:
        if debug:
            print(
                'Chunk size is bigger than the line to be porcessed, 1 core will do just fine...'
                )
        return 1
    cores = int(totLn / chunkSize)
    if debug:
        print('Selected Algorimth for Estimation:\t%s' % algorithm)
        print('CHUNK SIZE FOR %s ALGORITHM:\t%d' % (algorithm.upper(),
            chunkSize))
    if cores >= maxCores:
        return maxCores
    if algorithm != 'map_pct_calc':
        # Round the estimate up to the next bucket size.
        for el in buckets:
            if float(cores / el) <= 1:
                return el
    return cores
|
MIP
|
MIP//mip/utils.pyfile:/mip/utils.py:function:shorten/shorten
|
def shorten(s, N=80):
    """
    Return a representation of string `s` of at most `N` characters.

    Strings longer than `N` are abbreviated to their first and last
    ``(N - 5) // 2`` characters joined by ``' ... '``.
    """
    if len(s) <= N:
        return s[:]
    # Integer division: the old ``(N - 5) / 2`` yields a float under
    # Python 3, making the slices below raise TypeError.
    half = (N - 5) // 2
    return '{} ... {}'.format(s[:half], s[-half:])
|
graphql-example-0.4.4
|
graphql-example-0.4.4//vendor/pipenv/vendor/jinja2/filters.pyfile:/vendor/pipenv/vendor/jinja2/filters.py:function:evalcontextfilter/evalcontextfilter
|
def evalcontextfilter(f):
    """Mark a filter as eval-context dependent.

    An eval context object is passed as first argument.  For more
    information about the eval context, see :ref:`eval-context`.

    .. versionadded:: 2.4
    """
    setattr(f, 'evalcontextfilter', True)
    return f
|
pyspark
|
pyspark//ml/util.pyclass:DefaultParamsReader/__get_class
|
@staticmethod
def __get_class(clazz):
    """
    Load a Python class (or attribute) from its fully qualified name.
    """
    parts = clazz.split('.')
    # __import__ returns the top-level package, so walk every dotted
    # component after the first to reach the target attribute.
    obj = __import__('.'.join(parts[:-1]))
    for name in parts[1:]:
        obj = getattr(obj, name)
    return obj
|
python-zaqarclient-1.13.1
|
python-zaqarclient-1.13.1//zaqarclient/queues/v2/core.pyfile:/zaqarclient/queues/v2/core.py:function:subscription_get/subscription_get
|
def subscription_get(transport, request, queue_name, subscription_id):
    """Get the data of a particular subscription.

    :param transport: Transport instance to use
    :type transport: `transport.base.Transport`
    :param request: Request instance ready to be sent.
    :type request: `transport.request.Request`
    :param queue_name: Queue reference name.
    :type queue_name: `six.text_type`
    :param subscription_id: ID of subscription.
    :type subscription_id: `six.text_type`
    """
    request.operation = 'subscription_get'
    request.params.update(queue_name=queue_name,
                          subscription_id=subscription_id)
    response = transport.send(request)
    return response.deserialized_content
|
pyboto3-1.4.4
|
pyboto3-1.4.4//pyboto3/glacier.pyfile:/pyboto3/glacier.py:function:list_vaults/list_vaults
|
def list_vaults(accountId=None, marker=None, limit=None):
    """List all vaults owned by the calling user's account, ASCII-sorted
    by vault name, as in ``client.list_vaults()`` of Amazon Glacier.

    Up to 1,000 vaults are returned per call; when more exist the
    response ``Marker`` holds the vault ARN at which to resume listing.

    :type accountId: string
    :param accountId: AWS account ID, or '-' to use the account of the
        signing credentials (treated as '-' when omitted); no hyphens
        when an explicit ID is given.
    :type marker: string
    :param marker: Pagination marker: the vault ARN after which listing
        should begin.
    :type limit: string
    :param limit: Maximum number of vaults to return (default 1000).
    :rtype: dict
    :return: dict with 'VaultList' (each entry holding VaultARN,
        VaultName, CreationDate, LastInventoryDate, NumberOfArchives,
        SizeInBytes) and 'Marker'.

    Stub only: pyboto3 modules exist for IDE auto-completion and carry
    no implementation.
    """
    pass
|
hdx
|
hdx//utilities/dictandlist.pyfile:/utilities/dictandlist.py:function:key_value_convert/key_value_convert
|
def key_value_convert(dictin, keyfn=lambda x: x, valuefn=lambda x: x,
    dropfailedkeys=False, dropfailedvalues=False, exception=ValueError):
    """Convert keys and/or values of a dictionary using the given functions.

    Args:
        dictin (DictUpperBound): Input dictionary
        keyfn (Callable[[Any], Any]): Function to convert keys. Defaults to identity.
        valuefn (Callable[[Any], Any]): Function to convert values. Defaults to identity.
        dropfailedkeys (bool): Drop entries whose key conversion fails. Defaults to False.
        dropfailedvalues (bool): Drop entries whose value conversion fails. Defaults to False.
        exception (ExceptionUpperBound): Exception keyfn/valuefn may raise. Defaults to ValueError.

    Returns:
        Dict: Dictionary with converted keys and/or values
    """
    result = dict()
    for key, value in dictin.items():
        try:
            converted_key = keyfn(key)
        except exception:
            if dropfailedkeys:
                continue
            converted_key = key  # keep the original key on failure
        try:
            converted_value = valuefn(value)
        except exception:
            if dropfailedvalues:
                continue
            converted_value = value  # keep the original value on failure
        result[converted_key] = converted_value
    return result
|
recordlinkage
|
recordlinkage//utils.pyfile:/utils.py:function:unique/unique
|
def unique(x):
    """Return a list of the distinct elements of `x` (order not preserved)."""
    distinct = set(x)
    return list(distinct)
|
scrapy
|
scrapy//xlib/tx/interfaces.pyclass:IResolver/lookupService
|
def lookupService(name, timeout=None):
    """
    Perform an SRV record lookup.

    @type name: C{str}
    @param name: DNS name to resolve.

    @type timeout: Sequence of C{int}
    @param timeout: Seconds after which to reissue the query; when the
        last timeout expires the query is considered failed.

    @rtype: L{Deferred}
    @return: A L{Deferred} firing with a three-tuple of lists of
        L{twisted.names.dns.RRHeader} instances: answers, authorities
        and additional records.  It may instead fail with one of the
        exceptions defined in L{twisted.names.error} or with
        C{NotImplementedError}.
    """
|
pyspark-2.4.5
|
pyspark-2.4.5//pyspark/sql/utils.pyfile:/pyspark/sql/utils.py:function:require_minimum_pandas_version/require_minimum_pandas_version
|
def require_minimum_pandas_version():
    """ Raise ImportError if minimum version of Pandas is not installed
    """
    # Lowest pandas release supported by this pyspark version.
    minimum_pandas_version = '0.19.2'
    from distutils.version import LooseVersion
    try:
        import pandas
        have_pandas = True
    except ImportError:
        have_pandas = False
    if not have_pandas:
        raise ImportError(
            'Pandas >= %s must be installed; however, it was not found.' %
            minimum_pandas_version)
    # LooseVersion copes with dev/rc suffixes in version strings.
    if LooseVersion(pandas.__version__) < LooseVersion(minimum_pandas_version):
        raise ImportError(
            'Pandas >= %s must be installed; however, your version was %s.' %
            (minimum_pandas_version, pandas.__version__))
|
zou
|
zou//app/utils/thumbnail.pyfile:/app/utils/thumbnail.py:function:url_path/url_path
|
def url_path(data_type, instance_id):
    """
    Build the thumbnail download path for a data type and instance ID.
    """
    slug = data_type.replace('_', '-')
    return 'pictures/thumbnails/{}/{}.png'.format(slug, instance_id)
|
tohu
|
tohu//v6/utils.pyfile:/v6/utils.py:function:print_generated_sequence/print_generated_sequence
|
def print_generated_sequence(gen, num, *, sep=', ', fmt='', seed=None):
    """
    Print a sequence of `num` items produced by the random generator `gen`.
    When `seed` is truthy the generator is reset with it first.
    """
    if seed:
        gen.reset(seed)
    items = [format(next(gen), fmt) for _ in range(num)]
    # Multi-line separators get the sequence on its own paragraph.
    lead = '\n\n' if '\n' in sep else ' '
    print('Generated sequence:{}{}'.format(lead, sep.join(items)))
|
stix_shifter_modules
|
stix_shifter_modules//aws_cloud_watch_logs/stix_translation/query_constructor.pyclass:QueryStringPatternTranslator/_format_equality
|
@staticmethod
def _format_equality(value) ->str:
"""
Formatting value in the event of equality operation
:param value: str
:return: str
"""
return '{}'.format(value) if not isinstance(value, list) else value
|
pygameday-0.2
|
pygameday-0.2//pygameday/parse.pyfile:/pygameday/parse.py:function:save_page/save_page
|
def save_page(page, filename):
    """Write the text of an HTML page to disk.

    Parameters
    ----------
    page : requests page object
        The page to write
    filename
        The name of the file to create
    """
    with open(filename, 'w') as out:
        out.write(page.text)
|
datashader-0.10.0
|
datashader-0.10.0//datashader/resampling.pyfile:/datashader/resampling.py:function:compute_chunksize/compute_chunksize
|
def compute_chunksize(src, w, h, chunksize=None, max_mem=None):
    """
    Attempts to compute a chunksize for the resampling output array
    that is as close as possible to the input array chunksize, while
    also respecting the maximum memory constraint to avoid loading
    too much data into memory at the same time.

    Parameters
    ----------
    src : dask.array.Array
        The source array to resample
    w : int
        New grid width
    h : int
        New grid height
    chunksize : tuple(int, int) (optional)
        Size of the output chunks. By default the chunk size is
        inherited from the *src* array.
    max_mem : int (optional)
        The maximum number of bytes that should be loaded into memory
        during the regridding operation.

    Returns
    -------
    chunksize : tuple(int, int)
        Size of the output chunks.
    """
    start_chunksize = src.chunksize if chunksize is None else chunksize
    # Without a memory cap the starting chunk size is used as-is.
    if max_mem is None:
        return start_chunksize
    sh, sw = src.shape
    # Ratio of source extent to output extent along each axis: one output
    # cell pulls this many source cells.
    height_fraction = float(sh) / h
    width_fraction = float(sw) / w
    ch, cw = start_chunksize
    dim = True
    nbytes = src.dtype.itemsize
    # Shrink the chunk one cell at a time, alternating width and height,
    # until the source data needed for one output chunk fits in max_mem.
    while ch * height_fraction * (cw * width_fraction) * nbytes > max_mem:
        if dim:
            cw -= 1
        else:
            ch -= 1
        dim = not dim
        # A dimension hitting zero means even a 1-cell chunk exceeds the
        # budget; report the minimum feasible constraint.
        if ch == 0 or cw == 0:
            min_mem = height_fraction * width_fraction * nbytes
            raise ValueError(
                'Given the memory constraints the resampling operation could not find a chunksize that avoids loading too much data into memory. Either relax the memory constraint to a minimum of %d bytes or resample to a larger grid size. Note: A future implementation could handle this condition by declaring temporary arrays.'
                % min_mem)
    return ch, cw
|
musicalgestures
|
musicalgestures//_utils.pyfile:/_utils.py:function:rotate_video/rotate_video
|
def rotate_video(filename, angle):
    """
    Rotates a video by an `angle` using ffmpeg.

    Parameters
    ----------
    - filename : str
        The path to the input video.
    - angle : int or float
        The angle (in degrees) specifying the amount of rotation. Positive values rotate clockwise.

    Outputs
    -------
    - `filename`_rot.avi
        The rotated video file (any pre-existing file with that name is deleted first).

    Returns
    -------
    - tuple (str, str)
        The output base path without extension (``<input basename>_rot``)
        and the ORIGINAL input file's extension. Note the written file
        always ends in '.avi', so joining the two returned pieces does NOT
        give the output path.
    """
    import os
    import math
    of = os.path.splitext(filename)[0]
    fex = os.path.splitext(filename)[1]
    # Remove a stale output so ffmpeg does not stop to ask about overwriting.
    if os.path.isfile(of + '_rot.avi'):
        os.remove(of + '_rot.avi')
    # NOTE(review): the command goes through the shell via os.system, so
    # filenames containing spaces or shell metacharacters will break (and
    # are unsafe); consider subprocess.run with an argument list.
    cmds = ' '.join(['ffmpeg', '-i', filename, '-c:v', 'mjpeg', '-q:v', '3',
        '-vf', f'rotate={math.radians(angle)}', of + '_rot.avi'])
    os.system(cmds)
    return of + '_rot', fex
|
pyipv8-2.1.0
|
pyipv8-2.1.0//ipv8/messaging/lazy_payload.pyfile:/ipv8/messaging/lazy_payload.py:function:_raw_compile/_raw_compile
|
def _raw_compile(f_code):
"""
Compile some code for injection into the locals().
We cheat the systems by giving the f_code as the filename.
This allows programmers to actually see the autogenerated output instead of just "<string>".
:param f_code: the code to compile for execution
:type f_code: str
:return: the compiled code object
:rtype: code
"""
return compile(f_code, '<string>' + f_code, 'exec')
|
pg13-0.2.0
|
pg13-0.2.0//pg13/pg.pyclass:Row/kwinsert
|
@classmethod
def kwinsert(clas, pool_or_cursor, **kwargs):
    """Keyword-argument convenience wrapper around ``insert``.

    ``returning`` is popped from the keyword arguments; every remaining
    key/value pair becomes a field name and its value.
    """
    returning = kwargs.pop('returning', None)
    fields, vals = zip(*kwargs.items())
    return clas.insert(pool_or_cursor, fields, vals, returning=returning)
|
lexor-0.1.5rc0
|
lexor-0.1.5rc0//lexor/core/writer.pyclass:NodeWriter/child
|
@classmethod
def child(cls, _):
    """Called for |Element| nodes that have children.

    Returning a non-``None`` value (``True`` by default) tells the
    |Writer| to keep traversing into the child nodes; subclasses that
    override this and return ``None`` suppress the traversal.
    """
    return True
|
graphite_beacon-0.27.0
|
graphite_beacon-0.27.0//graphite_beacon/handlers/cli.pyfile:/graphite_beacon/handlers/cli.py:function:substitute_variables/substitute_variables
|
def substitute_variables(command, level, name, value, target=None, **kwargs):
    """Replace ``${...}`` placeholders in *command* with their values.

    e.g. ``${level}`` => ``'warning'``; ``${name}`` is wrapped in double
    quotes and ``${limit_value}`` comes from ``kwargs['rule']['value']``.
    """
    rule = kwargs.get('rule', {})
    rule_value = rule.get('value', '') if rule else ''
    replacements = {'${level}': str(level), '${target}': str(target),
        '${name}': '"' + str(name) + '"', '${value}': str(value),
        '${limit_value}': str(rule_value)}
    for placeholder, replacement in replacements.items():
        command = command.replace(placeholder, replacement)
    return command
|
salmon-mail-3.2.0
|
salmon-mail-3.2.0//salmon/routing.pyfile:/salmon/routing.py:function:salmon_setting/salmon_setting
|
def salmon_setting(func, key):
    """Return the salmon setting stored on *func* under *key*, or None."""
    settings = func._salmon_settings
    return settings.get(key)
|
dropbox
|
dropbox//team_log.pyclass:EventDetails/showcase_untrashed_details
|
@classmethod
def showcase_untrashed_details(cls, val):
    """
    Build an ``EventDetails`` instance set to the
    ``showcase_untrashed_details`` tag carrying ``val``.

    :param ShowcaseUntrashedDetails val:
    :rtype: EventDetails
    """
    tag = 'showcase_untrashed_details'
    return cls(tag, val)
|
wx
|
wx//lib/ogl/canvas.pyfile:/lib/ogl/canvas.py:function:WhollyContains/WhollyContains
|
def WhollyContains(contains, contained):
    """Helper function.

    :param `contains`: the containing shape
    :param `contained`: the contained shape
    :returns: `True` if 'contains' wholly contains 'contained'
    """
    def edges(shape):
        # Bounding box (left, top, right, bottom) centred on the shape.
        cx, cy = shape.GetX(), shape.GetY()
        w, h = shape.GetBoundingBoxMax()
        return cx - w / 2.0, cy - h / 2.0, cx + w / 2.0, cy + h / 2.0

    o_left, o_top, o_right, o_bottom = edges(contains)
    i_left, i_top, i_right, i_bottom = edges(contained)
    return (o_left <= i_left and o_top <= i_top and o_right >= i_right and
        o_bottom >= i_bottom)
|
census
|
census//core.pyfile:/core.py:function:chunks/chunks
|
def chunks(l, n):
    """Yield successive n-sized chunks from l.

    The last chunk may be shorter than n when len(l) is not a multiple of n.
    """
    for start in range(0, len(l), n):
        end = start + n
        yield l[start:end]
|
fake-bpy-module-2.80-20200428
|
fake-bpy-module-2.80-20200428//bpy/ops/object.pyfile:/bpy/ops/object.py:function:collection_objects_select/collection_objects_select
|
def collection_objects_select():
    """Select all objects in collection

    Stub from the fake ``bpy`` module (intended for IDE completion and
    type checking); the real operator runs inside Blender, so this body
    is intentionally empty.
    """
    pass
|
yubioath-desktop-3.1.0
|
yubioath-desktop-3.1.0//yubioath/gui/qrdecode.pyfile:/yubioath/gui/qrdecode.py:function:zero_region/zero_region
|
def zero_region(data, x, y, w, h):
    """Fill a w-by-h rectangle at (x, y) of the row-list *data* with zeroes."""
    for row_idx in range(y, y + h):
        row = data[row_idx]
        # Rebuild the row: untouched prefix, zeroed span, untouched suffix.
        data[row_idx] = row[:x] + [0] * w + row[x + w:]
|
EGCG-Core-0.13
|
EGCG-Core-0.13//egcg_core/util.pyfile:/egcg_core/util.py:function:query_dict/query_dict
|
def query_dict(data, query_string, ret_default=None):
    """
    Drill down into a dict using dot notation, e.g.
    ``query_dict({'this': {'that': 'other'}}, 'this.that')``.

    :param dict data: the dict to drill into
    :param str query_string: dot-separated keys naming the path
    :param ret_default: value returned when the path is absent (default None)
    """
    node = data
    for key in query_string.split('.'):
        # Return the default (instead of raising AttributeError) when the
        # path runs into a leaf that is not a dict and cannot be descended.
        if not isinstance(node, dict):
            return ret_default
        node = node.get(key)
        if node is None:
            return ret_default
    return node
|
causaldag-0.1a132
|
causaldag-0.1a132//causaldag/structure_learning/difference/difference_dag.pyfile:/causaldag/structure_learning/difference/difference_dag.py:function:choose_stable_variables/choose_stable_variables
|
def choose_stable_variables(stability_scores, bootstrap_threshold=0.5):
    """Return an adjacency matrix keeping only the edges whose per-column
    maximum stability score exceeds *bootstrap_threshold*."""
    column_max = stability_scores.max(axis=0)
    return (column_max > bootstrap_threshold).astype('float')
|
aws-lambda-decorators-0.48
|
aws-lambda-decorators-0.48//aws_lambda_decorators/utils.pyfile:/aws_lambda_decorators/utils.py:function:find_key_case_insensitive/find_key_case_insensitive
|
def find_key_case_insensitive(key_name, the_dict):
    """
    Find whether a dictionary (the_dict) has a string key (key_name) in any string case

    Args:
        key_name: the key to search in the dictionary
        the_dict: the dictionary to search

    Returns:
        The found key name in its original case, if found. Otherwise, returns the searching key name
    """
    # Lower-case BOTH sides so the lookup is truly case-insensitive even
    # when the caller passes key_name containing upper-case characters
    # (the previous code only lowered the dict keys).
    wanted = key_name.lower()
    for key in the_dict:
        if key.lower() == wanted:
            return key
    return key_name
|
dropbox-10.1.2
|
dropbox-10.1.2//dropbox/team_log.pyclass:EventDetails/legal_holds_report_a_hold_details
|
@classmethod
def legal_holds_report_a_hold_details(cls, val):
    """
    Build an ``EventDetails`` instance set to the
    ``legal_holds_report_a_hold_details`` tag carrying ``val``.

    :param LegalHoldsReportAHoldDetails val:
    :rtype: EventDetails
    """
    tag = 'legal_holds_report_a_hold_details'
    return cls(tag, val)
|
menpocli-0.1.0
|
menpocli-0.1.0//versioneer.pyfile:/versioneer.py:function:render_git_describe/render_git_describe
|
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Like 'git describe --tags --dirty --always'.

    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces['closest-tag']
    if not tag:
        # No reachable tag: fall back to the bare short hash.
        rendered = pieces['short']
    elif pieces['distance']:
        rendered = '%s-%d-g%s' % (tag, pieces['distance'], pieces['short'])
    else:
        rendered = tag
    if pieces['dirty']:
        rendered += '-dirty'
    return rendered
|
lognplot
|
lognplot//time/duration.pyclass:Duration/from_milli_seconds
|
@classmethod
def from_milli_seconds(cls, millis):
    """ Create a duration from the given amount of milli-seconds. """
    seconds = millis * 0.001
    return cls.from_seconds(seconds)
|
aitools
|
aitools//core/utils/word_stemmer.pyfile:/core/utils/word_stemmer.py:function:is_acceptable/is_acceptable
|
def is_acceptable(word, remove_total):
    """
    Determine if the word is acceptable for stemming.
    """
    remaining = len(word) - remove_total
    vowels = 'aeiouy'
    if word[0] in vowels:
        # A vowel-initial word only needs two letters left after removal.
        return remaining >= 2
    # A consonant-initial word needs three letters left and a vowel in the
    # second or third position.
    if remaining >= 3:
        return word[1] in vowels or word[2] in vowels
    return False
|
neshan
|
neshan//convert.pyfile:/convert.py:function:is_string/is_string
|
def is_string(val):
    """Return True when *val* is a ``str`` instance, False otherwise."""
    return isinstance(val, str)
|
fake-bpy-module-2.79-20200428
|
fake-bpy-module-2.79-20200428//bpy/ops/view3d.pyfile:/bpy/ops/view3d.py:function:view_selected/view_selected
|
def view_selected(use_all_regions: bool=False):
    """Move the view to the selection center

    Stub from the fake ``bpy`` module (intended for IDE completion and
    type checking); the real operator runs inside Blender, so this body
    is intentionally empty.

    :param use_all_regions: All Regions, View selected for all regions
    :type use_all_regions: bool
    """
    pass
|
birdhouse-birdy-0.6.9
|
birdhouse-birdy-0.6.9//birdy/client/notebook.pyfile:/birdy/client/notebook.py:function:output2widget/output2widget
|
def output2widget(output):
    """Return notebook widget based on output mime-type.

    Placeholder: no mime-type-to-widget mapping is implemented yet, so
    the function currently always returns None.
    """
    pass
|
gns3-gui-2.2.8
|
gns3-gui-2.2.8//gns3/modules/module.pyclass:Module/getNodeClass
|
@staticmethod
def getNodeClass(node_type, platform=None):
    """
    Return the class corresponding to a node type.

    Abstract hook: concrete modules must overload this; the base
    implementation always raises.

    :param node_type: name of the node
    :param platform: platform (for Dynamips only)
    :returns: class or None
    :raises NotImplementedError: always, on this base implementation
    """
    raise NotImplementedError()
|
melusine
|
melusine//prepare_email/cleaning.pyfile:/prepare_email/cleaning.py:function:remove_apostrophe/remove_apostrophe
|
def remove_apostrophe(text):
    """Replace every apostrophe in *text* with a space."""
    cleaned = text.replace("'", ' ')
    return cleaned
|
guano-1.0.12
|
guano-1.0.12//bin/d500x2guano.pyfile:/bin/d500x2guano.py:function:dms2decimal/dms2decimal
|
def dms2decimal(dms_str):
    """Convert D500X Degrees-Minutes-Seconds ('D M S HEMISPHERE') to
    decimal degrees."""
    degrees, minutes, seconds, direction = dms_str.split()
    magnitude = int(degrees) + float(minutes) / 60 + float(seconds) / 3600
    # South and West are the negative hemispheres.
    if direction in ('S', 'W'):
        return -magnitude
    return magnitude
|
pympress-1.5.3
|
pympress-1.5.3//pympress/extras.pyclass:FileWatcher/stop_daemon
|
@classmethod
def stop_daemon(cls, wait=False):
    """ Stop the watchdog observer thread.

    Args:
        wait (`bool`): whether to wait for the thread to have joined before returning
    """
    observer = cls.observer
    observer.unschedule_all()
    if observer.is_alive():
        observer.stop()
    # Optionally block until the observer thread has fully terminated.
    while wait and observer.is_alive():
        observer.join()
    cls.observer = None
    cls.monitor = None
|
ringcentral_bot_framework
|
ringcentral_bot_framework//core/config_default.pyfile:/core/config_default.py:function:defaultEventHandler/defaultEventHandler
|
def defaultEventHandler(bot, groupId, creatorId, user, text, dbAction,
    handledByExtension, event):
    """
    Default event handler, invoked for events that match none of the
    handlers above; intentionally does nothing.
    """
    return None
|
asyncssh-2.2.1
|
asyncssh-2.2.1//asyncssh/misc.pyfile:/asyncssh/misc.py:function:plural/plural
|
def plural(length, label, suffix='s'):
    """Return a label with an optional plural suffix"""
    tail = '' if length == 1 else suffix
    return '%d %s%s' % (length, label, tail)
|
social
|
social//storage/base.pyclass:NonceMixin/use
|
@classmethod
def use(cls, server_url, timestamp, salt):
    """Create a Nonce instance.

    Abstract hook: storage backends must override this; the base class
    always raises.
    """
    raise NotImplementedError('Implement in subclass')
|
pagebot
|
pagebot//fonttoolbox/unicodes/unicoderanges.pyfile:/fonttoolbox/unicodes/unicoderanges.py:function:unpackRangeBits/unpackRangeBits
|
def unpackRangeBits(ur1, ur2, ur3, ur4):
    """Given the ulUnicodeRange1, ulUnicodeRange2, ulUnicodeRange3,
    ulUnicodeRange4 values from the OS/2 table, return the set of bit
    numbers that are switched on (each field contributes 32 bits).

    >>> unpackRangeBits(0x0, 0x0, 0x0, 0x0)
    set()
    >>> unpackRangeBits(0x1, 0x0, 0x0, 0x0)
    {0}
    >>> unpackRangeBits(0x1, 0x1, 0x1, 0x1)
    {0, 32, 64, 96}
    """
    bitSet = set()
    for fieldIndex, bitField in enumerate((ur1, ur2, ur3, ur4)):
        base = fieldIndex * 32
        for localBitNum in range(32):
            if bitField >> localBitNum & 1:
                bitSet.add(base + localBitNum)
    return bitSet
|
Products.listen-0.7.1
|
Products.listen-0.7.1//Products/listen/interfaces/utilities.pyclass:IMemberLookup/to_memberid
|
def to_memberid(email):
    """Translate the email address to a member id, or None.

    Zope-style interface declaration: the docstring is the whole body;
    implementations of the interface provide the actual behaviour.
    """
|
mxnet-1.6.0.data
|
mxnet-1.6.0.data//purelib/mxnet/ndarray/gen_sparse.pyfile:/purelib/mxnet/ndarray/gen_sparse.py:function:log2/log2
|
def log2(data=None, out=None, name=None, **kwargs):
    """Returns element-wise Base-2 logarithmic value of the input.

    ``2**log2(x) = x``

    The storage type of ``log2`` output is always dense

    Defined in src/operator/tensor/elemwise_unary_op_logexp.cc:L105

    Parameters
    ----------
    data : NDArray
        The input array.
    out : NDArray, optional
        The output NDArray to hold the result.

    Returns
    -------
    out : NDArray or list of NDArrays
        The output of this function.
    """
    # Appears to be an autogenerated placeholder; the real operator is
    # presumably registered at runtime by mxnet's codegen machinery, so
    # this body just returns a dummy one-element tuple.
    return (0,)
|
Clumsy
|
Clumsy//sleep/staging.pyclass:VigilanceIndexPipeline/valid_band
|
@staticmethod
def valid_band(freq, low, high):
    """Return a boolean mask that is True where *freq* lies in [low, high].

    Parameters
    ----------
    freq: np.array like,
        array of frequency values
    low: int,
        lower bound of desired frequency range
    high: int,
        upper bound of desired frequency range

    Returns
    -------
    boolean_array of valid boundaries
    """
    above_low = freq >= low
    below_high = freq <= high
    return above_low & below_high
|
tbee-0.1
|
tbee-0.1//tbee/error_handling.pyfile:/tbee/error_handling.py:function:set_onsite/set_onsite
|
def set_onsite(onsite, tags):
    """
    Check method *set_onsite*.

    :raises TypeError: Parameter onsite must be a dictionary.
    :raises ValueError: Parameter onsite keys must be a tag.
    :raises ValueError: Parameter onsite values must be
      real and/or complex numbers.
    """
    if not isinstance(onsite, dict):
        raise TypeError('\n\nParameter onsite must be a dictionary.\n')
    for tag, val in onsite.items():
        if tag not in tags:
            raise ValueError('\n\nParameter onsite keys must be a tag.\n')
        if isinstance(val, (int, float, complex)):
            continue
        raise ValueError(
            '\n\nParameter onsite values must be\nreal and/or complex numbers.\n'
            )
|
pyboto3-1.4.4
|
pyboto3-1.4.4//pyboto3/swf.pyfile:/pyboto3/swf.py:function:respond_decision_task_completed/respond_decision_task_completed
|
def respond_decision_task_completed(taskToken=None, decisions=None,
executionContext=None):
"""
Used by deciders to tell the service that the DecisionTask identified by the taskToken has successfully completed. The decisions argument specifies the list of decisions made while processing the task.
A DecisionTaskCompleted event is added to the workflow history. The executionContext specified is attached to the event in the workflow execution history.
Access Control
If an IAM policy grants permission to use RespondDecisionTaskCompleted , it can express permissions for the list of decisions in the decisions parameter. Each of the decisions has one or more parameters, much like a regular API call. To allow for policies to be as readable as possible, you can express permissions on decisions as if they were actual API calls, including applying conditions to some parameters. For more information, see Using IAM to Manage Access to Amazon SWF Workflows .
See also: AWS API Documentation
:example: response = client.respond_decision_task_completed(
taskToken='string',
decisions=[
{
'decisionType': 'ScheduleActivityTask'|'RequestCancelActivityTask'|'CompleteWorkflowExecution'|'FailWorkflowExecution'|'CancelWorkflowExecution'|'ContinueAsNewWorkflowExecution'|'RecordMarker'|'StartTimer'|'CancelTimer'|'SignalExternalWorkflowExecution'|'RequestCancelExternalWorkflowExecution'|'StartChildWorkflowExecution'|'ScheduleLambdaFunction',
'scheduleActivityTaskDecisionAttributes': {
'activityType': {
'name': 'string',
'version': 'string'
},
'activityId': 'string',
'control': 'string',
'input': 'string',
'scheduleToCloseTimeout': 'string',
'taskList': {
'name': 'string'
},
'taskPriority': 'string',
'scheduleToStartTimeout': 'string',
'startToCloseTimeout': 'string',
'heartbeatTimeout': 'string'
},
'requestCancelActivityTaskDecisionAttributes': {
'activityId': 'string'
},
'completeWorkflowExecutionDecisionAttributes': {
'result': 'string'
},
'failWorkflowExecutionDecisionAttributes': {
'reason': 'string',
'details': 'string'
},
'cancelWorkflowExecutionDecisionAttributes': {
'details': 'string'
},
'continueAsNewWorkflowExecutionDecisionAttributes': {
'input': 'string',
'executionStartToCloseTimeout': 'string',
'taskList': {
'name': 'string'
},
'taskPriority': 'string',
'taskStartToCloseTimeout': 'string',
'childPolicy': 'TERMINATE'|'REQUEST_CANCEL'|'ABANDON',
'tagList': [
'string',
],
'workflowTypeVersion': 'string',
'lambdaRole': 'string'
},
'recordMarkerDecisionAttributes': {
'markerName': 'string',
'details': 'string'
},
'startTimerDecisionAttributes': {
'timerId': 'string',
'control': 'string',
'startToFireTimeout': 'string'
},
'cancelTimerDecisionAttributes': {
'timerId': 'string'
},
'signalExternalWorkflowExecutionDecisionAttributes': {
'workflowId': 'string',
'runId': 'string',
'signalName': 'string',
'input': 'string',
'control': 'string'
},
'requestCancelExternalWorkflowExecutionDecisionAttributes': {
'workflowId': 'string',
'runId': 'string',
'control': 'string'
},
'startChildWorkflowExecutionDecisionAttributes': {
'workflowType': {
'name': 'string',
'version': 'string'
},
'workflowId': 'string',
'control': 'string',
'input': 'string',
'executionStartToCloseTimeout': 'string',
'taskList': {
'name': 'string'
},
'taskPriority': 'string',
'taskStartToCloseTimeout': 'string',
'childPolicy': 'TERMINATE'|'REQUEST_CANCEL'|'ABANDON',
'tagList': [
'string',
],
'lambdaRole': 'string'
},
'scheduleLambdaFunctionDecisionAttributes': {
'id': 'string',
'name': 'string',
'input': 'string',
'startToCloseTimeout': 'string'
}
},
],
executionContext='string'
)
:type taskToken: string
:param taskToken: [REQUIRED]
The taskToken from the DecisionTask .
Warning
taskToken is generated by the service and should be treated as an opaque value. If the task is passed to another process, its taskToken must also be passed. This enables it to provide its progress and respond with results.
:type decisions: list
:param decisions: The list of decisions (possibly empty) made by the decider while processing this decision task. See the docs for the decision structure for details.
(dict) --Specifies a decision made by the decider. A decision can be one of these types:
CancelTimer : cancels a previously started timer and records a TimerCanceled event in the history.
CancelWorkflowExecution : closes the workflow execution and records a WorkflowExecutionCanceled event in the history.
CompleteWorkflowExecution : closes the workflow execution and records a WorkflowExecutionCompleted event in the history .
ContinueAsNewWorkflowExecution : closes the workflow execution and starts a new workflow execution of the same type using the same workflow ID and a unique run ID. A WorkflowExecutionContinuedAsNew event is recorded in the history.
FailWorkflowExecution : closes the workflow execution and records a WorkflowExecutionFailed event in the history.
RecordMarker : records a MarkerRecorded event in the history. Markers can be used for adding custom information in the history for instance to let deciders know that they do not need to look at the history beyond the marker event.
RequestCancelActivityTask : attempts to cancel a previously scheduled activity task. If the activity task was scheduled but has not been assigned to a worker, then it will be canceled. If the activity task was already assigned to a worker, then the worker will be informed that cancellation has been requested in the response to RecordActivityTaskHeartbeat .
RequestCancelExternalWorkflowExecution : requests that a request be made to cancel the specified external workflow execution and records a RequestCancelExternalWorkflowExecutionInitiated event in the history.
ScheduleActivityTask : schedules an activity task.
ScheduleLambdaFunction : schedules a AWS Lambda function.
SignalExternalWorkflowExecution : requests a signal to be delivered to the specified external workflow execution and records a SignalExternalWorkflowExecutionInitiated event in the history.
StartChildWorkflowExecution : requests that a child workflow execution be started and records a StartChildWorkflowExecutionInitiated event in the history. The child workflow execution is a separate workflow execution with its own history.
StartTimer : starts a timer for this workflow execution and records a TimerStarted event in the history. This timer will fire after the specified delay and record a TimerFired event.
Access Control
If you grant permission to use RespondDecisionTaskCompleted , you can use IAM policies to express permissions for the list of decisions returned by this action as if they were members of the API. Treating decisions as a pseudo API maintains a uniform conceptual model and helps keep policies readable. For details and example IAM policies, see Using IAM to Manage Access to Amazon SWF Workflows .
Decision Failure
Decisions can fail for several reasons
The ordering of decisions should follow a logical flow. Some decisions might not make sense in the current context of the workflow execution and will therefore fail.
A limit on your account was reached.
The decision lacks sufficient permissions.
One of the following events might be added to the history to indicate an error. The event attribute's cause parameter indicates the cause. If cause is set to OPERATION_NOT_PERMITTED, the decision failed because it lacked sufficient permissions. For details and example IAM policies, see Using IAM to Manage Access to Amazon SWF Workflows .
ScheduleActivityTaskFailed : a ScheduleActivityTask decision failed. This could happen if the activity type specified in the decision is not registered, is in a deprecated state, or the decision is not properly configured.
ScheduleLambdaFunctionFailed : a ScheduleLambdaFunctionFailed decision failed. This could happen if the AWS Lambda function specified in the decision does not exist, or the AWS Lambda service's limits are exceeded.
RequestCancelActivityTaskFailed : a RequestCancelActivityTask decision failed. This could happen if there is no open activity task with the specified activityId.
StartTimerFailed : a StartTimer decision failed. This could happen if there is another open timer with the same timerId.
CancelTimerFailed : a CancelTimer decision failed. This could happen if there is no open timer with the specified timerId.
StartChildWorkflowExecutionFailed : a StartChildWorkflowExecution decision failed. This could happen if the workflow type specified is not registered, is deprecated, or the decision is not properly configured.
SignalExternalWorkflowExecutionFailed : a SignalExternalWorkflowExecution decision failed. This could happen if the workflowID specified in the decision was incorrect.
RequestCancelExternalWorkflowExecutionFailed : a RequestCancelExternalWorkflowExecution decision failed. This could happen if the workflowID specified in the decision was incorrect.
CancelWorkflowExecutionFailed : a CancelWorkflowExecution decision failed. This could happen if there is an unhandled decision task pending in the workflow execution.
CompleteWorkflowExecutionFailed : a CompleteWorkflowExecution decision failed. This could happen if there is an unhandled decision task pending in the workflow execution.
ContinueAsNewWorkflowExecutionFailed : a ContinueAsNewWorkflowExecution decision failed. This could happen if there is an unhandled decision task pending in the workflow execution or the ContinueAsNewWorkflowExecution decision was not configured correctly.
FailWorkflowExecutionFailed : a FailWorkflowExecution decision failed. This could happen if there is an unhandled decision task pending in the workflow execution.
The preceding error events might occur due to an error in the decider logic, which might put the workflow execution in an unstable state The cause field in the event structure for the error event indicates the cause of the error.
Note
A workflow execution may be closed by the decider by returning one of the following decisions when completing a decision task: CompleteWorkflowExecution , FailWorkflowExecution , CancelWorkflowExecution and ContinueAsNewWorkflowExecution . An UnhandledDecision fault will be returned if a workflow closing decision is specified and a signal or activity event had been added to the history while the decision task was being performed by the decider. Unlike the above situations which are logic issues, this fault is always possible because of race conditions in a distributed system. The right action here is to call RespondDecisionTaskCompleted without any decisions. This would result in another decision task with these new events included in the history. The decider should handle the new events and may decide to close the workflow execution.
How to code a decision
You code a decision by first setting the decision type field to one of the above decision values, and then set the corresponding attributes field shown below:
ScheduleActivityTaskDecisionAttributes
ScheduleLambdaFunctionDecisionAttributes
RequestCancelActivityTaskDecisionAttributes
CompleteWorkflowExecutionDecisionAttributes
FailWorkflowExecutionDecisionAttributes
CancelWorkflowExecutionDecisionAttributes
ContinueAsNewWorkflowExecutionDecisionAttributes
RecordMarkerDecisionAttributes
StartTimerDecisionAttributes
CancelTimerDecisionAttributes
SignalExternalWorkflowExecutionDecisionAttributes
RequestCancelExternalWorkflowExecutionDecisionAttributes
StartChildWorkflowExecutionDecisionAttributes
decisionType (string) -- [REQUIRED]Specifies the type of the decision.
scheduleActivityTaskDecisionAttributes (dict) --Provides details of the ScheduleActivityTask decision. It is not set for other decision types.
activityType (dict) -- [REQUIRED]Required. The type of the activity task to schedule.
name (string) -- [REQUIRED]The name of this activity.
Note
The combination of activity type name and version must be unique within a domain.
version (string) -- [REQUIRED]The version of this activity.
Note
The combination of activity type name and version must be unique with in a domain.
activityId (string) -- [REQUIRED]Required. The activityId of the activity task.
The specified string must not start or end with whitespace. It must not contain a : (colon), / (slash), | (vertical bar), or any control characters (u0000-u001f | u007f - u009f). Also, it must not contain the literal string quotarnquot.
control (string) --Optional. Data attached to the event that can be used by the decider in subsequent workflow tasks. This data is not sent to the activity.
input (string) --The input provided to the activity task.
scheduleToCloseTimeout (string) --The maximum duration for this activity task.
The duration is specified in seconds; an integer greater than or equal to 0. The value 'NONE' can be used to specify unlimited duration.
Note
A schedule-to-close timeout for this activity task must be specified either as a default for the activity type or through this field. If neither this field is set nor a default schedule-to-close timeout was specified at registration time then a fault will be returned.
taskList (dict) --If set, specifies the name of the task list in which to schedule the activity task. If not specified, the defaultTaskList registered with the activity type will be used.
Note
A task list for this activity task must be specified either as a default for the activity type or through this field. If neither this field is set nor a default task list was specified at registration time then a fault will be returned.
The specified string must not start or end with whitespace. It must not contain a : (colon), / (slash), | (vertical bar), or any control characters (u0000-u001f | u007f - u009f). Also, it must not contain the literal string quotarnquot.
name (string) -- [REQUIRED]The name of the task list.
taskPriority (string) --Optional. If set, specifies the priority with which the activity task is to be assigned to a worker. This overrides the defaultTaskPriority specified when registering the activity type using RegisterActivityType . Valid values are integers that range from Java's Integer.MIN_VALUE (-2147483648) to Integer.MAX_VALUE (2147483647). Higher numbers indicate higher priority.
For more information about setting task priority, see Setting Task Priority in the Amazon Simple Workflow Developer Guide .
scheduleToStartTimeout (string) --Optional. If set, specifies the maximum duration the activity task can wait to be assigned to a worker. This overrides the default schedule-to-start timeout specified when registering the activity type using RegisterActivityType .
The duration is specified in seconds; an integer greater than or equal to 0. The value 'NONE' can be used to specify unlimited duration.
Note
A schedule-to-start timeout for this activity task must be specified either as a default for the activity type or through this field. If neither this field is set nor a default schedule-to-start timeout was specified at registration time then a fault will be returned.
startToCloseTimeout (string) --If set, specifies the maximum duration a worker may take to process this activity task. This overrides the default start-to-close timeout specified when registering the activity type using RegisterActivityType .
The duration is specified in seconds; an integer greater than or equal to 0. The value 'NONE' can be used to specify unlimited duration.
Note
A start-to-close timeout for this activity task must be specified either as a default for the activity type or through this field. If neither this field is set nor a default start-to-close timeout was specified at registration time then a fault will be returned.
heartbeatTimeout (string) --If set, specifies the maximum time before which a worker processing a task of this type must report progress by calling RecordActivityTaskHeartbeat . If the timeout is exceeded, the activity task is automatically timed out. If the worker subsequently attempts to record a heartbeat or returns a result, it will be ignored. This overrides the default heartbeat timeout specified when registering the activity type using RegisterActivityType .
The duration is specified in seconds; an integer greater than or equal to 0. The value 'NONE' can be used to specify unlimited duration.
requestCancelActivityTaskDecisionAttributes (dict) --Provides details of the RequestCancelActivityTask decision. It is not set for other decision types.
activityId (string) -- [REQUIRED]The activityId of the activity task to be canceled.
completeWorkflowExecutionDecisionAttributes (dict) --Provides details of the CompleteWorkflowExecution decision. It is not set for other decision types.
result (string) --The result of the workflow execution. The form of the result is implementation defined.
failWorkflowExecutionDecisionAttributes (dict) --Provides details of the FailWorkflowExecution decision. It is not set for other decision types.
reason (string) --A descriptive reason for the failure that may help in diagnostics.
details (string) --Optional. Details of the failure.
cancelWorkflowExecutionDecisionAttributes (dict) --Provides details of the CancelWorkflowExecution decision. It is not set for other decision types.
details (string) --Optional. details of the cancellation.
continueAsNewWorkflowExecutionDecisionAttributes (dict) --Provides details of the ContinueAsNewWorkflowExecution decision. It is not set for other decision types.
input (string) --The input provided to the new workflow execution.
executionStartToCloseTimeout (string) --If set, specifies the total duration for this workflow execution. This overrides the defaultExecutionStartToCloseTimeout specified when registering the workflow type.
The duration is specified in seconds; an integer greater than or equal to 0. The value 'NONE' can be used to specify unlimited duration.
Note
An execution start-to-close timeout for this workflow execution must be specified either as a default for the workflow type or through this field. If neither this field is set nor a default execution start-to-close timeout was specified at registration time then a fault will be returned.
taskList (dict) --Represents a task list.
name (string) -- [REQUIRED]The name of the task list.
taskPriority (string) --Optional. The task priority that, if set, specifies the priority for the decision tasks for this workflow execution. This overrides the defaultTaskPriority specified when registering the workflow type. Valid values are integers that range from Java's Integer.MIN_VALUE (-2147483648) to Integer.MAX_VALUE (2147483647). Higher numbers indicate higher priority.
For more information about setting task priority, see Setting Task Priority in the Amazon Simple Workflow Developer Guide .
taskStartToCloseTimeout (string) --Specifies the maximum duration of decision tasks for the new workflow execution. This parameter overrides the defaultTaskStartToCloseTimout specified when registering the workflow type using RegisterWorkflowType .
The duration is specified in seconds; an integer greater than or equal to 0. The value 'NONE' can be used to specify unlimited duration.
Note
A task start-to-close timeout for the new workflow execution must be specified either as a default for the workflow type or through this parameter. If neither this parameter is set nor a default task start-to-close timeout was specified at registration time then a fault will be returned.
childPolicy (string) --If set, specifies the policy to use for the child workflow executions of the new execution if it is terminated by calling the TerminateWorkflowExecution action explicitly or due to an expired timeout. This policy overrides the default child policy specified when registering the workflow type using RegisterWorkflowType .
The supported child policies are:
TERMINATE: the child executions will be terminated.
REQUEST_CANCEL: a request to cancel will be attempted for each child execution by recording a WorkflowExecutionCancelRequested event in its history. It is up to the decider to take appropriate actions when it receives an execution history with this event.
ABANDON: no action will be taken. The child executions will continue to run.
Note
A child policy for this workflow execution must be specified either as a default for the workflow type or through this parameter. If neither this parameter is set nor a default child policy was specified at registration time then a fault will be returned.
tagList (list) --The list of tags to associate with the new workflow execution. A maximum of 5 tags can be specified. You can list workflow executions with a specific tag by calling ListOpenWorkflowExecutions or ListClosedWorkflowExecutions and specifying a TagFilter .
(string) --
workflowTypeVersion (string) --
lambdaRole (string) --The ARN of an IAM role that authorizes Amazon SWF to invoke AWS Lambda functions.
Note
In order for this workflow execution to invoke AWS Lambda functions, an appropriate IAM role must be specified either as a default for the workflow type or through this field.
recordMarkerDecisionAttributes (dict) --Provides details of the RecordMarker decision. It is not set for other decision types.
markerName (string) -- [REQUIRED]Required. The name of the marker.
details (string) --Optional. details of the marker.
startTimerDecisionAttributes (dict) --Provides details of the StartTimer decision. It is not set for other decision types.
timerId (string) -- [REQUIRED]Required. The unique ID of the timer.
The specified string must not start or end with whitespace. It must not contain a : (colon), / (slash), | (vertical bar), or any control characters (\u0000-\u001f | \u007f-\u009f). Also, it must not contain the literal string "arn".
control (string) --Optional. Data attached to the event that can be used by the decider in subsequent workflow tasks.
startToFireTimeout (string) -- [REQUIRED]Required. The duration to wait before firing the timer.
The duration is specified in seconds; an integer greater than or equal to 0.
cancelTimerDecisionAttributes (dict) --Provides details of the CancelTimer decision. It is not set for other decision types.
timerId (string) -- [REQUIRED]Required. The unique ID of the timer to cancel.
signalExternalWorkflowExecutionDecisionAttributes (dict) --Provides details of the SignalExternalWorkflowExecution decision. It is not set for other decision types.
workflowId (string) -- [REQUIRED]Required. The workflowId of the workflow execution to be signaled.
runId (string) --The runId of the workflow execution to be signaled.
signalName (string) -- [REQUIRED]Required. The name of the signal.The target workflow execution will use the signal name and input to process the signal.
input (string) --Optional. Input data to be provided with the signal. The target workflow execution will use the signal name and input data to process the signal.
control (string) --Optional. Data attached to the event that can be used by the decider in subsequent decision tasks.
requestCancelExternalWorkflowExecutionDecisionAttributes (dict) --Provides details of the RequestCancelExternalWorkflowExecution decision. It is not set for other decision types.
workflowId (string) -- [REQUIRED]Required. The workflowId of the external workflow execution to cancel.
runId (string) --The runId of the external workflow execution to cancel.
control (string) --Optional. Data attached to the event that can be used by the decider in subsequent workflow tasks.
startChildWorkflowExecutionDecisionAttributes (dict) --Provides details of the StartChildWorkflowExecution decision. It is not set for other decision types.
workflowType (dict) -- [REQUIRED]Required. The type of the workflow execution to be started.
name (string) -- [REQUIRED]Required. The name of the workflow type.
Note
The combination of workflow type name and version must be unique with in a domain.
version (string) -- [REQUIRED]Required. The version of the workflow type.
Note
The combination of workflow type name and version must be unique with in a domain.
workflowId (string) -- [REQUIRED]Required. The workflowId of the workflow execution.
The specified string must not start or end with whitespace. It must not contain a : (colon), / (slash), | (vertical bar), or any control characters (\u0000-\u001f | \u007f-\u009f). Also, it must not contain the literal string "arn".
control (string) --Optional. Data attached to the event that can be used by the decider in subsequent workflow tasks. This data is not sent to the child workflow execution.
input (string) --The input to be provided to the workflow execution.
executionStartToCloseTimeout (string) --The total duration for this workflow execution. This overrides the defaultExecutionStartToCloseTimeout specified when registering the workflow type.
The duration is specified in seconds; an integer greater than or equal to 0. The value 'NONE' can be used to specify unlimited duration.
Note
An execution start-to-close timeout for this workflow execution must be specified either as a default for the workflow type or through this parameter. If neither this parameter is set nor a default execution start-to-close timeout was specified at registration time then a fault will be returned.
taskList (dict) --The name of the task list to be used for decision tasks of the child workflow execution.
Note
A task list for this workflow execution must be specified either as a default for the workflow type or through this parameter. If neither this parameter is set nor a default task list was specified at registration time then a fault will be returned.
The specified string must not start or end with whitespace. It must not contain a : (colon), / (slash), | (vertical bar), or any control characters (\u0000-\u001f | \u007f-\u009f). Also, it must not contain the literal string "arn".
name (string) -- [REQUIRED]The name of the task list.
taskPriority (string) --Optional. A task priority that, if set, specifies the priority for a decision task of this workflow execution. This overrides the defaultTaskPriority specified when registering the workflow type. Valid values are integers that range from Java's Integer.MIN_VALUE (-2147483648) to Integer.MAX_VALUE (2147483647). Higher numbers indicate higher priority.
For more information about setting task priority, see Setting Task Priority in the Amazon Simple Workflow Developer Guide .
taskStartToCloseTimeout (string) --Specifies the maximum duration of decision tasks for this workflow execution. This parameter overrides the defaultTaskStartToCloseTimout specified when registering the workflow type using RegisterWorkflowType .
The duration is specified in seconds; an integer greater than or equal to 0. The value 'NONE' can be used to specify unlimited duration.
Note
A task start-to-close timeout for this workflow execution must be specified either as a default for the workflow type or through this parameter. If neither this parameter is set nor a default task start-to-close timeout was specified at registration time then a fault will be returned.
childPolicy (string) --Optional. If set, specifies the policy to use for the child workflow executions if the workflow execution being started is terminated by calling the TerminateWorkflowExecution action explicitly or due to an expired timeout. This policy overrides the default child policy specified when registering the workflow type using RegisterWorkflowType .
The supported child policies are:
TERMINATE: the child executions will be terminated.
REQUEST_CANCEL: a request to cancel will be attempted for each child execution by recording a WorkflowExecutionCancelRequested event in its history. It is up to the decider to take appropriate actions when it receives an execution history with this event.
ABANDON: no action will be taken. The child executions will continue to run.
Note
A child policy for this workflow execution must be specified either as a default for the workflow type or through this parameter. If neither this parameter is set nor a default child policy was specified at registration time then a fault will be returned.
tagList (list) --The list of tags to associate with the child workflow execution. A maximum of 5 tags can be specified. You can list workflow executions with a specific tag by calling ListOpenWorkflowExecutions or ListClosedWorkflowExecutions and specifying a TagFilter .
(string) --
lambdaRole (string) --The ARN of an IAM role that authorizes Amazon SWF to invoke AWS Lambda functions.
Note
In order for this workflow execution to invoke AWS Lambda functions, an appropriate IAM role must be specified either as a default for the workflow type or through this field.
scheduleLambdaFunctionDecisionAttributes (dict) --Provides details of the ScheduleLambdaFunction decision.
Access Control
You can use IAM policies to control this decision's access to Amazon SWF resources as follows:
Use a Resource element with the domain name to limit the action to only specified domains.
Use an Action element to allow or deny permission to call this action.
Constrain the following parameters by using a Condition element with the appropriate keys.
activityType.name : String constraint. The key is swf:activityType.name .
activityType.version : String constraint. The key is swf:activityType.version .
taskList : String constraint. The key is swf:taskList.name .
If the caller does not have sufficient permissions to invoke the action, or the parameter values fall outside the specified constraints, the action fails. The associated event attribute's cause parameter will be set to OPERATION_NOT_PERMITTED. For details and example IAM policies, see Using IAM to Manage Access to Amazon SWF Workflows .
id (string) -- [REQUIRED]Required. The SWF id of the AWS Lambda task.
The specified string must not start or end with whitespace. It must not contain a : (colon), / (slash), | (vertical bar), or any control characters (\u0000-\u001f | \u007f-\u009f). Also, it must not contain the literal string "arn".
name (string) -- [REQUIRED]Required. The name of the AWS Lambda function to invoke.
input (string) --The input provided to the AWS Lambda function.
startToCloseTimeout (string) --If set, specifies the maximum duration the function may take to execute.
:type executionContext: string
:param executionContext: User defined context to add to workflow execution.
"""
pass
|
spruceup-2020.2.19
|
spruceup-2020.2.19//spruceup/spruceup.pyfile:/spruceup/spruceup.py:function:dists_per_taxon/dists_per_taxon
|
def dists_per_taxon(means_tuple_list):
    """Group mean pairwise distances by taxon across alignments.

    :param means_tuple_list: list of tuples
        ``(alignment name, {taxon: mean distance within alignment})``.
    :return: dict mapping each taxon to a *list* of
        ``(alignment name, mean distance)`` tuples, with alignments
        visited in sorted order of alignment name.
    """
    taxa_dists = {}
    # Sort so each taxon's list is ordered by alignment name.
    for aln_name, dist_dict in sorted(means_tuple_list):
        for sp, mean_dist in dist_dict.items():
            # setdefault replaces the explicit `sp not in taxa_dists.keys()`
            # membership test and the two-branch append.
            taxa_dists.setdefault(sp, []).append((aln_name, mean_dist))
    return taxa_dists
|
aggregation_builder-0.0.4
|
aggregation_builder-0.0.4//aggregation_builder/operators/string.pyfile:/aggregation_builder/operators/string.py:function:CONCAT/CONCAT
|
def CONCAT(*expressions):
    """
    Build the MongoDB ``$concat`` aggregation operator, which
    concatenates the given string expressions into a single string.

    See https://docs.mongodb.com/manual/reference/operator/aggregation/concat/
    for more details.

    :param expressions: string expressions (literals, variables or
        field references) to concatenate, in order
    :return: the ``$concat`` aggregation operator document
    """
    return {'$concat': [*expressions]}
|
BTrees
|
BTrees//Interfaces.pyclass:IDictionaryIsh/update
|
def update(collection):
    """Add the items from the given collection object to the collection.

    Interface method (documentation-only stub; implementations provide
    the body).

    The input collection must be a sequence of (key, value) 2-tuples,
    or an object with an 'items' method that returns a sequence of
    (key, value) pairs.
    """
|
pygorithm
|
pygorithm//math/conversion.pyfile:/math/conversion.py:function:binary_to_decimal/binary_to_decimal
|
def binary_to_decimal(number):
    """
    Convert a binary number to its decimal (base-10) value.

    :param number: binary number as a string or integer (e.g. '1011' or 1011)
    :return: integer equivalent of the binary number

    Algo:
        1. Walk the digits from least significant to most significant.
        2. Multiply each digit by 2**position and sum the products.
    """
    # Reverse the digit string so index i carries the weight 2**i;
    # a generator with sum() avoids materializing the per-digit list.
    return sum(int(digit) * 2 ** i
               for i, digit in enumerate(str(number)[::-1]))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.