| text: string (length 89 to 104k) | avg_line_len: float64 (7.91 to 980) | score: float64 (0 to 630) |
|---|---|---|
def compare(items, params, rank=False):
"""Generate a comparison outcome that follows Luce's axiom.
This function samples an outcome for the comparison of a subset of items,
from a model parametrized by ``params``. If ``rank`` is True, it returns a
ranking over the items, otherwise it returns a single item.
Parameters
----------
items : list
Subset of items to compare.
params : array_like
Model parameters.
rank : bool, optional
If true, returns a ranking over the items instead of a single item.
Returns
-------
outcome : int or list of int
The chosen item, or a ranking over ``items``.
"""
probs = probabilities(items, params)
if rank:
return np.random.choice(items, size=len(items), replace=False, p=probs)
else:
return np.random.choice(items, p=probs)
avg_line_len: 32.615385 | score: 21.769231
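A quick way to exercise ``compare`` is with a stand-in ``probabilities`` helper. The sketch below assumes Luce choice probabilities computed from per-item log-strengths; the real helper ships with the library this sample comes from, so this stand-in is purely illustrative:

```python
import numpy as np

def probabilities(items, params):
    # Hypothetical stand-in: Luce choice probabilities from log-strengths.
    strengths = np.exp(np.asarray(params)[items])
    return strengths / strengths.sum()

params = [0.0, 1.2, -0.4, 0.5]                 # one log-strength per item
print(compare([0, 1, 3], params))              # a single winner, e.g. 1
print(compare([0, 1, 3], params, rank=True))   # a ranking, e.g. [1 3 0]
```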
def sign_data(self, name, hash_input, key_version=None, hash_algorithm="sha2-256", context="", prehashed=False,
signature_algorithm="pss", mount_point=DEFAULT_MOUNT_POINT):
"""Return the cryptographic signature of the given data using the named key and the specified hash algorithm.
The key must be of a type that supports signing.
Supported methods:
POST: /{mount_point}/sign/{name}(/{hash_algorithm}). Produces: 200 application/json
:param name: Specifies the name of the encryption key to use for signing. This is specified as part of the URL.
:type name: str | unicode
:param hash_input: Specifies the base64 encoded input data.
:type hash_input: str | unicode
:param key_version: Specifies the version of the key to use for signing. If not set, uses the latest version.
Must be greater than or equal to the key's min_encryption_version, if set.
:type key_version: int
:param hash_algorithm: Specifies the hash algorithm to use for supporting key types (notably, not including
ed25519 which specifies its own hash algorithm). This can also be specified as part of the URL.
Currently-supported algorithms are: sha2-224, sha2-256, sha2-384, sha2-512
:type hash_algorithm: str | unicode
:param context: Base64 encoded context for key derivation. Required if key derivation is enabled; currently only
available with ed25519 keys.
:type context: str | unicode
:param prehashed: Set to true when the input is already hashed. If the key type is rsa-2048 or rsa-4096, then
the algorithm used to hash the input should be indicated by the hash_algorithm parameter. Just as the value
to sign should be the base64-encoded representation of the exact binary data you want signed, when set, input
is expected to be base64-encoded binary hashed data, not hex-formatted. (As an example, on the command line,
you could generate a suitable input via openssl dgst -sha256 -binary | base64.)
:type prehashed: bool
        :param signature_algorithm: When using an RSA key, specifies the RSA signature algorithm to use for signing.
Supported signature types are: pss, pkcs1v15
:type signature_algorithm: str | unicode
:param mount_point: The "path" the method/backend was mounted on.
:type mount_point: str | unicode
:return: The JSON response of the request.
:rtype: requests.Response
"""
if hash_algorithm not in transit_constants.ALLOWED_HASH_DATA_ALGORITHMS:
error_msg = 'invalid hash_algorithm argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=hash_algorithm,
allowed_types=', '.join(transit_constants.ALLOWED_HASH_DATA_ALGORITHMS),
))
if signature_algorithm not in transit_constants.ALLOWED_SIGNATURE_ALGORITHMS:
error_msg = 'invalid signature_algorithm argument provided "{arg}", supported types: "{allowed_types}"'
raise exceptions.ParamValidationError(error_msg.format(
arg=signature_algorithm,
allowed_types=', '.join(transit_constants.ALLOWED_SIGNATURE_ALGORITHMS),
))
params = {
'input': hash_input,
'key_version': key_version,
'hash_algorithm': hash_algorithm,
'context': context,
'prehashed': prehashed,
'signature_algorithm': signature_algorithm,
}
api_path = '/v1/{mount_point}/sign/{name}'.format(
mount_point=mount_point,
name=name,
)
response = self._adapter.post(
url=api_path,
json=params,
)
return response.json()
avg_line_len: 58.515152 | score: 30.939394
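This reads like the hvac client's Transit secrets engine; under that assumption, a usage sketch (URL, token, and key name are placeholders):

```python
import base64

import hvac  # assuming this method lives on hvac's Transit secrets engine

client = hvac.Client(url='https://vault.example.com:8200', token='hvs.placeholder')
hash_input = base64.b64encode(b'important message').decode('ascii')  # must be base64
resp = client.secrets.transit.sign_data(name='my-signing-key', hash_input=hash_input)
print(resp['data']['signature'])  # e.g. 'vault:v1:MEUCIQ...'
```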
def split_template_path(template):
"""Split a path into segments and perform a sanity check. If it detects
'..' in the path it will raise a `TemplateNotFound` error.
"""
pieces = []
for piece in template.split('/'):
if path.sep in piece \
or (path.altsep and path.altsep in piece) or \
piece == path.pardir:
raise TemplateNotFound(template)
elif piece and piece != '.':
pieces.append(piece)
return pieces
avg_line_len: 36.846154 | score: 10
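Assuming ``path`` is ``os.path`` and ``TemplateNotFound`` is Jinja2's exception (this looks like Jinja2's loader helper), the behaviour is:

```python
from os import path
from jinja2.exceptions import TemplateNotFound  # assumed origin of this helper

print(split_template_path('emails/./welcome.html'))  # ['emails', 'welcome.html']
try:
    split_template_path('../secrets.txt')            # '..' segments are rejected
except TemplateNotFound as exc:
    print('rejected:', exc.name)
```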
def values(self):
"""
TimeSeries of values.
"""
# if accessing and stale - update first
if self._needupdate or self.now != self.parent.now:
self.update(self.root.now)
if self.root.stale:
self.root.update(self.root.now, None)
return self._values.loc[:self.now]
avg_line_len: 33 | score: 8.6
def _finalize_step(self):
"""Finalize simulation step after all agents have acted for the current
step.
"""
t = time.time()
if self._callback is not None:
self._callback(self.age)
t2 = time.time()
self._step_processing_time += t2 - t
self._log(logging.INFO, "Step {} run in: {:.3f}s ({:.3f}s of "
"actual processing time used)"
.format(self.age, self._step_processing_time,
t2 - self._step_start_time))
self._processing_time += self._step_processing_time
avg_line_len: 41.928571 | score: 12.785714
def read_config(config_path=default_config_path):
"""
Read configuration file and produce a dictionary of the following structure:
{'<instance1>': {'username': '<user>', 'password': '<pass>',
                     'verify': <True/False>, 'cert': '<path-to-cert>'},
'<instance2>': {...},
...}
Format of the file:
[https://artifactory-instance.local/artifactory]
username = foo
password = @dmin
verify = false
cert = ~/path-to-cert
config-path - specifies where to read the config from
"""
config_path = os.path.expanduser(config_path)
if not os.path.isfile(config_path):
raise OSError(errno.ENOENT,
"Artifactory configuration file not found: '%s'" %
config_path)
p = configparser.ConfigParser()
p.read(config_path)
result = {}
for section in p.sections():
username = p.get(section, 'username') if p.has_option(section, 'username') else None
password = p.get(section, 'password') if p.has_option(section, 'password') else None
verify = p.getboolean(section, 'verify') if p.has_option(section, 'verify') else True
cert = p.get(section, 'cert') if p.has_option(section, 'cert') else None
result[section] = {'username': username,
'password': password,
'verify': verify,
'cert': cert}
# certificate path may contain '~', and we'd better expand it properly
if result[section]['cert']:
result[section]['cert'] = \
os.path.expanduser(result[section]['cert'])
return result
avg_line_len: 37.318182 | score: 21.772727
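A round trip using the documented file format, written to a temporary file (the instance URL is a placeholder):

```python
import tempfile
import textwrap

with tempfile.NamedTemporaryFile('w', suffix='.cfg', delete=False) as fh:
    fh.write(textwrap.dedent("""\
        [https://artifactory-instance.local/artifactory]
        username = foo
        password = @dmin
        verify = false
        cert = ~/path-to-cert
    """))

conf = read_config(fh.name)
section = conf['https://artifactory-instance.local/artifactory']
print(section['verify'])  # False, parsed as a boolean
print(section['cert'])    # '~' expanded to an absolute path
```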
def volumes(self):
"""
        Returns a `list` of all the `Volume` objects known to the cluster. Updated on every call - no caching.
        :return: a `list` of all the `Volume` objects known to the cluster.
:rtype: list
"""
self.connection._check_login()
response = self.connection._do_get("{}/{}".format(self.connection._api_url, "types/Volume/instances")).json()
all_volumes = []
for volume in response:
all_volumes.append(
SIO_Volume.from_dict(volume)
)
return all_volumes
avg_line_len: 39.357143 | score: 20.5
def watch_one_event(event_filter, callback, timeout_callback, timeout, args,
start_time=None):
"""
Start to watch one event.
    :param event_filter: the event filter to poll via ``get_all_entries()``
    :param callback: invoked with the first event found plus ``args``
    :param timeout_callback: invoked with ``args`` when ``timeout`` elapses; if
        not set, ``callback`` is invoked with None instead
    :param timeout: number of seconds to keep polling before giving up
    :param args: extra positional arguments passed to the callbacks
    :param start_time: epoch timestamp when polling started; defaults to now
    :return: None
"""
if timeout and not start_time:
start_time = int(datetime.now().timestamp())
if not args:
args = []
while True:
try:
events = event_filter.get_all_entries()
if events:
callback(events[0], *args)
return
        except Exception as err:
# ignore error, but log it
logger.debug(f'Got error grabbing keeper events: {str(err)}')
time.sleep(0.1)
if timeout:
elapsed = int(datetime.now().timestamp()) - start_time
if elapsed > timeout:
if timeout_callback:
timeout_callback(*args)
else:
callback(None, *args)
break
avg_line_len: 30.051282 | score: 16.923077
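The filter object only needs a ``get_all_entries()`` method, so a stub suffices to see the control flow; ``FakeFilter`` below is purely illustrative:

```python
class FakeFilter:
    """Illustrative stand-in for a web3-style event filter."""
    def get_all_entries(self):
        return [{'args': {'value': 42}}]

# Fires the callback with the first entry and returns immediately.
watch_one_event(FakeFilter(), callback=print, timeout_callback=None,
                timeout=5, args=None)
```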
def update_node(availability=str,
node_name=str,
role=str,
node_id=str,
version=int):
'''
    Updates Docker Swarm nodes; this needs to target a manager node/minion.
availability
Drain or Active
node_name
minion/node
role
role of manager or worker
node_id
The Id and that can be obtained via swarm.node_ls
version
Is obtained by swarm.node_ls
CLI Example:
.. code-block:: bash
salt '*' swarm.update_node availability=drain node_name=minion2 \
role=worker node_id=3k9x7t8m4pel9c0nqr3iajnzp version=19
'''
client = docker.APIClient(base_url='unix://var/run/docker.sock')
try:
salt_return = {}
node_spec = {'Availability': availability,
'Name': node_name,
'Role': role}
client.update_node(node_id=node_id,
version=version,
node_spec=node_spec)
salt_return.update({'Node Information': node_spec})
except TypeError:
salt_return = {}
salt_return.update({'Error': 'Make sure all args are passed [availability, node_name, role, node_id, version]'})
return salt_return
avg_line_len: 28.113636 | score: 23.522727
def manager_view(request, managerTitle):
''' View the details of a manager position.
Parameters:
request is an HTTP request
managerTitle is the URL title of the manager.
'''
targetManager = get_object_or_404(Manager, url_title=managerTitle)
if not targetManager.active:
messages.add_message(request, messages.ERROR, MESSAGES['INACTIVE_MANAGER'].format(managerTitle=targetManager.title))
return HttpResponseRedirect(reverse('managers:list_managers'))
else:
return render_to_response('view_manager.html', {
'page_name': "View Manager",
'targetManager': targetManager,
}, context_instance=RequestContext(request))
avg_line_len: 43.5 | score: 21.375
def get_queryset(self, **kwargs):
"""
Gets our queryset. This takes care of filtering if there are any
fields to filter by.
"""
queryset = self.derive_queryset(**kwargs)
return self.order_queryset(queryset)
avg_line_len: 31.125 | score: 13.125
def connect(self, addr):
"""Initiate a connection request to the peer router."""
if _debug: RouterToRouterService._debug("connect %r", addr)
# make a connection
conn = ConnectionState(addr)
self.multiplexer.connections[addr] = conn
# associate with this service, but it is not connected until the ack comes back
conn.service = self
# keep a list of pending NPDU objects until the ack comes back
conn.pendingNPDU = []
# build a service request
request = ServiceRequest(ROUTER_TO_ROUTER_SERVICE_ID)
request.pduDestination = addr
# send it
self.service_request(request)
# return the connection object
return conn
avg_line_len: 31.347826 | score: 21.304348
def import_words_from_file(self,
inputfile: str,
is_diceware: bool) -> None:
"""Import words for the wordlist from a given file.
The file can have a single column with words or be diceware-like
(two columns).
Keyword arguments:
inputfile -- A string with the path to the wordlist file to load, or
the value 'internal' to load the internal one.
is_diceware -- True if the file is diceware-like.
"""
if not Aux.isfile_notempty(inputfile):
            raise FileNotFoundError('Input file does not exist, is not valid '
'or is empty: {}'.format(inputfile))
self._wordlist_entropy_bits = None
if is_diceware:
self._wordlist = self._read_words_from_diceware(inputfile)
else:
self._wordlist = self._read_words_from_wordfile(inputfile)
avg_line_len: 40.347826 | score: 22
def list_overview_fmt_gen(self):
"""Generator for the LIST OVERVIEW.FMT
See list_overview_fmt() for more information.
Yields:
An element in the list returned by list_overview_fmt().
"""
code, message = self.command("LIST OVERVIEW.FMT")
if code != 215:
raise NNTPReplyError(code, message)
for line in self.info_gen(code, message):
try:
name, suffix = line.rstrip().split(":")
except ValueError:
raise NNTPDataError("Invalid LIST OVERVIEW.FMT")
if suffix and not name:
name, suffix = suffix, name
if suffix and suffix != "full":
raise NNTPDataError("Invalid LIST OVERVIEW.FMT")
yield (name, suffix == "full")
avg_line_len: 36 | score: 16.045455
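The per-line parsing rule can be exercised on its own; typical LIST OVERVIEW.FMT lines end in ':' for plain headers and in ':full' for headers returned with their name:

```python
for line in ["Subject:", "From:", "Date:", "Xref:full"]:
    name, suffix = line.rstrip().split(":")
    if suffix and not name:
        name, suffix = suffix, name
    print((name, suffix == "full"))
# ('Subject', False), ('From', False), ('Date', False), ('Xref', True)
```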
def distance_calc(s1, s2):
"""
Calculate Levenshtein distance between two words.
:param s1: first word
:type s1 : str
:param s2: second word
:type s2 : str
    :return: distance between the two words
References :
1- https://stackoverflow.com/questions/2460177/edit-distance-in-python
2- https://en.wikipedia.org/wiki/Levenshtein_distance
"""
if len(s1) > len(s2):
s1, s2 = s2, s1
distances = range(len(s1) + 1)
for i2, c2 in enumerate(s2):
distances_ = [i2 + 1]
for i1, c1 in enumerate(s1):
if c1 == c2:
distances_.append(distances[i1])
else:
distances_.append(
1 + min((distances[i1], distances[i1 + 1], distances_[-1])))
distances = distances_
return distances[-1]
avg_line_len: 28.607143 | score: 17.035714
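The classic sanity check: turning 'kitten' into 'sitting' takes three edits.

```python
assert distance_calc("kitten", "sitting") == 3  # k->s, e->i, append g
assert distance_calc("", "abc") == 3            # pure insertions
assert distance_calc("same", "same") == 0
```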
def _set_arp_entry(self, v, load=False):
"""
Setter method for arp_entry, mapped from YANG variable /rbridge_id/arp_entry (list)
If this variable is read-only (config: false) in the
    source YANG file, then _set_arp_entry is considered a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_arp_entry() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("arp_ip_address",arp_entry.arp_entry, yang_name="arp-entry", rest_name="arp", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='arp-ip-address', extensions={u'tailf-common': {u'info': u'Address Resolution Protocol (ARP)', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'arp', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'ArpStaticConfigCallpoint'}}), is_container='list', yang_name="arp-entry", rest_name="arp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Address Resolution Protocol (ARP)', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'arp', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'ArpStaticConfigCallpoint'}}, namespace='urn:brocade.com:mgmt:brocade-arp', defining_module='brocade-arp', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """arp_entry must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("arp_ip_address",arp_entry.arp_entry, yang_name="arp-entry", rest_name="arp", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='arp-ip-address', extensions={u'tailf-common': {u'info': u'Address Resolution Protocol (ARP)', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'arp', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'ArpStaticConfigCallpoint'}}), is_container='list', yang_name="arp-entry", rest_name="arp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Address Resolution Protocol (ARP)', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'alt-name': u'arp', u'cli-compact-syntax': None, u'cli-sequence-commands': None, u'cli-incomplete-command': None, u'callpoint': u'ArpStaticConfigCallpoint'}}, namespace='urn:brocade.com:mgmt:brocade-arp', defining_module='brocade-arp', yang_type='list', is_config=True)""",
})
self.__arp_entry = t
if hasattr(self, '_set'):
self._set()
avg_line_len: 131.772727 | score: 63.227273
def export_envars(self, env):
"""Export the environment variables contained in the dict env."""
for k, v in env.items():
self.export_envar(k, v)
avg_line_len: 42.25 | score: 6
def from_api_repr(cls, resource):
"""Factory: construct instance from the JSON repr.
:type resource: dict
:param resource: JSON representation of the entry
:rtype: :class:`QueryPlanEntryStep`
:return: new instance built from the resource
"""
return cls(kind=resource.get("kind"), substeps=resource.get("substeps", ()))
avg_line_len: 36.7 | score: 17.6
def getIfStats(self):
"""Return dictionary of Traffic Stats for Network Interfaces.
@return: Nested dictionary of statistics for each interface.
"""
info_dict = {}
try:
fp = open(ifaceStatsFile, 'r')
data = fp.read()
fp.close()
        except Exception:
raise IOError('Failed reading interface stats from file: %s'
% ifaceStatsFile)
for line in data.splitlines():
            mobj = re.match(r'^\s*([\w\d:]+):\s*(.*\S)\s*$', line)
if mobj:
iface = mobj.group(1)
statline = mobj.group(2)
info_dict[iface] = dict(zip(
('rxbytes', 'rxpackets', 'rxerrs', 'rxdrop', 'rxfifo',
'rxframe', 'rxcompressed', 'rxmulticast',
'txbytes', 'txpackets', 'txerrs', 'txdrop', 'txfifo',
'txcolls', 'txcarrier', 'txcompressed'),
[int(x) for x in statline.split()]))
return info_dict
avg_line_len: 39.111111 | score: 17.259259
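The regular expression splits an interface name from its counters; one /proc/net/dev-style line (counter values invented) shows the shape of the parse:

```python
import re

line = "  eth0: 1832904 12809 0 0 0 0 0 0 1527300 10759 0 0 0 0 0 0"
mobj = re.match(r'^\s*([\w\d:]+):\s*(.*\S)\s*$', line)
print(mobj.group(1))               # 'eth0'
print(len(mobj.group(2).split()))  # 16 counters: 8 rx + 8 tx
```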
def kron_dot(A, B, C, out=None):
r""" Kronecker product followed by dot product.
Let :math:`\mathrm A`, :math:`\mathrm B`, and :math:`\mathrm C` be matrices of
dimensions :math:`p\times p`, :math:`n\times d`, and :math:`d\times p`.
It computes
.. math::
\text{unvec}((\mathrm A\otimes\mathrm B)\text{vec}(\mathrm C))
\in n\times p,
which is equivalent to :math:`\mathrm B\mathrm C\mathrm A^{\intercal}`.
Parameters
----------
A : array_like
Matrix A.
B : array_like
Matrix B.
C : array_like
Matrix C.
out : :class:`numpy.ndarray`, optional
Copy result to. Defaults to ``None``.
Returns
-------
:class:`numpy.ndarray`
unvec((A ⊗ B) vec(C))
"""
from numpy import dot, zeros, asarray
A = asarray(A)
B = asarray(B)
C = asarray(C)
if out is None:
out = zeros((B.shape[0], A.shape[0]))
dot(B, dot(C, A.T), out=out)
return out
avg_line_len: 23.02439 | score: 24
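The identity unvec((A ⊗ B) vec(C)) = B C Aᵀ can be verified numerically with a column-major vec convention:

```python
import numpy as np

rng = np.random.default_rng(0)
p, n, d = 3, 4, 2
A = rng.normal(size=(p, p))
B = rng.normal(size=(n, d))
C = rng.normal(size=(d, p))

direct = kron_dot(A, B, C)                # computes B @ C @ A.T under the hood
vec_c = C.flatten(order="F")              # column-stacking vec()
via_kron = (np.kron(A, B) @ vec_c).reshape(n, p, order="F")
assert np.allclose(direct, via_kron)
```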
def is_constraint(self):
"""
returns the expression of the constraint that constrains this parameter
"""
if self._is_constraint is None:
return None
return self._bundle.get_parameter(context='constraint', uniqueid=self._is_constraint)
avg_line_len: 40 | score: 18
def load_exon_bulk(self, exon_objs):
"""Load a bulk of exon objects to the database
Arguments:
exon_objs(iterable(scout.models.hgnc_exon))
"""
try:
            result = self.exon_collection.insert_many(exon_objs)
except (DuplicateKeyError, BulkWriteError) as err:
raise IntegrityError(err)
return result
avg_line_len: 29.230769 | score: 19.307692
def run_and_measure(self, quil_program: Program, qubits: List[int] = None, trials: int = 1,
memory_map: Any = None) -> np.ndarray:
"""
Run a Quil program once to determine the final wavefunction, and measure multiple times.
Alternatively, consider using ``wavefunction`` and calling ``sample_bitstrings`` on the
resulting object.
For a large wavefunction and a low-medium number of trials, use this function.
On the other hand, if you're sampling a small system many times you might want to
use ``Wavefunction.sample_bitstrings``.
.. note:: If your program contains measurements or noisy gates, this method may not do what
you want. If the execution of ``quil_program`` is **non-deterministic** then the
final wavefunction from which the returned bitstrings are sampled itself only
represents a stochastically generated sample and the outcomes sampled from
*different* ``run_and_measure`` calls *generally sample different bitstring
distributions*.
:param quil_program: The program to run and measure
:param qubits: An optional list of qubits to measure. The order of this list is
respected in the returned bitstrings. If not provided, all qubits used in
the program will be measured and returned in their sorted order.
:param int trials: Number of times to sample from the prepared wavefunction.
:param memory_map: An assignment of classical registers to values, representing an initial
state for the QAM's classical memory.
This is expected to be of type Dict[str, List[Union[int, float]]],
where the keys are memory region names and the values are arrays of
initialization data.
For now, we also support input of type Dict[MemoryReference, Any],
but this is deprecated and will be removed in a future release.
:return: An array of measurement results (0 or 1) of shape (trials, len(qubits))
"""
if qubits is None:
qubits = sorted(quil_program.get_qubits(indices=True))
if memory_map is not None:
quil_program = self.augment_program_with_memory_values(quil_program, memory_map)
return self.connection._run_and_measure(quil_program=quil_program, qubits=qubits,
trials=trials,
random_seed=self.random_seed)
avg_line_len: 59.022727 | score: 37.068182
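This matches pyquil's QVM-era API; assuming pyquil 2.x and a running QVM, a Bell-state sample might look like the following (``qvm`` is assumed to be an already-constructed instance of the class this method belongs to):

```python
from pyquil import Program
from pyquil.gates import CNOT, H

prog = Program(H(0), CNOT(0, 1))
# `qvm` is assumed to be an instance of this class, connected to a QVM.
bitstrings = qvm.run_and_measure(prog, qubits=[0, 1], trials=10)
print(bitstrings.shape)  # (10, 2); each row is 00 or 11 for a Bell state
```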
def safe_file_dump(filename, payload='', mode='w'):
"""Write a string to a file.
This method is "safe" to the extent that `safe_open` is "safe". See the explanation on the method
doc there.
When `payload` is an empty string (the default), this method can be used as a concise way to
create an empty file along with its containing directory (or truncate it if it already exists).
:param string filename: The filename of the file to write to.
:param string payload: The string to write to the file.
:param string mode: A mode argument for the python `open` builtin which should be a write mode
variant. Defaults to 'w'.
"""
with safe_open(filename, mode=mode) as f:
f.write(payload)
avg_line_len: 44.625 | score: 28.4375
def identify_image(image):
"""Provides a tuple of image's UNIQUE_IMAGE_ATTRIBUTES. Note:
this is not guaranteed to be unique (and will often not be)
for pre-1.1 metadata, as subvariant did not exist. Provided as
a function so consumers can use it on plain image dicts read from
the metadata or PDC.
"""
try:
# Image instance case
attrs = tuple(getattr(image, attr) for attr in UNIQUE_IMAGE_ATTRIBUTES)
except AttributeError:
# Plain dict case
attrs = tuple(image.get(attr, None) for attr in UNIQUE_IMAGE_ATTRIBUTES)
ui = UniqueImage(*attrs)
    # If unified is None (which could happen in the dict case), we want the
    # default value of False instead. Also convert additional_variants to a list.
return ui._replace(
unified=ui.unified or False, additional_variants=ui.additional_variants or []
)
avg_line_len: 45.210526 | score: 23.105263
def bend_rounded(Di, angle, fd=None, rc=None, bend_diameters=5.0,
Re=None, roughness=0.0, L_unimpeded=None, method='Rennels'):
r'''Returns loss coefficient for rounded bend in a pipe of diameter `Di`,
`angle`, with a specified either radius of curvature `rc` or curvature
defined by `bend_diameters`, Reynolds number `Re` and optionally pipe
    roughness, unimpeded length downstream, and with the specified method.
This calculation has five methods available.
It is hard to describe one method as more conservative than another as
depending on the conditions, the relative results change significantly.
The 'Miller' method is the most complicated and slowest method; the 'Ito'
    method is comprehensive as well and a source of original data, and the primary
basis for the 'Rennels' method. The 'Swamee' method is very simple and
generally does not match the other methods. The 'Crane' method may match
or not match other methods depending on the inputs.
The Rennels [1]_ formula is:
.. math::
K = f\alpha\frac{r}{d} + (0.10 + 2.4f)\sin(\alpha/2)
+ \frac{6.6f(\sqrt{\sin(\alpha/2)}+\sin(\alpha/2))}
{(r/d)^{\frac{4\alpha}{\pi}}}
The Swamee [5]_ formula is:
.. math::
K = \left[0.0733 + 0.923 \left(\frac{d}{rc}\right)^{3.5} \right]
\theta^{0.5}
.. figure:: fittings/bend_rounded.png
:scale: 30 %
:alt: rounded bend; after [1]_
Parameters
----------
Di : float
Inside diameter of pipe, [m]
angle : float
Angle of bend, [degrees]
fd : float, optional
Darcy friction factor; used only in Rennels method; calculated if not
provided from Reynolds number, diameter, and roughness [-]
rc : float, optional
Radius of curvature of the entrance, optional [m]
bend_diameters : float, optional (used if rc not provided)
Number of diameters of pipe making up the bend radius [-]
Re : float, optional
Reynolds number of the pipe (used in Miller, Ito methods primarily, and
Rennels method if no friction factor given), [m]
roughness : float, optional
Roughness of bend wall (used in Miller, Ito methods primarily, and
Rennels method if no friction factor given), [m]
L_unimpeded : float, optional
The length of unimpeded pipe without any fittings, instrumentation,
or flow disturbances downstream (assumed 20 diameters if not
specified); used only in Miller method, [m]
method : str, optional
One of 'Rennels', 'Miller', 'Crane', 'Ito', or 'Swamee', [-]
Returns
-------
K : float
Loss coefficient [-]
Notes
-----
When inputting bend diameters, note that manufacturers often specify
this as a multiplier of nominal diameter, which is different than actual
diameter. Those require that rc be specified.
In the 'Rennels' method, `rc` is limited to 0.5 or above; which represents
a sharp, square, inner edge - and an outer bend radius of 1.0. Losses are
at a minimum when this value is large. Its first term represents surface
friction loss; the second, secondary flows; and the third, flow separation.
It encompasses the entire range of elbow and pipe bend configurations.
It was developed for bend angles between 0 and 180 degrees; and r/D
ratios above 0.5. Only smooth pipe data was used in its development.
Note the loss coefficient includes the surface friction of the pipe as if
it was straight.
Examples
--------
>>> bend_rounded(Di=4.020, rc=4.0*5, angle=30, Re=1E5)
0.11519070808085191
References
----------
.. [1] Rennels, Donald C., and Hobart M. Hudson. Pipe Flow: A Practical
and Comprehensive Guide. 1st edition. Hoboken, N.J: Wiley, 2012.
.. [2] Miller, Donald S. Internal Flow Systems: Design and Performance
Prediction. Gulf Publishing Company, 1990.
.. [3] Crane Co. Flow of Fluids Through Valves, Fittings, and Pipe. Crane,
2009.
.. [4] Swamee, Prabhata K., and Ashok K. Sharma. Design of Water Supply
Pipe Networks. John Wiley & Sons, 2008.
.. [5] Itō, H."Pressure Losses in Smooth Pipe Bends." Journal of Fluids
Engineering 82, no. 1 (March 1, 1960): 131-40. doi:10.1115/1.3662501
.. [6] Blevins, Robert D. Applied Fluid Dynamics Handbook. New York, N.Y.:
Van Nostrand Reinhold Co., 1984.
'''
if method is None:
method = 'Rennels'
if rc is None:
rc = Di*bend_diameters
if method == 'Rennels':
angle = radians(angle)
if fd is None:
if Re is None:
raise ValueError("The `Rennels` method requires either a "
"specified friction factor or `Re`")
fd = Colebrook(Re=Re, eD=roughness/Di, tol=-1)
sin_term = sin(0.5*angle)
return (fd*angle*rc/Di + (0.10 + 2.4*fd)*sin_term
+ 6.6*fd*(sin_term**0.5 + sin_term)/(rc/Di)**(4.*angle/pi))
elif method == 'Miller':
if Re is None:
raise ValueError('Miller method requires Reynolds number')
return bend_rounded_Miller(Di=Di, angle=angle, Re=Re, rc=rc,
bend_diameters=bend_diameters,
roughness=roughness,
L_unimpeded=L_unimpeded)
elif method == 'Crane':
return bend_rounded_Crane(Di=Di, angle=angle, rc=rc,
bend_diameters=bend_diameters)
elif method == 'Ito':
if Re is None:
raise ValueError("The `Iso` method requires`Re`")
return bend_rounded_Ito(Di=Di, angle=angle, Re=Re, rc=rc, bend_diameters=bend_diameters,
roughness=roughness)
elif method == 'Swamee':
return (0.0733 + 0.923*(Di/rc)**3.5)*radians(angle)**0.5
else:
raise ValueError('Specified method not recognized; methods are %s'
%(bend_rounded_methods))
avg_line_len: 44.2 | score: 24.955556
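As the notes warn, the correlations can disagree; assuming this is the fluids package's implementation, a quick side-by-side comparison:

```python
# Assuming fluids.fittings.bend_rounded; compare the five correlations.
for m in ('Rennels', 'Miller', 'Crane', 'Ito', 'Swamee'):
    K = bend_rounded(Di=4.020, rc=20.0, angle=30.0, Re=1E5, method=m)
    print(m, round(K, 4))
```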
def save_graph(cn_topo, filename, showintfs=False, showaddrs=False):
'''
Save the topology to an image file
'''
__do_draw(cn_topo, showintfs=showintfs, showaddrs=showaddrs)
pyp.savefig(filename)
avg_line_len: 35 | score: 22.333333
def send_action(action, params=None):
"""
helper method for sending actions
"""
data={"msg_type":"action", "action":action}
if params is not None:
data['params']=params
_comm.send(data)
avg_line_len: 23.25 | score: 8.75
def encode(self, sequence):
"""Returns a tuple (binary reprensentation, default sequence, polymorphisms list)"""
polymorphisms = []
defaultSequence = ''
binSequence = array.array(self.forma.typecode)
b = 0
i = 0
trueI = 0 #not inc in case if poly
poly = set()
while i < len(sequence)-1:
b = b | self.forma[self.charToBin[sequence[i]]]
if sequence[i+1] == '/' :
poly.add(sequence[i])
i += 2
else :
binSequence.append(b)
if len(poly) > 0 :
poly.add(sequence[i])
polymorphisms.append((trueI, poly))
poly = set()
                while b % 2 != 0 :
                    b = b // 2
defaultSequence += sequence[i]
b = 0
i += 1
trueI += 1
if i < len(sequence) :
b = b | self.forma[self.charToBin[sequence[i]]]
binSequence.append(b)
if len(poly) > 0 :
if sequence[i] not in poly :
poly.add(sequence[i])
polymorphisms.append((trueI, poly))
defaultSequence += sequence[i]
return (binSequence, defaultSequence, polymorphisms)
avg_line_len: 23.829268 | score: 19.341463
def _exit_gracefully(self, signum, frame):
"""
Helper method to clean up DAG file processors to avoid leaving orphan processes.
"""
self.log.info("Exiting gracefully upon receiving signal %s", signum)
self.terminate()
self.end()
self.log.debug("Finished terminating DAG processors.")
sys.exit(os.EX_OK)
avg_line_len: 39.777778 | score: 17.777778
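Handlers like this are typically wired up with the ``signal`` module; a minimal, self-contained sketch of the same pattern:

```python
import os
import signal
import sys

class Processor:
    def _exit_gracefully(self, signum, frame):
        print(f"Exiting gracefully upon receiving signal {signum}")
        sys.exit(os.EX_OK)

proc = Processor()
signal.signal(signal.SIGTERM, proc._exit_gracefully)  # bound methods are valid handlers
signal.signal(signal.SIGINT, proc._exit_gracefully)
```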
def check_dependencies():
"""
Check that dependencies are installed:
- require git 2.7+, so that credential-cache--daemon ignores SIGHUP
https://github.com/git/git/blob/v2.7.0/credential-cache--daemon.c
"""
# Check that git is installed
if not shutil.which("git"):
raise Error(_("You don't have git. Install git, then re-run!"))
# Check that git --version > 2.7
version = subprocess.check_output(["git", "--version"]).decode("utf-8")
matches = re.search(r"^git version (\d+\.\d+\.\d+).*$", version)
if not matches or pkg_resources.parse_version(matches.group(1)) < pkg_resources.parse_version("2.7.0"):
raise Error(_("You have an old version of git. Install version 2.7 or later, then re-run!"))
avg_line_len: 46.625 | score: 25.125
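Parsing the version string matters because plain string comparison misorders multi-digit components:

```python
import pkg_resources

# '2.10.0' sorts before '2.7.0' as a string, but it is the newer release.
assert "2.10.0" < "2.7.0"
assert pkg_resources.parse_version("2.10.0") > pkg_resources.parse_version("2.7.0")
```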
def redhat_release(rh_release, un):
"""
.. warning::
    This combiner method is deprecated, please use
:py:class:`insights.combiners.redhat_release.RedHatRelease` instead.
Combiner method to check uname and redhat-release for rhel major/minor
version.
Prefer uname to redhat-release.
Returns:
Release: A named tuple with the following items:
- major: integer
- minor: integer
Raises:
ParseException: If the version can't be determined even though a Uname
or RedhatRelease was provided.
Examples:
>>> rh_release.major
7
>>> rh_release.minor
2
>>> rh_release
Release(major=7, minor=2)
"""
deprecated(redhat_release, "Use the `RedHatRelease` class instead.")
if un and un.release_tuple[0] != -1:
return Release(*un.release_tuple)
if rh_release:
return Release(rh_release.major, rh_release.minor)
raise ParseException("Unabled to determine release.")
avg_line_len: 25.641026 | score: 23.384615
def get_historic_trades(self, start, end, granularity, product_id='BTC-USD'):
"""`<https://docs.exchange.coinbase.com/#get-historic-rates>`_
:param start: either datetime.datetime or str in ISO 8601
:param end: either datetime.datetime or str in ISO 8601
    :param int granularity: desired timeslice in seconds
:returns: desired data
"""
params = {
'start':self._format_iso_time(start),
'end':self._format_iso_time(end),
'granularity':granularity
}
return self._get('products', product_id, 'candles', params=params)
|
[
"def",
"get_historic_trades",
"(",
"self",
",",
"start",
",",
"end",
",",
"granularity",
",",
"product_id",
"=",
"'BTC-USD'",
")",
":",
"params",
"=",
"{",
"'start'",
":",
"self",
".",
"_format_iso_time",
"(",
"start",
")",
",",
"'end'",
":",
"self",
".",
"_format_iso_time",
"(",
"end",
")",
",",
"'granularity'",
":",
"granularity",
"}",
"return",
"self",
".",
"_get",
"(",
"'products'",
",",
"product_id",
",",
"'candles'",
",",
"params",
"=",
"params",
")"
] | 36.866667 | 19.333333 |
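A hypothetical call to the method above; the client object and product id are placeholders:
import datetime
start = datetime.datetime(2019, 1, 1)
end = datetime.datetime(2019, 1, 2)
# Returns one candle per hour (3600 s) for the requested day:
candles = client.get_historic_trades(start, end, granularity=3600, product_id='BTC-USD')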
def create_networking_resource_from_context(shell_name, supported_os, context):
"""
Creates an instance of Networking Resource by given context
:param shell_name: Shell Name
:type shell_name: str
:param supported_os: list of supported OS
:type supported_os: list
:param context: cloudshell.shell.core.driver_context.ResourceCommandContext
:type context: cloudshell.shell.core.driver_context.ResourceCommandContext
:return:
:rtype GenericNetworkingResource
"""
result = GenericNetworkingResource(shell_name=shell_name, name=context.resource.name, supported_os=supported_os)
result.address = context.resource.address
result.family = context.resource.family
result.fullname = context.resource.fullname
result.attributes = dict(context.resource.attributes)
return result
|
[
"def",
"create_networking_resource_from_context",
"(",
"shell_name",
",",
"supported_os",
",",
"context",
")",
":",
"result",
"=",
"GenericNetworkingResource",
"(",
"shell_name",
"=",
"shell_name",
",",
"name",
"=",
"context",
".",
"resource",
".",
"name",
",",
"supported_os",
"=",
"supported_os",
")",
"result",
".",
"address",
"=",
"context",
".",
"resource",
".",
"address",
"result",
".",
"family",
"=",
"context",
".",
"resource",
".",
"family",
"result",
".",
"fullname",
"=",
"context",
".",
"resource",
".",
"fullname",
"result",
".",
"attributes",
"=",
"dict",
"(",
"context",
".",
"resource",
".",
"attributes",
")",
"return",
"result"
] | 38.904762 | 21.952381 |
def list(self, **kwargs):
"""Return a list of roles.
=====API DOCS=====
Retrieve a list of objects.
:param all_pages: Flag that if set, collect all pages of content from the API when returning results.
:type all_pages: bool
:param page: The page to show. Ignored if all_pages is set.
:type page: int
:param query: Contains 2-tuples used as query parameters to filter resulting resource objects.
:type query: list
:param `**kwargs`: Keyword arguments list of available fields used for searching resource objects.
:returns: A JSON object containing details of all resource objects returned by Tower backend.
:rtype: dict
=====API DOCS=====
"""
data, self.endpoint = self.data_endpoint(kwargs)
r = super(Resource, self).list(**data)
# Change display settings and data format for human consumption
self.configure_display(r)
return r
|
[
"def",
"list",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"data",
",",
"self",
".",
"endpoint",
"=",
"self",
".",
"data_endpoint",
"(",
"kwargs",
")",
"r",
"=",
"super",
"(",
"Resource",
",",
"self",
")",
".",
"list",
"(",
"*",
"*",
"data",
")",
"# Change display settings and data format for human consumption",
"self",
".",
"configure_display",
"(",
"r",
")",
"return",
"r"
] | 40.041667 | 26.666667 |
def check_exists(name, path):
'''
Check if the given path is an alternative for a name.
.. versionadded:: 2015.8.4
CLI Example:
.. code-block:: bash
salt '*' alternatives.check_exists name path
'''
cmd = [_get_cmd(), '--display', name]
out = __salt__['cmd.run_all'](cmd, python_shell=False)
if out['retcode'] > 0 and out['stderr'] != '':
return False
return any((line.startswith(path) for line in out['stdout'].splitlines()))
|
[
"def",
"check_exists",
"(",
"name",
",",
"path",
")",
":",
"cmd",
"=",
"[",
"_get_cmd",
"(",
")",
",",
"'--display'",
",",
"name",
"]",
"out",
"=",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
",",
"python_shell",
"=",
"False",
")",
"if",
"out",
"[",
"'retcode'",
"]",
">",
"0",
"and",
"out",
"[",
"'stderr'",
"]",
"!=",
"''",
":",
"return",
"False",
"return",
"any",
"(",
"(",
"line",
".",
"startswith",
"(",
"path",
")",
"for",
"line",
"in",
"out",
"[",
"'stdout'",
"]",
".",
"splitlines",
"(",
")",
")",
")"
] | 24.684211 | 25.421053 |
def write(filename, f):
"""Write a function `f` defined in terms of spherical coordinates to a file.
"""
import meshio
import meshzoo
points, cells = meshzoo.iso_sphere(5)
# get spherical coordinates from points
polar = numpy.arccos(points[:, 2])
azimuthal = numpy.arctan2(points[:, 1], points[:, 0])
vals = f(polar, azimuthal)
meshio.write(filename, points, {"triangle": cells}, point_data={"f": vals})
return
|
[
"def",
"write",
"(",
"filename",
",",
"f",
")",
":",
"import",
"meshio",
"import",
"meshzoo",
"points",
",",
"cells",
"=",
"meshzoo",
".",
"iso_sphere",
"(",
"5",
")",
"# get spherical coordinates from points",
"polar",
"=",
"numpy",
".",
"arccos",
"(",
"points",
"[",
":",
",",
"2",
"]",
")",
"azimuthal",
"=",
"numpy",
".",
"arctan2",
"(",
"points",
"[",
":",
",",
"1",
"]",
",",
"points",
"[",
":",
",",
"0",
"]",
")",
"vals",
"=",
"f",
"(",
"polar",
",",
"azimuthal",
")",
"meshio",
".",
"write",
"(",
"filename",
",",
"points",
",",
"{",
"\"triangle\"",
":",
"cells",
"}",
",",
"point_data",
"=",
"{",
"\"f\"",
":",
"vals",
"}",
")",
"return"
] | 34.076923 | 15.692308 |
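A minimal usage sketch for write above: f receives arrays of polar and azimuthal angles and must return values of the same shape (the output filename is illustrative):
import numpy
# Height field proportional to the cosine of the polar angle:
write("sphere.vtu", lambda polar, azimuthal: numpy.cos(polar))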
def cis(x: float) -> complex:
r"""
Implements Euler's formula
    :math:`\text{cis}(x) = e^{i x} = \cos(x) + i \sin(x)`
"""
return np.cos(x) + 1.0j * np.sin(x)
|
[
"def",
"cis",
"(",
"x",
":",
"float",
")",
"->",
"complex",
":",
"return",
"np",
".",
"cos",
"(",
"x",
")",
"+",
"1.0j",
"*",
"np",
".",
"sin",
"(",
"x",
")"
] | 29 | 7.166667 |
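Two worked values of cis, confirming it computes e^{ix} (assuming numpy is imported as np):
import numpy as np
assert cis(0.0) == 1.0                    # cos(0) + i*sin(0) = 1
assert np.isclose(cis(np.pi / 2), 1.0j)   # cos(pi/2) + i*sin(pi/2) = i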
def describe_db_instances(name=None, filters=None, jmespath='DBInstances',
region=None, key=None, keyid=None, profile=None):
'''
Return a detailed listing of some, or all, DB Instances visible in the
current scope. Arbitrary subelements or subsections of the returned dataset
    can be selected by passing in a valid JMESPath filter as well.
CLI example::
salt myminion boto_rds.describe_db_instances jmespath='DBInstances[*].DBInstanceIdentifier'
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
pag = conn.get_paginator('describe_db_instances')
args = {}
args.update({'DBInstanceIdentifier': name}) if name else None
args.update({'Filters': filters}) if filters else None
pit = pag.paginate(**args)
pit = pit.search(jmespath) if jmespath else pit
try:
return [p for p in pit]
except ClientError as e:
code = getattr(e, 'response', {}).get('Error', {}).get('Code')
if code != 'DBInstanceNotFound':
log.error(__utils__['boto3.get_error'](e))
return []
|
[
"def",
"describe_db_instances",
"(",
"name",
"=",
"None",
",",
"filters",
"=",
"None",
",",
"jmespath",
"=",
"'DBInstances'",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"conn",
"=",
"_get_conn",
"(",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
")",
"pag",
"=",
"conn",
".",
"get_paginator",
"(",
"'describe_db_instances'",
")",
"args",
"=",
"{",
"}",
"args",
".",
"update",
"(",
"{",
"'DBInstanceIdentifier'",
":",
"name",
"}",
")",
"if",
"name",
"else",
"None",
"args",
".",
"update",
"(",
"{",
"'Filters'",
":",
"filters",
"}",
")",
"if",
"filters",
"else",
"None",
"pit",
"=",
"pag",
".",
"paginate",
"(",
"*",
"*",
"args",
")",
"pit",
"=",
"pit",
".",
"search",
"(",
"jmespath",
")",
"if",
"jmespath",
"else",
"pit",
"try",
":",
"return",
"[",
"p",
"for",
"p",
"in",
"pit",
"]",
"except",
"ClientError",
"as",
"e",
":",
"code",
"=",
"getattr",
"(",
"e",
",",
"'response'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'Error'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'Code'",
")",
"if",
"code",
"!=",
"'DBInstanceNotFound'",
":",
"log",
".",
"error",
"(",
"__utils__",
"[",
"'boto3.get_error'",
"]",
"(",
"e",
")",
")",
"return",
"[",
"]"
] | 41.807692 | 26.884615 |
def main():
"""Create coverage reports and open them in the browser."""
usage = "Usage: %prog PATH_TO_PACKAGE"
parser = optparse.OptionParser(usage=usage)
parser.add_option(
"-v", "--verbose",
action="store_true", dest="verbose", default=False,
help="Show debug output")
parser.add_option(
"-d", "--output-dir",
action="store", type="string", dest="output_dir",
default='',
help="")
parser.add_option(
"-t", "--test-args",
action="store", type="string", dest="test_args",
default='',
help=("Pass argument on to bin/test. Quote the argument, " +
"for instance \"-t '-m somemodule'\"."))
(options, args) = parser.parse_args()
if options.verbose:
log_level = logging.DEBUG
else:
log_level = logging.INFO
logging.basicConfig(level=log_level,
format="%(levelname)s: %(message)s")
curdir = os.getcwd()
testbinary = os.path.join(curdir, 'bin', 'test')
if not os.path.exists(testbinary):
raise RuntimeError("Test command doesn't exist: %s" % testbinary)
coveragebinary = os.path.join(curdir, 'bin', 'coverage')
if not os.path.exists(coveragebinary):
logger.debug("Trying globally installed coverage command.")
coveragebinary = 'coverage'
logger.info("Running tests in coverage mode (can take a long time)")
parts = [coveragebinary, 'run', testbinary]
if options.test_args:
parts.append(options.test_args)
system(" ".join(parts))
logger.debug("Creating coverage reports...")
if options.output_dir:
coverage_dir = options.output_dir
open_in_browser = False
else:
coverage_dir = 'htmlcov' # The default
open_in_browser = True
system("%s html --directory=%s" % (coveragebinary, coverage_dir))
logger.info("Wrote coverage files to %s", coverage_dir)
if open_in_browser:
index_file = os.path.abspath(
os.path.join(coverage_dir, 'index.html'))
logger.debug("About to open %s in your webbrowser.", index_file)
webbrowser.open('file://' + index_file)
logger.info("Opened reports in your browser.")
|
[
"def",
"main",
"(",
")",
":",
"usage",
"=",
"\"Usage: %prog PATH_TO_PACKAGE\"",
"parser",
"=",
"optparse",
".",
"OptionParser",
"(",
"usage",
"=",
"usage",
")",
"parser",
".",
"add_option",
"(",
"\"-v\"",
",",
"\"--verbose\"",
",",
"action",
"=",
"\"store_true\"",
",",
"dest",
"=",
"\"verbose\"",
",",
"default",
"=",
"False",
",",
"help",
"=",
"\"Show debug output\"",
")",
"parser",
".",
"add_option",
"(",
"\"-d\"",
",",
"\"--output-dir\"",
",",
"action",
"=",
"\"store\"",
",",
"type",
"=",
"\"string\"",
",",
"dest",
"=",
"\"output_dir\"",
",",
"default",
"=",
"''",
",",
"help",
"=",
"\"\"",
")",
"parser",
".",
"add_option",
"(",
"\"-t\"",
",",
"\"--test-args\"",
",",
"action",
"=",
"\"store\"",
",",
"type",
"=",
"\"string\"",
",",
"dest",
"=",
"\"test_args\"",
",",
"default",
"=",
"''",
",",
"help",
"=",
"(",
"\"Pass argument on to bin/test. Quote the argument, \"",
"+",
"\"for instance \\\"-t '-m somemodule'\\\".\"",
")",
")",
"(",
"options",
",",
"args",
")",
"=",
"parser",
".",
"parse_args",
"(",
")",
"if",
"options",
".",
"verbose",
":",
"log_level",
"=",
"logging",
".",
"DEBUG",
"else",
":",
"log_level",
"=",
"logging",
".",
"INFO",
"logging",
".",
"basicConfig",
"(",
"level",
"=",
"log_level",
",",
"format",
"=",
"\"%(levelname)s: %(message)s\"",
")",
"curdir",
"=",
"os",
".",
"getcwd",
"(",
")",
"testbinary",
"=",
"os",
".",
"path",
".",
"join",
"(",
"curdir",
",",
"'bin'",
",",
"'test'",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"testbinary",
")",
":",
"raise",
"RuntimeError",
"(",
"\"Test command doesn't exist: %s\"",
"%",
"testbinary",
")",
"coveragebinary",
"=",
"os",
".",
"path",
".",
"join",
"(",
"curdir",
",",
"'bin'",
",",
"'coverage'",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"coveragebinary",
")",
":",
"logger",
".",
"debug",
"(",
"\"Trying globally installed coverage command.\"",
")",
"coveragebinary",
"=",
"'coverage'",
"logger",
".",
"info",
"(",
"\"Running tests in coverage mode (can take a long time)\"",
")",
"parts",
"=",
"[",
"coveragebinary",
",",
"'run'",
",",
"testbinary",
"]",
"if",
"options",
".",
"test_args",
":",
"parts",
".",
"append",
"(",
"options",
".",
"test_args",
")",
"system",
"(",
"\" \"",
".",
"join",
"(",
"parts",
")",
")",
"logger",
".",
"debug",
"(",
"\"Creating coverage reports...\"",
")",
"if",
"options",
".",
"output_dir",
":",
"coverage_dir",
"=",
"options",
".",
"output_dir",
"open_in_browser",
"=",
"False",
"else",
":",
"coverage_dir",
"=",
"'htmlcov'",
"# The default",
"open_in_browser",
"=",
"True",
"system",
"(",
"\"%s html --directory=%s\"",
"%",
"(",
"coveragebinary",
",",
"coverage_dir",
")",
")",
"logger",
".",
"info",
"(",
"\"Wrote coverage files to %s\"",
",",
"coverage_dir",
")",
"if",
"open_in_browser",
":",
"index_file",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"coverage_dir",
",",
"'index.html'",
")",
")",
"logger",
".",
"debug",
"(",
"\"About to open %s in your webbrowser.\"",
",",
"index_file",
")",
"webbrowser",
".",
"open",
"(",
"'file://'",
"+",
"index_file",
")",
"logger",
".",
"info",
"(",
"\"Opened reports in your browser.\"",
")"
] | 38.210526 | 15.77193 |
def from_val(val_schema):
"""Serialize a val schema to teleport."""
definition = getattr(val_schema, "definition", val_schema) if isinstance(
val_schema, BaseSchema) else val_schema
if isinstance(definition, dict):
return _dict_to_teleport(definition)
if isinstance(definition, list):
# teleport only supports a single type by default
if len(definition) == 1:
return {"Array": from_val(definition[0])}
if definition in VAL_PRIMITIVES:
return VAL_PRIMITIVES[definition]
raise SerializationError(
"Serializing %r not (yet) supported." % definition)
|
[
"def",
"from_val",
"(",
"val_schema",
")",
":",
"definition",
"=",
"getattr",
"(",
"val_schema",
",",
"\"definition\"",
",",
"val_schema",
")",
"if",
"isinstance",
"(",
"val_schema",
",",
"BaseSchema",
")",
"else",
"val_schema",
"if",
"isinstance",
"(",
"definition",
",",
"dict",
")",
":",
"return",
"_dict_to_teleport",
"(",
"definition",
")",
"if",
"isinstance",
"(",
"definition",
",",
"list",
")",
":",
"# teleport only supports a single type by default",
"if",
"len",
"(",
"definition",
")",
"==",
"1",
":",
"return",
"{",
"\"Array\"",
":",
"from_val",
"(",
"definition",
"[",
"0",
"]",
")",
"}",
"if",
"definition",
"in",
"VAL_PRIMITIVES",
":",
"return",
"VAL_PRIMITIVES",
"[",
"definition",
"]",
"raise",
"SerializationError",
"(",
"\"Serializing %r not (yet) supported.\"",
"%",
"definition",
")"
] | 36.294118 | 15.529412 |
def _rpt_unused_sections(self, prt):
"""Report unused sections."""
sections_unused = set(self.sections_seen).difference(self.section2goids.keys())
for sec in sections_unused:
prt.write(" UNUSED SECTION: {SEC}\n".format(SEC=sec))
|
[
"def",
"_rpt_unused_sections",
"(",
"self",
",",
"prt",
")",
":",
"sections_unused",
"=",
"set",
"(",
"self",
".",
"sections_seen",
")",
".",
"difference",
"(",
"self",
".",
"section2goids",
".",
"keys",
"(",
")",
")",
"for",
"sec",
"in",
"sections_unused",
":",
"prt",
".",
"write",
"(",
"\" UNUSED SECTION: {SEC}\\n\"",
".",
"format",
"(",
"SEC",
"=",
"sec",
")",
")"
] | 52.2 | 16.4 |
def get_relationship_query_session_for_family(self, family_id=None, proxy=None):
"""Gets the ``OsidSession`` associated with the relationship query service for the given family.
arg: family_id (osid.id.Id): the ``Id`` of the family
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.relationship.RelationshipQuerySession) - a
``RelationshipQuerySession``
raise: NotFound - no ``Family`` found by the given ``Id``
raise: NullArgument - ``family_id`` or ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_relationship_query()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if ``supports_relationship_query()``
and ``supports_visible_federation()`` are ``true``*
"""
if not family_id:
raise NullArgument
if not self.supports_relationship_query():
raise Unimplemented()
##
# Need to include check to see if the familyId is found otherwise raise NotFound
##
try:
from . import sessions
except ImportError:
raise OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.RelationshipQuerySession(family_id, proxy=proxy, runtime=self._runtime)
except AttributeError:
raise OperationFailed()
return session
|
[
"def",
"get_relationship_query_session_for_family",
"(",
"self",
",",
"family_id",
"=",
"None",
",",
"proxy",
"=",
"None",
")",
":",
"if",
"not",
"family_id",
":",
"raise",
"NullArgument",
"if",
"not",
"self",
".",
"supports_relationship_query",
"(",
")",
":",
"raise",
"Unimplemented",
"(",
")",
"##",
"# Need to include check to see if the familyId is found otherwise raise NotFound",
"##",
"try",
":",
"from",
".",
"import",
"sessions",
"except",
"ImportError",
":",
"raise",
"OperationFailed",
"(",
")",
"proxy",
"=",
"self",
".",
"_convert_proxy",
"(",
"proxy",
")",
"try",
":",
"session",
"=",
"sessions",
".",
"RelationshipQuerySession",
"(",
"family_id",
",",
"proxy",
"=",
"proxy",
",",
"runtime",
"=",
"self",
".",
"_runtime",
")",
"except",
"AttributeError",
":",
"raise",
"OperationFailed",
"(",
")",
"return",
"session"
] | 45.515152 | 21.727273 |
def setMovie( self, movie ):
"""
        Sets the movie for this loader to the given movie.
:param movie | <QMovie>
"""
self._movieLabel.setMovie(movie)
self._smallMovieLabel.setMovie(movie)
|
[
"def",
"setMovie",
"(",
"self",
",",
"movie",
")",
":",
"self",
".",
"_movieLabel",
".",
"setMovie",
"(",
"movie",
")",
"self",
".",
"_smallMovieLabel",
".",
"setMovie",
"(",
"movie",
")"
] | 30.75 | 9 |
def insert(queue, items, backend='sqlite'):
'''
Add an item or items to a queue
CLI Example:
.. code-block:: bash
salt-run queue.insert myqueue myitem
salt-run queue.insert myqueue "['item1', 'item2', 'item3']"
salt-run queue.insert myqueue myitem backend=sqlite
salt-run queue.insert myqueue "['item1', 'item2', 'item3']" backend=sqlite
'''
queue_funcs = salt.loader.queues(__opts__)
cmd = '{0}.insert'.format(backend)
if cmd not in queue_funcs:
raise SaltInvocationError('Function "{0}" is not available'.format(cmd))
ret = queue_funcs[cmd](items=items, queue=queue)
return ret
|
[
"def",
"insert",
"(",
"queue",
",",
"items",
",",
"backend",
"=",
"'sqlite'",
")",
":",
"queue_funcs",
"=",
"salt",
".",
"loader",
".",
"queues",
"(",
"__opts__",
")",
"cmd",
"=",
"'{0}.insert'",
".",
"format",
"(",
"backend",
")",
"if",
"cmd",
"not",
"in",
"queue_funcs",
":",
"raise",
"SaltInvocationError",
"(",
"'Function \"{0}\" is not available'",
".",
"format",
"(",
"cmd",
")",
")",
"ret",
"=",
"queue_funcs",
"[",
"cmd",
"]",
"(",
"items",
"=",
"items",
",",
"queue",
"=",
"queue",
")",
"return",
"ret"
] | 33.894737 | 22.210526 |
def set_db_application_prefix(prefix, sep=None):
"""Set the global app prefix and separator."""
global _APPLICATION_PREFIX, _APPLICATION_SEP
_APPLICATION_PREFIX = prefix
if (sep is not None):
_APPLICATION_SEP = sep
|
[
"def",
"set_db_application_prefix",
"(",
"prefix",
",",
"sep",
"=",
"None",
")",
":",
"global",
"_APPLICATION_PREFIX",
",",
"_APPLICATION_SEP",
"_APPLICATION_PREFIX",
"=",
"prefix",
"if",
"(",
"sep",
"is",
"not",
"None",
")",
":",
"_APPLICATION_SEP",
"=",
"sep"
] | 38.833333 | 8.166667 |
def _default_json_default(obj):
"""
Coerce everything to strings.
All objects representing time get output as ISO8601.
"""
if isinstance(obj, (datetime.datetime, datetime.date, datetime.time)):
return obj.isoformat()
else:
return str(obj)
|
[
"def",
"_default_json_default",
"(",
"obj",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"(",
"datetime",
".",
"datetime",
",",
"datetime",
".",
"date",
",",
"datetime",
".",
"time",
")",
")",
":",
"return",
"obj",
".",
"isoformat",
"(",
")",
"else",
":",
"return",
"str",
"(",
"obj",
")"
] | 30 | 13.777778 |
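The helper is meant to be passed as the default hook of json.dumps; a small sketch:
import datetime
import json
payload = {"when": datetime.datetime(2020, 1, 1, 12, 0)}
# Datetimes serialize to ISO 8601; everything else falls back to str():
json.dumps(payload, default=_default_json_default)
# -> '{"when": "2020-01-01T12:00:00"}'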
def get_dataset(self, remote_id):
'''Get or create a dataset given its remote ID (and its source)
    We first try to match `source_id` so the lookup is independent of the source domain
'''
dataset = Dataset.objects(__raw__={
'extras.harvest:remote_id': remote_id,
'$or': [
{'extras.harvest:domain': self.source.domain},
{'extras.harvest:source_id': str(self.source.id)},
],
}).first()
return dataset or Dataset()
|
[
"def",
"get_dataset",
"(",
"self",
",",
"remote_id",
")",
":",
"dataset",
"=",
"Dataset",
".",
"objects",
"(",
"__raw__",
"=",
"{",
"'extras.harvest:remote_id'",
":",
"remote_id",
",",
"'$or'",
":",
"[",
"{",
"'extras.harvest:domain'",
":",
"self",
".",
"source",
".",
"domain",
"}",
",",
"{",
"'extras.harvest:source_id'",
":",
"str",
"(",
"self",
".",
"source",
".",
"id",
")",
"}",
",",
"]",
",",
"}",
")",
".",
"first",
"(",
")",
"return",
"dataset",
"or",
"Dataset",
"(",
")"
] | 41.333333 | 19.5 |
def _perform_async_update_rule(context, id, db_sg_group, rule_id, action):
"""Updates a SG rule async and return the job information.
Only happens if the security group has associated ports. If the async
connection fails the update continues (legacy mode).
"""
rpc_reply = None
sg_rpc = sg_rpc_api.QuarkSGAsyncProcessClient()
ports = db_api.sg_gather_associated_ports(context, db_sg_group)
if len(ports) > 0:
rpc_reply = sg_rpc.start_update(context, id, rule_id, action)
if rpc_reply:
job_id = rpc_reply['job_id']
job_api.add_job_to_context(context, job_id)
else:
LOG.error("Async update failed. Is the worker running?")
|
[
"def",
"_perform_async_update_rule",
"(",
"context",
",",
"id",
",",
"db_sg_group",
",",
"rule_id",
",",
"action",
")",
":",
"rpc_reply",
"=",
"None",
"sg_rpc",
"=",
"sg_rpc_api",
".",
"QuarkSGAsyncProcessClient",
"(",
")",
"ports",
"=",
"db_api",
".",
"sg_gather_associated_ports",
"(",
"context",
",",
"db_sg_group",
")",
"if",
"len",
"(",
"ports",
")",
">",
"0",
":",
"rpc_reply",
"=",
"sg_rpc",
".",
"start_update",
"(",
"context",
",",
"id",
",",
"rule_id",
",",
"action",
")",
"if",
"rpc_reply",
":",
"job_id",
"=",
"rpc_reply",
"[",
"'job_id'",
"]",
"job_api",
".",
"add_job_to_context",
"(",
"context",
",",
"job_id",
")",
"else",
":",
"LOG",
".",
"error",
"(",
"\"Async update failed. Is the worker running?\"",
")"
] | 43.625 | 19.8125 |
def up_to(self, key):
'''Gets the recently inserted values up to a key'''
for okey, ovalue in reversed(self.history):
if okey == key:
break
else:
yield ovalue
|
[
"def",
"up_to",
"(",
"self",
",",
"key",
")",
":",
"for",
"okey",
",",
"ovalue",
"in",
"reversed",
"(",
"self",
".",
"history",
")",
":",
"if",
"okey",
"==",
"key",
":",
"break",
"else",
":",
"yield",
"ovalue"
] | 32 | 16.571429 |
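A sketch of the generator's behaviour, assuming a hypothetical object whose history is a list of (key, value) pairs in insertion order:
obj.history = [('a', 1), ('b', 2), ('c', 3)]
# Values inserted after key 'a', newest first; stops when 'a' is reached:
list(obj.up_to('a'))  # -> [3, 2]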
def _traverse_parent_objs(self, p2cs, goobj_child, goids_seen):
"""Traverse from source GO up parents."""
# Update public(go2obj p2cs), private(goids_seen)
child_id = goobj_child.id
# mark child as seen
goids_seen.add(child_id)
##A self.go2obj[child_id] = goobj_child
# Update goids_seen and go2obj with child alt_ids
for goid_altid in goobj_child.alt_ids:
goids_seen.add(goid_altid)
##A self.go2obj[goid_altid] = goobj_child
# Loop through parents of child object
for parent_obj in goobj_child.parents:
parent_id = parent_obj.id
p2cs[parent_id].add(child_id)
# If parent has not been seen, traverse
if parent_id not in goids_seen:
##F self._traverse_parent_objs(p2cs, parent_obj, go2obj, goids_seen)
self._traverse_parent_objs(p2cs, parent_obj, goids_seen)
|
[
"def",
"_traverse_parent_objs",
"(",
"self",
",",
"p2cs",
",",
"goobj_child",
",",
"goids_seen",
")",
":",
"# Update public(go2obj p2cs), private(goids_seen)",
"child_id",
"=",
"goobj_child",
".",
"id",
"# mark child as seen",
"goids_seen",
".",
"add",
"(",
"child_id",
")",
"##A self.go2obj[child_id] = goobj_child",
"# Update goids_seen and go2obj with child alt_ids",
"for",
"goid_altid",
"in",
"goobj_child",
".",
"alt_ids",
":",
"goids_seen",
".",
"add",
"(",
"goid_altid",
")",
"##A self.go2obj[goid_altid] = goobj_child",
"# Loop through parents of child object",
"for",
"parent_obj",
"in",
"goobj_child",
".",
"parents",
":",
"parent_id",
"=",
"parent_obj",
".",
"id",
"p2cs",
"[",
"parent_id",
"]",
".",
"add",
"(",
"child_id",
")",
"# If parent has not been seen, traverse",
"if",
"parent_id",
"not",
"in",
"goids_seen",
":",
"##F self._traverse_parent_objs(p2cs, parent_obj, go2obj, goids_seen)",
"self",
".",
"_traverse_parent_objs",
"(",
"p2cs",
",",
"parent_obj",
",",
"goids_seen",
")"
] | 48.578947 | 11.473684 |
def get_event_canned_questions(self, id, **data):
"""
GET /events/:id/canned_questions/
This endpoint returns canned questions of a single event (examples: first name, last name, company, prefix, etc.). This endpoint will return :format:`question`.
"""
return self.get("/events/{0}/canned_questions/".format(id), data=data)
|
[
"def",
"get_event_canned_questions",
"(",
"self",
",",
"id",
",",
"*",
"*",
"data",
")",
":",
"return",
"self",
".",
"get",
"(",
"\"/events/{0}/canned_questions/\"",
".",
"format",
"(",
"id",
")",
",",
"data",
"=",
"data",
")"
] | 52.285714 | 29.714286 |
def _set_hostname_domain(self):
"""Extract hostname and domain"""
self._hostname, _, self._domain = str(self._fqdn).partition('.')
log.debug('Hostname: %s, Domain: %s' % (self._hostname, self._domain))
|
[
"def",
"_set_hostname_domain",
"(",
"self",
")",
":",
"self",
".",
"_hostname",
",",
"_",
",",
"self",
".",
"_domain",
"=",
"str",
"(",
"self",
".",
"_fqdn",
")",
".",
"partition",
"(",
"'.'",
")",
"log",
".",
"debug",
"(",
"'Hostname: %s, Domain: %s'",
"%",
"(",
"self",
".",
"_hostname",
",",
"self",
".",
"_domain",
")",
")"
] | 55.5 | 19.75 |
def revoke_user_token(self, user_id):
"""
Revoke user token
        Erases the user's token on file, forcing them to re-login and obtain a new one.
:param user_id: int
:return:
"""
user = self.get(user_id)
user._token = None
self.save(user)
|
[
"def",
"revoke_user_token",
"(",
"self",
",",
"user_id",
")",
":",
"user",
"=",
"self",
".",
"get",
"(",
"user_id",
")",
"user",
".",
"_token",
"=",
"None",
"self",
".",
"save",
"(",
"user",
")"
] | 28.8 | 13.4 |
def get_metric_type(measure, aggregation):
"""Get the corresponding metric type for the given stats type.
:type measure: (:class: '~opencensus.stats.measure.BaseMeasure')
:param measure: the measure for which to find a metric type
:type aggregation: (:class:
'~opencensus.stats.aggregation.BaseAggregation')
:param aggregation: the aggregation for which to find a metric type
"""
if aggregation.aggregation_type == aggregation_module.Type.NONE:
raise ValueError("aggregation type must not be NONE")
assert isinstance(aggregation,
AGGREGATION_TYPE_MAP[aggregation.aggregation_type])
if aggregation.aggregation_type == aggregation_module.Type.SUM:
if isinstance(measure, measure_module.MeasureInt):
return metric_descriptor.MetricDescriptorType.CUMULATIVE_INT64
elif isinstance(measure, measure_module.MeasureFloat):
return metric_descriptor.MetricDescriptorType.CUMULATIVE_DOUBLE
else:
raise ValueError
elif aggregation.aggregation_type == aggregation_module.Type.COUNT:
return metric_descriptor.MetricDescriptorType.CUMULATIVE_INT64
elif aggregation.aggregation_type == aggregation_module.Type.DISTRIBUTION:
return metric_descriptor.MetricDescriptorType.CUMULATIVE_DISTRIBUTION
elif aggregation.aggregation_type == aggregation_module.Type.LASTVALUE:
if isinstance(measure, measure_module.MeasureInt):
return metric_descriptor.MetricDescriptorType.GAUGE_INT64
elif isinstance(measure, measure_module.MeasureFloat):
return metric_descriptor.MetricDescriptorType.GAUGE_DOUBLE
else:
raise ValueError
else:
raise AssertionError
|
[
"def",
"get_metric_type",
"(",
"measure",
",",
"aggregation",
")",
":",
"if",
"aggregation",
".",
"aggregation_type",
"==",
"aggregation_module",
".",
"Type",
".",
"NONE",
":",
"raise",
"ValueError",
"(",
"\"aggregation type must not be NONE\"",
")",
"assert",
"isinstance",
"(",
"aggregation",
",",
"AGGREGATION_TYPE_MAP",
"[",
"aggregation",
".",
"aggregation_type",
"]",
")",
"if",
"aggregation",
".",
"aggregation_type",
"==",
"aggregation_module",
".",
"Type",
".",
"SUM",
":",
"if",
"isinstance",
"(",
"measure",
",",
"measure_module",
".",
"MeasureInt",
")",
":",
"return",
"metric_descriptor",
".",
"MetricDescriptorType",
".",
"CUMULATIVE_INT64",
"elif",
"isinstance",
"(",
"measure",
",",
"measure_module",
".",
"MeasureFloat",
")",
":",
"return",
"metric_descriptor",
".",
"MetricDescriptorType",
".",
"CUMULATIVE_DOUBLE",
"else",
":",
"raise",
"ValueError",
"elif",
"aggregation",
".",
"aggregation_type",
"==",
"aggregation_module",
".",
"Type",
".",
"COUNT",
":",
"return",
"metric_descriptor",
".",
"MetricDescriptorType",
".",
"CUMULATIVE_INT64",
"elif",
"aggregation",
".",
"aggregation_type",
"==",
"aggregation_module",
".",
"Type",
".",
"DISTRIBUTION",
":",
"return",
"metric_descriptor",
".",
"MetricDescriptorType",
".",
"CUMULATIVE_DISTRIBUTION",
"elif",
"aggregation",
".",
"aggregation_type",
"==",
"aggregation_module",
".",
"Type",
".",
"LASTVALUE",
":",
"if",
"isinstance",
"(",
"measure",
",",
"measure_module",
".",
"MeasureInt",
")",
":",
"return",
"metric_descriptor",
".",
"MetricDescriptorType",
".",
"GAUGE_INT64",
"elif",
"isinstance",
"(",
"measure",
",",
"measure_module",
".",
"MeasureFloat",
")",
":",
"return",
"metric_descriptor",
".",
"MetricDescriptorType",
".",
"GAUGE_DOUBLE",
"else",
":",
"raise",
"ValueError",
"else",
":",
"raise",
"AssertionError"
] | 49.171429 | 24 |
def set_logging(self, log_level=logging.ERROR, file_path_name=None):
"""
This function allows to change the logging backend, either output or file as backend
It also allows to set the logging level (whether to display only critical/error/info/debug.
for example::
yag = yagmail.SMTP()
yag.set_logging(yagmail.logging.DEBUG) # to see everything
and::
yagmail.set_logging(yagmail.logging.DEBUG, 'somelocalfile.log')
lastly, a log_level of :py:class:`None` will make sure there is no I/O.
"""
self.log = get_logger(log_level, file_path_name)
|
[
"def",
"set_logging",
"(",
"self",
",",
"log_level",
"=",
"logging",
".",
"ERROR",
",",
"file_path_name",
"=",
"None",
")",
":",
"self",
".",
"log",
"=",
"get_logger",
"(",
"log_level",
",",
"file_path_name",
")"
] | 39.25 | 29.625 |
def visit_With(self, node):
"""
with describe(thing) as it:
...
|
v
class TestThing(TestCase):
...
"""
withitem, = node.items
context = withitem.context_expr
if context.func.id == "describe":
describes = context.args[0].id
example_group_name = withitem.optional_vars.id
return self.transform_describe(node, describes, example_group_name)
else:
return node
|
[
"def",
"visit_With",
"(",
"self",
",",
"node",
")",
":",
"withitem",
",",
"=",
"node",
".",
"items",
"context",
"=",
"withitem",
".",
"context_expr",
"if",
"context",
".",
"func",
".",
"id",
"==",
"\"describe\"",
":",
"describes",
"=",
"context",
".",
"args",
"[",
"0",
"]",
".",
"id",
"example_group_name",
"=",
"withitem",
".",
"optional_vars",
".",
"id",
"return",
"self",
".",
"transform_describe",
"(",
"node",
",",
"describes",
",",
"example_group_name",
")",
"else",
":",
"return",
"node"
] | 22.772727 | 20.045455 |
def parse_delta(filename):
"""Returns (alignment length, similarity errors) tuple from passed .delta.
- filename - path to the input .delta file
Extracts the aligned length and number of similarity errors for each
aligned uniquely-matched region, and returns the cumulative total for
each as a tuple.
"""
aln_length, sim_errors = 0, 0
for line in [l.strip().split() for l in open(filename, "r").readlines()]:
if line[0] == "NUCMER" or line[0].startswith(">"): # Skip headers
continue
# We only process lines with seven columns:
if len(line) == 7:
aln_length += abs(int(line[1]) - int(line[0]))
sim_errors += int(line[4])
return aln_length, sim_errors
|
[
"def",
"parse_delta",
"(",
"filename",
")",
":",
"aln_length",
",",
"sim_errors",
"=",
"0",
",",
"0",
"for",
"line",
"in",
"[",
"l",
".",
"strip",
"(",
")",
".",
"split",
"(",
")",
"for",
"l",
"in",
"open",
"(",
"filename",
",",
"\"r\"",
")",
".",
"readlines",
"(",
")",
"]",
":",
"if",
"line",
"[",
"0",
"]",
"==",
"\"NUCMER\"",
"or",
"line",
"[",
"0",
"]",
".",
"startswith",
"(",
"\">\"",
")",
":",
"# Skip headers",
"continue",
"# We only process lines with seven columns:",
"if",
"len",
"(",
"line",
")",
"==",
"7",
":",
"aln_length",
"+=",
"abs",
"(",
"int",
"(",
"line",
"[",
"1",
"]",
")",
"-",
"int",
"(",
"line",
"[",
"0",
"]",
")",
")",
"sim_errors",
"+=",
"int",
"(",
"line",
"[",
"4",
"]",
")",
"return",
"aln_length",
",",
"sim_errors"
] | 40.666667 | 18.611111 |
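A worked example of the per-line arithmetic, using an illustrative seven-column alignment line from a NUCmer .delta file:
line = "1 5000 1 5021 13 13 0".split()
aln_length = abs(int(line[1]) - int(line[0]))  # 4999 aligned bases
sim_errors = int(line[4])                      # 13 similarity errors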
def _search_chimera(binary, directories, prefix, search_all=False):
"""
Try running ``chimera --root`` if Chimera happens to be in PATH, otherwise
traverse usual installation locations to find the Chimera root path.
Parameters
----------
binary : str
Name of the chimera executable in this platform
directories: list of str
Usual installation locations in this platform
prefix : str
Root directory prefix name in this platform
search_all : bool, optional, default=False
Collect all posible locations of Chimera installations, even if a
binary has been found.
Returns
-------
paths : list of str
Sorted list of Chimera paths. If found, the first one is the one returned
by the binary call. Next items are the ones found in `directories`, sorted
by descendent order.
"""
# First, check if environment variable is already present
try:
return os.path.expanduser(os.environ['CHIMERADIR']),
except KeyError:
pass
paths = []
binary_path = find_executable(binary)
if binary_path is not None:
real_path = os.path.realpath(binary_path) # follow symlinks
chimera_dir = os.path.sep + os.path.join(*real_path.split(os.path.sep)[1:-2])
paths.append(chimera_dir)
else:
search_all = True
if search_all:
for basedir in directories:
            found_paths = list(filter(os.path.isdir, glob(os.path.join(basedir, prefix))))
if found_paths:
found_paths.sort()
found_paths.reverse()
paths.extend(found_paths)
seen = set()
paths = [p for p in paths if p not in seen and not seen.add(p)]
return paths
|
[
"def",
"_search_chimera",
"(",
"binary",
",",
"directories",
",",
"prefix",
",",
"search_all",
"=",
"False",
")",
":",
"# First, check if environment variable is already present",
"try",
":",
"return",
"os",
".",
"path",
".",
"expanduser",
"(",
"os",
".",
"environ",
"[",
"'CHIMERADIR'",
"]",
")",
",",
"except",
"KeyError",
":",
"pass",
"paths",
"=",
"[",
"]",
"binary_path",
"=",
"find_executable",
"(",
"binary",
")",
"if",
"binary_path",
"is",
"not",
"None",
":",
"real_path",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"binary_path",
")",
"# follow symlinks",
"chimera_dir",
"=",
"os",
".",
"path",
".",
"sep",
"+",
"os",
".",
"path",
".",
"join",
"(",
"*",
"real_path",
".",
"split",
"(",
"os",
".",
"path",
".",
"sep",
")",
"[",
"1",
":",
"-",
"2",
"]",
")",
"paths",
".",
"append",
"(",
"chimera_dir",
")",
"else",
":",
"search_all",
"=",
"True",
"if",
"search_all",
":",
"for",
"basedir",
"in",
"directories",
":",
"found_paths",
"=",
"filter",
"(",
"os",
".",
"path",
".",
"isdir",
",",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"basedir",
",",
"prefix",
")",
")",
")",
"if",
"found_paths",
":",
"found_paths",
".",
"sort",
"(",
")",
"found_paths",
".",
"reverse",
"(",
")",
"paths",
".",
"extend",
"(",
"found_paths",
")",
"seen",
"=",
"set",
"(",
")",
"paths",
"=",
"[",
"p",
"for",
"p",
"in",
"paths",
"if",
"p",
"not",
"in",
"seen",
"and",
"not",
"seen",
".",
"add",
"(",
"p",
")",
"]",
"return",
"paths"
] | 34.795918 | 22.020408 |
def push_cluster_configuration(self, scaleioobj, noUpload = False, noInstall= False, noConfigure = False):
"""
        Pushes the cached ScaleIO cluster configuration to IM (reconfigurations made to the cached configuration are committed using IM).
        Method: POST
        Attach the JSON cluster configuration as the request payload (data); add MDM and LIA passwords.
"""
self.logger.debug("push_cluster_configuration(" + "{},{},{},{})".format(scaleioobj, noUpload, noInstall, noConfigure))
#print "JSON DUMP OF CLUSTER CONFIG:"
#pprint (json.loads(scaleioobj))
config_params = {'noUpload': noUpload, 'noInstall': noInstall, 'noConfigure':noConfigure}
r1 = self._im_session.post(
"{}/{}".format(self._im_api_url,"types/Installation/instances/"),
headers={'Content-type':'application/json','Version':'1.0'},
params = config_params,
verify=self._im_verify_ssl,
#json=json.loads(self._cluster_config_cached.to_JSON()),
json = json.loads(scaleioobj),
stream=True
)
if not r1.ok:
# Something went wrong
self.logger.error("Error push_cluster_configuration() - " + "Errorcode: {}".format(r1.status_code))
#print "Response after push_cluster_configuration()"
# RESPONSE NEED TO BE WRAPPED IN try/catch. Cannot assume JSON is returned.
#print r1.text
#pprint (json.loads(r1.text))
return r1.text
|
[
"def",
"push_cluster_configuration",
"(",
"self",
",",
"scaleioobj",
",",
"noUpload",
"=",
"False",
",",
"noInstall",
"=",
"False",
",",
"noConfigure",
"=",
"False",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"\"push_cluster_configuration(\"",
"+",
"\"{},{},{},{})\"",
".",
"format",
"(",
"scaleioobj",
",",
"noUpload",
",",
"noInstall",
",",
"noConfigure",
")",
")",
"#print \"JSON DUMP OF CLUSTER CONFIG:\"",
"#pprint (json.loads(scaleioobj))",
"config_params",
"=",
"{",
"'noUpload'",
":",
"noUpload",
",",
"'noInstall'",
":",
"noInstall",
",",
"'noConfigure'",
":",
"noConfigure",
"}",
"r1",
"=",
"self",
".",
"_im_session",
".",
"post",
"(",
"\"{}/{}\"",
".",
"format",
"(",
"self",
".",
"_im_api_url",
",",
"\"types/Installation/instances/\"",
")",
",",
"headers",
"=",
"{",
"'Content-type'",
":",
"'application/json'",
",",
"'Version'",
":",
"'1.0'",
"}",
",",
"params",
"=",
"config_params",
",",
"verify",
"=",
"self",
".",
"_im_verify_ssl",
",",
"#json=json.loads(self._cluster_config_cached.to_JSON()),",
"json",
"=",
"json",
".",
"loads",
"(",
"scaleioobj",
")",
",",
"stream",
"=",
"True",
")",
"if",
"not",
"r1",
".",
"ok",
":",
"# Something went wrong",
"self",
".",
"logger",
".",
"error",
"(",
"\"Error push_cluster_configuration() - \"",
"+",
"\"Errorcode: {}\"",
".",
"format",
"(",
"r1",
".",
"status_code",
")",
")",
"#print \"Response after push_cluster_configuration()\"",
"# RESPONSE NEED TO BE WRAPPED IN try/catch. Cannot assume JSON is returned.",
"#print r1.text",
"#pprint (json.loads(r1.text))",
"return",
"r1",
".",
"text"
] | 50.2 | 28.533333 |
def purge_all(self, rate_limit_delay=60):
'''Purge all pending URLs, waiting for API rate-limits if necessary!'''
for batch, response in self.purge():
if response.status_code == 507:
details = response.json().get('detail', '<response did not contain "detail">')
logger.info('Will retry request in %d seconds due to API rate-limit: %s',
rate_limit_delay, details)
time.sleep(rate_limit_delay)
|
[
"def",
"purge_all",
"(",
"self",
",",
"rate_limit_delay",
"=",
"60",
")",
":",
"for",
"batch",
",",
"response",
"in",
"self",
".",
"purge",
"(",
")",
":",
"if",
"response",
".",
"status_code",
"==",
"507",
":",
"details",
"=",
"response",
".",
"json",
"(",
")",
".",
"get",
"(",
"'detail'",
",",
"'<response did not contain \"detail\">'",
")",
"logger",
".",
"info",
"(",
"'Will retry request in %d seconds due to API rate-limit: %s'",
",",
"rate_limit_delay",
",",
"details",
")",
"time",
".",
"sleep",
"(",
"rate_limit_delay",
")"
] | 54.222222 | 23.111111 |
def GetRootFileEntry(self):
"""Retrieves the root file entry.
Returns:
APFSFileEntry: file entry.
"""
path_spec = apfs_path_spec.APFSPathSpec(
location=self.LOCATION_ROOT, identifier=self.ROOT_DIRECTORY_IDENTIFIER,
parent=self._path_spec.parent)
return self.GetFileEntryByPathSpec(path_spec)
|
[
"def",
"GetRootFileEntry",
"(",
"self",
")",
":",
"path_spec",
"=",
"apfs_path_spec",
".",
"APFSPathSpec",
"(",
"location",
"=",
"self",
".",
"LOCATION_ROOT",
",",
"identifier",
"=",
"self",
".",
"ROOT_DIRECTORY_IDENTIFIER",
",",
"parent",
"=",
"self",
".",
"_path_spec",
".",
"parent",
")",
"return",
"self",
".",
"GetFileEntryByPathSpec",
"(",
"path_spec",
")"
] | 32.5 | 14.3 |
def find_cell_content(self, lines):
"""Parse cell till its end and set content, lines_to_next_cell.
Return the position of next cell start"""
cell_end_marker, next_cell_start, explicit_eoc = self.find_cell_end(lines)
# Metadata to dict
if self.start_code_re.match(lines[0]) or self.alternative_start_code_re.match(lines[0]):
cell_start = 1
else:
cell_start = 0
# Cell content
source = lines[cell_start:cell_end_marker]
self.org_content = [line for line in source]
if self.cell_type != 'code' or (self.metadata and not is_active('py', self.metadata)) \
or (self.language is not None and self.language != self.default_language):
source = uncomment(source, self.comment)
elif self.metadata is not None and self.comment_magics:
source = self.uncomment_code_and_magics(source)
self.content = source
self.lines_to_next_cell = count_lines_to_next_cell(
cell_end_marker,
next_cell_start,
len(lines),
explicit_eoc)
return next_cell_start
|
[
"def",
"find_cell_content",
"(",
"self",
",",
"lines",
")",
":",
"cell_end_marker",
",",
"next_cell_start",
",",
"explicit_eoc",
"=",
"self",
".",
"find_cell_end",
"(",
"lines",
")",
"# Metadata to dict",
"if",
"self",
".",
"start_code_re",
".",
"match",
"(",
"lines",
"[",
"0",
"]",
")",
"or",
"self",
".",
"alternative_start_code_re",
".",
"match",
"(",
"lines",
"[",
"0",
"]",
")",
":",
"cell_start",
"=",
"1",
"else",
":",
"cell_start",
"=",
"0",
"# Cell content",
"source",
"=",
"lines",
"[",
"cell_start",
":",
"cell_end_marker",
"]",
"self",
".",
"org_content",
"=",
"[",
"line",
"for",
"line",
"in",
"source",
"]",
"if",
"self",
".",
"cell_type",
"!=",
"'code'",
"or",
"(",
"self",
".",
"metadata",
"and",
"not",
"is_active",
"(",
"'py'",
",",
"self",
".",
"metadata",
")",
")",
"or",
"(",
"self",
".",
"language",
"is",
"not",
"None",
"and",
"self",
".",
"language",
"!=",
"self",
".",
"default_language",
")",
":",
"source",
"=",
"uncomment",
"(",
"source",
",",
"self",
".",
"comment",
")",
"elif",
"self",
".",
"metadata",
"is",
"not",
"None",
"and",
"self",
".",
"comment_magics",
":",
"source",
"=",
"self",
".",
"uncomment_code_and_magics",
"(",
"source",
")",
"self",
".",
"content",
"=",
"source",
"self",
".",
"lines_to_next_cell",
"=",
"count_lines_to_next_cell",
"(",
"cell_end_marker",
",",
"next_cell_start",
",",
"len",
"(",
"lines",
")",
",",
"explicit_eoc",
")",
"return",
"next_cell_start"
] | 37.633333 | 23.566667 |
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_alert_above_above_highthresh_action(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-monitor")
threshold_monitor = ET.SubElement(threshold_monitor_hidden, "threshold-monitor")
sfp = ET.SubElement(threshold_monitor, "sfp")
policy = ET.SubElement(sfp, "policy")
policy_name_key = ET.SubElement(policy, "policy_name")
policy_name_key.text = kwargs.pop('policy_name')
area = ET.SubElement(policy, "area")
type_key = ET.SubElement(area, "type")
type_key.text = kwargs.pop('type')
area_value_key = ET.SubElement(area, "area_value")
area_value_key.text = kwargs.pop('area_value')
alert = ET.SubElement(area, "alert")
above = ET.SubElement(alert, "above")
above_highthresh_action = ET.SubElement(above, "above-highthresh-action")
above_highthresh_action.text = kwargs.pop('above_highthresh_action')
callback = kwargs.pop('callback', self._callback)
return callback(config)
|
[
"def",
"threshold_monitor_hidden_threshold_monitor_sfp_policy_area_alert_above_above_highthresh_action",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"config",
"=",
"ET",
".",
"Element",
"(",
"\"config\"",
")",
"threshold_monitor_hidden",
"=",
"ET",
".",
"SubElement",
"(",
"config",
",",
"\"threshold-monitor-hidden\"",
",",
"xmlns",
"=",
"\"urn:brocade.com:mgmt:brocade-threshold-monitor\"",
")",
"threshold_monitor",
"=",
"ET",
".",
"SubElement",
"(",
"threshold_monitor_hidden",
",",
"\"threshold-monitor\"",
")",
"sfp",
"=",
"ET",
".",
"SubElement",
"(",
"threshold_monitor",
",",
"\"sfp\"",
")",
"policy",
"=",
"ET",
".",
"SubElement",
"(",
"sfp",
",",
"\"policy\"",
")",
"policy_name_key",
"=",
"ET",
".",
"SubElement",
"(",
"policy",
",",
"\"policy_name\"",
")",
"policy_name_key",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'policy_name'",
")",
"area",
"=",
"ET",
".",
"SubElement",
"(",
"policy",
",",
"\"area\"",
")",
"type_key",
"=",
"ET",
".",
"SubElement",
"(",
"area",
",",
"\"type\"",
")",
"type_key",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'type'",
")",
"area_value_key",
"=",
"ET",
".",
"SubElement",
"(",
"area",
",",
"\"area_value\"",
")",
"area_value_key",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'area_value'",
")",
"alert",
"=",
"ET",
".",
"SubElement",
"(",
"area",
",",
"\"alert\"",
")",
"above",
"=",
"ET",
".",
"SubElement",
"(",
"alert",
",",
"\"above\"",
")",
"above_highthresh_action",
"=",
"ET",
".",
"SubElement",
"(",
"above",
",",
"\"above-highthresh-action\"",
")",
"above_highthresh_action",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'above_highthresh_action'",
")",
"callback",
"=",
"kwargs",
".",
"pop",
"(",
"'callback'",
",",
"self",
".",
"_callback",
")",
"return",
"callback",
"(",
"config",
")"
] | 55.227273 | 21.727273 |
def export_dse_home_in_dse_env_sh(self):
'''
Due to the way CCM lays out files, separating the repository
from the node(s) confs, the `dse-env.sh` script of each node
needs to have its DSE_HOME var set and exported. Since DSE
4.5.x, the stock `dse-env.sh` file includes a commented-out
place to do exactly this, intended for installers.
Basically: read in the file, write it back out and add the two
lines.
'sstableloader' is an example of a node script that depends on
this, when used in a CCM-built cluster.
'''
with open(self.get_bin_dir() + "/dse-env.sh", "r") as dse_env_sh:
buf = dse_env_sh.readlines()
with open(self.get_bin_dir() + "/dse-env.sh", "w") as out_file:
for line in buf:
out_file.write(line)
if line == "# This is here so the installer can force set DSE_HOME\n":
out_file.write("DSE_HOME=" + self.get_install_dir() + "\nexport DSE_HOME\n")
|
[
"def",
"export_dse_home_in_dse_env_sh",
"(",
"self",
")",
":",
"with",
"open",
"(",
"self",
".",
"get_bin_dir",
"(",
")",
"+",
"\"/dse-env.sh\"",
",",
"\"r\"",
")",
"as",
"dse_env_sh",
":",
"buf",
"=",
"dse_env_sh",
".",
"readlines",
"(",
")",
"with",
"open",
"(",
"self",
".",
"get_bin_dir",
"(",
")",
"+",
"\"/dse-env.sh\"",
",",
"\"w\"",
")",
"as",
"out_file",
":",
"for",
"line",
"in",
"buf",
":",
"out_file",
".",
"write",
"(",
"line",
")",
"if",
"line",
"==",
"\"# This is here so the installer can force set DSE_HOME\\n\"",
":",
"out_file",
".",
"write",
"(",
"\"DSE_HOME=\"",
"+",
"self",
".",
"get_install_dir",
"(",
")",
"+",
"\"\\nexport DSE_HOME\\n\"",
")"
] | 51 | 25 |
def periodic_hann(window_length):
"""Calculate a "periodic" Hann window.
The classic Hann window is defined as a raised cosine that starts and
ends on zero, and where every value appears twice, except the middle
point for an odd-length window. Matlab calls this a "symmetric" window
and np.hanning() returns it. However, for Fourier analysis, this
actually represents just over one cycle of a period N-1 cosine, and
thus is not compactly expressed on a length-N Fourier basis. Instead,
it's better to use a raised cosine that ends just before the final
zero value - i.e. a complete cycle of a period-N cosine. Matlab
calls this a "periodic" window. This routine calculates it.
Args:
window_length: The number of points in the returned window.
Returns:
A 1D np.array containing the periodic hann window.
"""
return 0.5 - (0.5 * np.cos(2 * np.pi / window_length *
np.arange(window_length)))
|
[
"def",
"periodic_hann",
"(",
"window_length",
")",
":",
"return",
"0.5",
"-",
"(",
"0.5",
"*",
"np",
".",
"cos",
"(",
"2",
"*",
"np",
".",
"pi",
"/",
"window_length",
"*",
"np",
".",
"arange",
"(",
"window_length",
")",
")",
")"
] | 44.761905 | 24.52381 |
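A small comparison against the symmetric window, assuming numpy is imported as np:
import numpy as np
np.hanning(4)      # symmetric: [0.  , 0.75, 0.75, 0.  ]
periodic_hann(4)   # periodic:  [0.  , 0.5 , 1.  , 0.5 ] -- one full period-4 cosine cycle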
def major_tick_mark(self):
"""
Read/write :ref:`XlTickMark` value specifying the type of major tick
mark to display on this axis.
"""
majorTickMark = self._element.majorTickMark
if majorTickMark is None:
return XL_TICK_MARK.CROSS
return majorTickMark.val
|
[
"def",
"major_tick_mark",
"(",
"self",
")",
":",
"majorTickMark",
"=",
"self",
".",
"_element",
".",
"majorTickMark",
"if",
"majorTickMark",
"is",
"None",
":",
"return",
"XL_TICK_MARK",
".",
"CROSS",
"return",
"majorTickMark",
".",
"val"
] | 34.888889 | 9.111111 |
def ac(d, key, **kwarg):
"""Alias of :meth:`self.add_children()<DictTree.add_children>`.
"""
if kwarg:
d[key] = {_meta: kwarg}
else:
d[key] = dict()
|
[
"def",
"ac",
"(",
"d",
",",
"key",
",",
"*",
"*",
"kwarg",
")",
":",
"if",
"kwarg",
":",
"d",
"[",
"key",
"]",
"=",
"{",
"_meta",
":",
"kwarg",
"}",
"else",
":",
"d",
"[",
"key",
"]",
"=",
"dict",
"(",
")"
] | 28.285714 | 12 |
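A sketch of how ac mutates the tree dict (_meta is the module's metadata key):
d = {}
ac(d, 'root')                # d == {'root': {}}
ac(d, 'node', color='red')   # d['node'] == {_meta: {'color': 'red'}}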
def _extract_definitions(alist, level=None):
"""
Since we couldn't be bothered to register models elsewhere
our definitions need to be extracted from the parameters.
We require an 'id' field for the schema to be correctly
added to the definitions list.
"""
def _extract_array_defs(source):
# extract any definitions that are within arrays
# this occurs recursively
ret = []
items = source.get('items')
if items is not None and 'schema' in items:
ret += _extract_definitions([items], level+1)
return ret
# for tracking level of recursion
if level is None:
level = 0
defs = list()
if alist is not None:
for item in alist:
schema = item.get("schema")
if schema is not None:
schema_id = schema.get("id")
if schema_id is not None:
defs.append(schema)
ref = {"$ref": "#/definitions/{}".format(schema_id)}
# only add the reference as a schema if we are in a response or
# a parameter i.e. at the top level
# directly ref if a definition is used within another definition
if level == 0:
item['schema'] = ref
else:
item.update(ref)
del item['schema']
# extract any definitions that are within properties
# this occurs recursively
properties = schema.get('properties')
if properties is not None:
defs += _extract_definitions(properties.values(), level+1)
defs += _extract_array_defs(schema)
defs += _extract_array_defs(item)
return defs
|
[
"def",
"_extract_definitions",
"(",
"alist",
",",
"level",
"=",
"None",
")",
":",
"def",
"_extract_array_defs",
"(",
"source",
")",
":",
"# extract any definitions that are within arrays",
"# this occurs recursively",
"ret",
"=",
"[",
"]",
"items",
"=",
"source",
".",
"get",
"(",
"'items'",
")",
"if",
"items",
"is",
"not",
"None",
"and",
"'schema'",
"in",
"items",
":",
"ret",
"+=",
"_extract_definitions",
"(",
"[",
"items",
"]",
",",
"level",
"+",
"1",
")",
"return",
"ret",
"# for tracking level of recursion",
"if",
"level",
"is",
"None",
":",
"level",
"=",
"0",
"defs",
"=",
"list",
"(",
")",
"if",
"alist",
"is",
"not",
"None",
":",
"for",
"item",
"in",
"alist",
":",
"schema",
"=",
"item",
".",
"get",
"(",
"\"schema\"",
")",
"if",
"schema",
"is",
"not",
"None",
":",
"schema_id",
"=",
"schema",
".",
"get",
"(",
"\"id\"",
")",
"if",
"schema_id",
"is",
"not",
"None",
":",
"defs",
".",
"append",
"(",
"schema",
")",
"ref",
"=",
"{",
"\"$ref\"",
":",
"\"#/definitions/{}\"",
".",
"format",
"(",
"schema_id",
")",
"}",
"# only add the reference as a schema if we are in a response or",
"# a parameter i.e. at the top level",
"# directly ref if a definition is used within another definition",
"if",
"level",
"==",
"0",
":",
"item",
"[",
"'schema'",
"]",
"=",
"ref",
"else",
":",
"item",
".",
"update",
"(",
"ref",
")",
"del",
"item",
"[",
"'schema'",
"]",
"# extract any definitions that are within properties",
"# this occurs recursively",
"properties",
"=",
"schema",
".",
"get",
"(",
"'properties'",
")",
"if",
"properties",
"is",
"not",
"None",
":",
"defs",
"+=",
"_extract_definitions",
"(",
"properties",
".",
"values",
"(",
")",
",",
"level",
"+",
"1",
")",
"defs",
"+=",
"_extract_array_defs",
"(",
"schema",
")",
"defs",
"+=",
"_extract_array_defs",
"(",
"item",
")",
"return",
"defs"
] | 35.058824 | 17.490196 |
def job_monitor(job, interval=None, monitor_async=False, quiet=False, output=sys.stdout):
"""Monitor the status of a IBMQJob instance.
Args:
job (BaseJob): Job to monitor.
interval (int): Time interval between status queries.
        monitor_async (bool): Monitor asynchronously (in Jupyter only).
quiet (bool): If True, do not print status messages.
output (file): The file like object to write status messages to.
By default this is sys.stdout.
Raises:
QiskitError: When trying to run async outside of Jupyter
ImportError: ipywidgets not available for notebook.
"""
if interval is None:
_interval_set = False
interval = 2
else:
_interval_set = True
if _NOTEBOOK_ENV:
if monitor_async:
try:
import ipywidgets as widgets # pylint: disable=import-error
except ImportError:
raise ImportError('These functions need ipywidgets. '
'Run "pip install ipywidgets" before.')
from qiskit.tools.jupyter.jupyter_magics import _html_checker # pylint: disable=C0412
style = "font-size:16px;"
header = "<p style='{style}'>Job Status: %s </p>".format(
style=style)
status = widgets.HTML(value=header % job.status().value)
display(status)
thread = threading.Thread(target=_html_checker, args=(job, interval,
status, header))
thread.start()
else:
_text_checker(job, interval, _interval_set,
quiet=quiet, output=output)
else:
if monitor_async:
raise QiskitError(
'monitor_async only available in Jupyter notebooks.')
_text_checker(job, interval, _interval_set, quiet=quiet, output=output)
|
[
"def",
"job_monitor",
"(",
"job",
",",
"interval",
"=",
"None",
",",
"monitor_async",
"=",
"False",
",",
"quiet",
"=",
"False",
",",
"output",
"=",
"sys",
".",
"stdout",
")",
":",
"if",
"interval",
"is",
"None",
":",
"_interval_set",
"=",
"False",
"interval",
"=",
"2",
"else",
":",
"_interval_set",
"=",
"True",
"if",
"_NOTEBOOK_ENV",
":",
"if",
"monitor_async",
":",
"try",
":",
"import",
"ipywidgets",
"as",
"widgets",
"# pylint: disable=import-error",
"except",
"ImportError",
":",
"raise",
"ImportError",
"(",
"'These functions need ipywidgets. '",
"'Run \"pip install ipywidgets\" before.'",
")",
"from",
"qiskit",
".",
"tools",
".",
"jupyter",
".",
"jupyter_magics",
"import",
"_html_checker",
"# pylint: disable=C0412",
"style",
"=",
"\"font-size:16px;\"",
"header",
"=",
"\"<p style='{style}'>Job Status: %s </p>\"",
".",
"format",
"(",
"style",
"=",
"style",
")",
"status",
"=",
"widgets",
".",
"HTML",
"(",
"value",
"=",
"header",
"%",
"job",
".",
"status",
"(",
")",
".",
"value",
")",
"display",
"(",
"status",
")",
"thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"_html_checker",
",",
"args",
"=",
"(",
"job",
",",
"interval",
",",
"status",
",",
"header",
")",
")",
"thread",
".",
"start",
"(",
")",
"else",
":",
"_text_checker",
"(",
"job",
",",
"interval",
",",
"_interval_set",
",",
"quiet",
"=",
"quiet",
",",
"output",
"=",
"output",
")",
"else",
":",
"if",
"monitor_async",
":",
"raise",
"QiskitError",
"(",
"'monitor_async only available in Jupyter notebooks.'",
")",
"_text_checker",
"(",
"job",
",",
"interval",
",",
"_interval_set",
",",
"quiet",
"=",
"quiet",
",",
"output",
"=",
"output",
")"
] | 40.340426 | 24.106383 |
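A hypothetical usage after submitting a job to an IBMQ backend (backend and qobj are assumed to exist):
job = backend.run(qobj)
# Prints a status line every 5 seconds until the job reaches a final state:
job_monitor(job, interval=5)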
def _download_libraries(self, libname):
""" download enrichr libraries."""
self._logger.info("Downloading and generating Enrichr library gene sets......")
s = retry(5)
        # query string
ENRICHR_URL = 'http://amp.pharm.mssm.edu/Enrichr/geneSetLibrary'
query_string = '?mode=text&libraryName=%s'
# get
response = s.get( ENRICHR_URL + query_string % libname, timeout=None)
if not response.ok:
raise Exception('Error fetching enrichment results, check internet connection first.')
# reformat to dict and save to disk
mkdirs(DEFAULT_CACHE_PATH)
genesets_dict = {}
outname = "enrichr.%s.gmt"%libname
gmtout = open(os.path.join(DEFAULT_CACHE_PATH, outname), "w")
for line in response.iter_lines(chunk_size=1024, decode_unicode='utf-8'):
line=line.strip()
k = line.split("\t")[0]
v = list(map(lambda x: x.split(",")[0], line.split("\t")[2:]))
genesets_dict.update({ k: v})
outline = "%s\t\t%s\n"%(k, "\t".join(v))
gmtout.write(outline)
gmtout.close()
return genesets_dict
|
[
"def",
"_download_libraries",
"(",
"self",
",",
"libname",
")",
":",
"self",
".",
"_logger",
".",
"info",
"(",
"\"Downloading and generating Enrichr library gene sets......\"",
")",
"s",
"=",
"retry",
"(",
"5",
")",
"# queery string",
"ENRICHR_URL",
"=",
"'http://amp.pharm.mssm.edu/Enrichr/geneSetLibrary'",
"query_string",
"=",
"'?mode=text&libraryName=%s'",
"# get",
"response",
"=",
"s",
".",
"get",
"(",
"ENRICHR_URL",
"+",
"query_string",
"%",
"libname",
",",
"timeout",
"=",
"None",
")",
"if",
"not",
"response",
".",
"ok",
":",
"raise",
"Exception",
"(",
"'Error fetching enrichment results, check internet connection first.'",
")",
"# reformat to dict and save to disk",
"mkdirs",
"(",
"DEFAULT_CACHE_PATH",
")",
"genesets_dict",
"=",
"{",
"}",
"outname",
"=",
"\"enrichr.%s.gmt\"",
"%",
"libname",
"gmtout",
"=",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"DEFAULT_CACHE_PATH",
",",
"outname",
")",
",",
"\"w\"",
")",
"for",
"line",
"in",
"response",
".",
"iter_lines",
"(",
"chunk_size",
"=",
"1024",
",",
"decode_unicode",
"=",
"'utf-8'",
")",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"k",
"=",
"line",
".",
"split",
"(",
"\"\\t\"",
")",
"[",
"0",
"]",
"v",
"=",
"list",
"(",
"map",
"(",
"lambda",
"x",
":",
"x",
".",
"split",
"(",
"\",\"",
")",
"[",
"0",
"]",
",",
"line",
".",
"split",
"(",
"\"\\t\"",
")",
"[",
"2",
":",
"]",
")",
")",
"genesets_dict",
".",
"update",
"(",
"{",
"k",
":",
"v",
"}",
")",
"outline",
"=",
"\"%s\\t\\t%s\\n\"",
"%",
"(",
"k",
",",
"\"\\t\"",
".",
"join",
"(",
"v",
")",
")",
"gmtout",
".",
"write",
"(",
"outline",
")",
"gmtout",
".",
"close",
"(",
")",
"return",
"genesets_dict"
] | 44.5 | 19.115385 |
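A minimal, self-contained sketch of the GMT parsing step used above (parse_gmt_line is an illustrative name, not part of the library): each tab-separated line carries a gene-set name, a description, and gene entries whose optional ',weight' suffix is dropped.

def parse_gmt_line(line):
    # GMT layout: name \t description \t gene[,weight] \t gene[,weight] ...
    fields = line.strip().split("\t")
    name = fields[0]
    genes = [g.split(",")[0] for g in fields[2:]]
    return name, genes

line = "Apoptosis\tNA\tTP53,1.0\tBAX,0.5\tCASP3"
print(parse_gmt_line(line))  # ('Apoptosis', ['TP53', 'BAX', 'CASP3'])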
def get_version():
"""Get version and version_info without importing the entire module."""
path = os.path.join(os.path.dirname(__file__), 'backrefs')
fp, pathname, desc = imp.find_module('__meta__', [path])
try:
vi = imp.load_module('__meta__', fp, pathname, desc).__version_info__
return vi._get_canonical(), vi._get_dev_status()
except Exception:
print(traceback.format_exc())
finally:
fp.close()
|
[
"def",
"get_version",
"(",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"'backrefs'",
")",
"fp",
",",
"pathname",
",",
"desc",
"=",
"imp",
".",
"find_module",
"(",
"'__meta__'",
",",
"[",
"path",
"]",
")",
"try",
":",
"vi",
"=",
"imp",
".",
"load_module",
"(",
"'__meta__'",
",",
"fp",
",",
"pathname",
",",
"desc",
")",
".",
"__version_info__",
"return",
"vi",
".",
"_get_canonical",
"(",
")",
",",
"vi",
".",
"_get_dev_status",
"(",
")",
"except",
"Exception",
":",
"print",
"(",
"traceback",
".",
"format_exc",
"(",
")",
")",
"finally",
":",
"fp",
".",
"close",
"(",
")"
] | 37 | 21.75 |
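The helper above relies on the long-deprecated imp module (removed in Python 3.12). A rough modern equivalent using importlib, assuming the same backrefs/__meta__.py layout, might look like this sketch; get_version_info is an illustrative name:

import importlib.util
import os

def get_version_info(package_dir):
    # Load __meta__.py directly from disk without importing the package.
    meta_path = os.path.join(package_dir, '__meta__.py')
    spec = importlib.util.spec_from_file_location('__meta__', meta_path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module.__version_info__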
def check(self, line_info):
"Check if the initial word/function is callable and autocall is on."
if not self.shell.autocall:
return None
oinfo = line_info.ofind(self.shell) # This can mutate state via getattr
if not oinfo['found']:
return None
if callable(oinfo['obj']) \
and (not self.exclude_regexp.match(line_info.the_rest)) \
and self.function_name_regexp.match(line_info.ifun):
return self.prefilter_manager.get_handler_by_name('auto')
else:
return None
|
[
"def",
"check",
"(",
"self",
",",
"line_info",
")",
":",
"if",
"not",
"self",
".",
"shell",
".",
"autocall",
":",
"return",
"None",
"oinfo",
"=",
"line_info",
".",
"ofind",
"(",
"self",
".",
"shell",
")",
"# This can mutate state via getattr",
"if",
"not",
"oinfo",
"[",
"'found'",
"]",
":",
"return",
"None",
"if",
"callable",
"(",
"oinfo",
"[",
"'obj'",
"]",
")",
"and",
"(",
"not",
"self",
".",
"exclude_regexp",
".",
"match",
"(",
"line_info",
".",
"the_rest",
")",
")",
"and",
"self",
".",
"function_name_regexp",
".",
"match",
"(",
"line_info",
".",
"ifun",
")",
":",
"return",
"self",
".",
"prefilter_manager",
".",
"get_handler_by_name",
"(",
"'auto'",
")",
"else",
":",
"return",
"None"
] | 38.133333 | 23.6 |
def replace_persistent_volume_status(self, name, body, **kwargs): # noqa: E501
"""replace_persistent_volume_status # noqa: E501
replace status of the specified PersistentVolume # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_persistent_volume_status(name, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the PersistentVolume (required)
:param V1PersistentVolume body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:return: V1PersistentVolume
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.replace_persistent_volume_status_with_http_info(name, body, **kwargs) # noqa: E501
else:
(data) = self.replace_persistent_volume_status_with_http_info(name, body, **kwargs) # noqa: E501
return data
|
[
"def",
"replace_persistent_volume_status",
"(",
"self",
",",
"name",
",",
"body",
",",
"*",
"*",
"kwargs",
")",
":",
"# noqa: E501",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'async_req'",
")",
":",
"return",
"self",
".",
"replace_persistent_volume_status_with_http_info",
"(",
"name",
",",
"body",
",",
"*",
"*",
"kwargs",
")",
"# noqa: E501",
"else",
":",
"(",
"data",
")",
"=",
"self",
".",
"replace_persistent_volume_status_with_http_info",
"(",
"name",
",",
"body",
",",
"*",
"*",
"kwargs",
")",
"# noqa: E501",
"return",
"data"
] | 59.625 | 32.791667 |
def move(src_parent, src_idx, dest_parent, dest_idx):
"""Move an item."""
copy(src_parent, src_idx, dest_parent, dest_idx)
remove(src_parent, src_idx)
|
[
"def",
"move",
"(",
"src_parent",
",",
"src_idx",
",",
"dest_parent",
",",
"dest_idx",
")",
":",
"copy",
"(",
"src_parent",
",",
"src_idx",
",",
"dest_parent",
",",
"dest_idx",
")",
"remove",
"(",
"src_parent",
",",
"src_idx",
")"
] | 38.25 | 8.5 |
def _generate_examples(
self, image_dir, annotation_dir, split_type, has_annotation=True):
"""Generate examples as dicts.
Args:
image_dir: `str`, directory containing the images
annotation_dir: `str`, directory containing the annotations
split_type: `str`, <split_name><year> (ex: train2014)
has_annotation: `bool`, when False (for the testing set), the annotations
are not recorded
Yields:
Generator yielding the next samples
"""
if has_annotation:
instance_filename = "instances_{}.json"
else:
instance_filename = "image_info_{}.json"
# Load the label names and images
instance_path = os.path.join(
annotation_dir,
"annotations",
instance_filename.format(split_type),
)
coco_annotation = CocoAnnotation(instance_path)
# Each category is a dict:
# {
# 'id': 51, # From 1-91, some entry missing
# 'name': 'bowl',
# 'supercategory': 'kitchen',
# }
categories = coco_annotation.categories
# Each image is a dict:
# {
# 'id': 262145,
# 'file_name': 'COCO_train2014_000000262145.jpg'
# 'flickr_url': 'http://farm8.staticflickr.com/7187/xyz.jpg',
# 'coco_url': 'http://images.cocodataset.org/train2014/xyz.jpg',
# 'license': 2,
# 'date_captured': '2013-11-20 02:07:55',
# 'height': 427,
# 'width': 640,
# }
images = coco_annotation.images
# TODO(b/121375022): ClassLabel names should also contain 'id'
# and 'supercategory' (in addition to 'name')
# Warning: As Coco only uses 80 out of the 91 labels, the c['id'] values
# and the dataset's label ids won't match.
self.info.features["objects"]["label"].names = [
c["name"] for c in categories
]
# TODO(b/121375022): Conversion should be done by ClassLabel
categories_id2name = {c["id"]: c["name"] for c in categories}
# Iterate over all images
annotation_skipped = 0
for image_info in sorted(images, key=lambda x: x["id"]):
if has_annotation:
# Each instance annotation is a dict:
# {
# 'iscrowd': 0,
# 'bbox': [116.95, 305.86, 285.3, 266.03],
# 'image_id': 480023,
# 'segmentation': [[312.29, 562.89, 402.25, ...]],
# 'category_id': 58,
# 'area': 54652.9556,
# 'id': 86,
# }
instances = coco_annotation.get_annotations(img_id=image_info["id"])
else:
instances = [] # No annotations
if not instances:
annotation_skipped += 1
def build_bbox(x, y, width, height):
# pylint: disable=cell-var-from-loop
# build_bbox is only used within the loop so it is ok to use image_info
return tfds.features.BBox(
ymin=y / image_info["height"],
xmin=x / image_info["width"],
ymax=(y + height) / image_info["height"],
xmax=(x + width) / image_info["width"],
)
# pylint: enable=cell-var-from-loop
yield {
"image": os.path.join(image_dir, split_type, image_info["file_name"]),
"image/filename": image_info["file_name"],
"objects": [{
"bbox": build_bbox(*instance_info["bbox"]),
"label": categories_id2name[instance_info["category_id"]],
"is_crowd": bool(instance_info["iscrowd"]),
} for instance_info in instances],
}
logging.info(
"%d/%d images do not contains any annotations",
annotation_skipped,
len(images),
)
|
[
"def",
"_generate_examples",
"(",
"self",
",",
"image_dir",
",",
"annotation_dir",
",",
"split_type",
",",
"has_annotation",
"=",
"True",
")",
":",
"if",
"has_annotation",
":",
"instance_filename",
"=",
"\"instances_{}.json\"",
"else",
":",
"instance_filename",
"=",
"\"image_info_{}.json\"",
"# Load the label names and images",
"instance_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"annotation_dir",
",",
"\"annotations\"",
",",
"instance_filename",
".",
"format",
"(",
"split_type",
")",
",",
")",
"coco_annotation",
"=",
"CocoAnnotation",
"(",
"instance_path",
")",
"# Each category is a dict:",
"# {",
"# 'id': 51, # From 1-91, some entry missing",
"# 'name': 'bowl',",
"# 'supercategory': 'kitchen',",
"# }",
"categories",
"=",
"coco_annotation",
".",
"categories",
"# Each image is a dict:",
"# {",
"# 'id': 262145,",
"# 'file_name': 'COCO_train2014_000000262145.jpg'",
"# 'flickr_url': 'http://farm8.staticflickr.com/7187/xyz.jpg',",
"# 'coco_url': 'http://images.cocodataset.org/train2014/xyz.jpg',",
"# 'license': 2,",
"# 'date_captured': '2013-11-20 02:07:55',",
"# 'height': 427,",
"# 'width': 640,",
"# }",
"images",
"=",
"coco_annotation",
".",
"images",
"# TODO(b/121375022): ClassLabel names should also contains 'id' and",
"# and 'supercategory' (in addition to 'name')",
"# Warning: As Coco only use 80 out of the 91 labels, the c['id'] and",
"# dataset names ids won't match.",
"self",
".",
"info",
".",
"features",
"[",
"\"objects\"",
"]",
"[",
"\"label\"",
"]",
".",
"names",
"=",
"[",
"c",
"[",
"\"name\"",
"]",
"for",
"c",
"in",
"categories",
"]",
"# TODO(b/121375022): Conversion should be done by ClassLabel",
"categories_id2name",
"=",
"{",
"c",
"[",
"\"id\"",
"]",
":",
"c",
"[",
"\"name\"",
"]",
"for",
"c",
"in",
"categories",
"}",
"# Iterate over all images",
"annotation_skipped",
"=",
"0",
"for",
"image_info",
"in",
"sorted",
"(",
"images",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
"[",
"\"id\"",
"]",
")",
":",
"if",
"has_annotation",
":",
"# Each instance annotation is a dict:",
"# {",
"# 'iscrowd': 0,",
"# 'bbox': [116.95, 305.86, 285.3, 266.03],",
"# 'image_id': 480023,",
"# 'segmentation': [[312.29, 562.89, 402.25, ...]],",
"# 'category_id': 58,",
"# 'area': 54652.9556,",
"# 'id': 86,",
"# }",
"instances",
"=",
"coco_annotation",
".",
"get_annotations",
"(",
"img_id",
"=",
"image_info",
"[",
"\"id\"",
"]",
")",
"else",
":",
"instances",
"=",
"[",
"]",
"# No annotations",
"if",
"not",
"instances",
":",
"annotation_skipped",
"+=",
"1",
"def",
"build_bbox",
"(",
"x",
",",
"y",
",",
"width",
",",
"height",
")",
":",
"# pylint: disable=cell-var-from-loop",
"# build_bbox is only used within the loop so it is ok to use image_info",
"return",
"tfds",
".",
"features",
".",
"BBox",
"(",
"ymin",
"=",
"y",
"/",
"image_info",
"[",
"\"height\"",
"]",
",",
"xmin",
"=",
"x",
"/",
"image_info",
"[",
"\"width\"",
"]",
",",
"ymax",
"=",
"(",
"y",
"+",
"height",
")",
"/",
"image_info",
"[",
"\"height\"",
"]",
",",
"xmax",
"=",
"(",
"x",
"+",
"width",
")",
"/",
"image_info",
"[",
"\"width\"",
"]",
",",
")",
"# pylint: enable=cell-var-from-loop",
"yield",
"{",
"\"image\"",
":",
"os",
".",
"path",
".",
"join",
"(",
"image_dir",
",",
"split_type",
",",
"image_info",
"[",
"\"file_name\"",
"]",
")",
",",
"\"image/filename\"",
":",
"image_info",
"[",
"\"file_name\"",
"]",
",",
"\"objects\"",
":",
"[",
"{",
"\"bbox\"",
":",
"build_bbox",
"(",
"*",
"instance_info",
"[",
"\"bbox\"",
"]",
")",
",",
"\"label\"",
":",
"categories_id2name",
"[",
"instance_info",
"[",
"\"category_id\"",
"]",
"]",
",",
"\"is_crowd\"",
":",
"bool",
"(",
"instance_info",
"[",
"\"iscrowd\"",
"]",
")",
",",
"}",
"for",
"instance_info",
"in",
"instances",
"]",
",",
"}",
"logging",
".",
"info",
"(",
"\"%d/%d images do not contains any annotations\"",
",",
"annotation_skipped",
",",
"len",
"(",
"images",
")",
",",
")"
] | 34.147059 | 18.882353 |
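As a standalone check of the normalization done by build_bbox above: COCO stores boxes as [x, y, width, height] in pixels, while tfds.features.BBox expects (ymin, xmin, ymax, xmax) as fractions of the image size. The numbers below are made up for illustration.

def normalize_bbox(x, y, width, height, img_width, img_height):
    # Convert a pixel-space COCO box to normalized corner coordinates.
    return (y / img_height, x / img_width,
            (y + height) / img_height, (x + width) / img_width)

print(normalize_bbox(100, 50, 200, 150, img_width=640, img_height=427))
# -> (0.117..., 0.15625, 0.468..., 0.46875)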
def update(self, message, content, branch=None, committer=None,
author=None):
"""Update this file.
:param str message: (required), commit message to describe the update
:param str content: (required), content to update the file with
:param str branch: (optional), branch where the file exists.
Defaults to the default branch of the repository.
:param dict committer: (optional), if no information is given the
authenticated user's information will be used. You must specify
both a name and email.
:param dict author: (optional), if omitted this will be filled in with
committer information. If passed, you must specify both a name and
email.
:returns: :class:`Commit <github3.git.Commit>`
"""
if content and not isinstance(content, bytes):
raise ValueError( # (No coverage)
'content must be a bytes object') # (No coverage)
json = None
if message and content:
content = b64encode(content).decode('utf-8')
data = {'message': message, 'content': content, 'branch': branch,
'sha': self.sha,
'committer': validate_commmitter(committer),
'author': validate_commmitter(author)}
self._remove_none(data)
json = self._json(self._put(self._api, data=dumps(data)), 200)
if 'content' in json and 'commit' in json:
self.__init__(json['content'], self)
json = Commit(json['commit'], self)
return json
|
[
"def",
"update",
"(",
"self",
",",
"message",
",",
"content",
",",
"branch",
"=",
"None",
",",
"committer",
"=",
"None",
",",
"author",
"=",
"None",
")",
":",
"if",
"content",
"and",
"not",
"isinstance",
"(",
"content",
",",
"bytes",
")",
":",
"raise",
"ValueError",
"(",
"# (No coverage)",
"'content must be a bytes object'",
")",
"# (No coverage)",
"json",
"=",
"None",
"if",
"message",
"and",
"content",
":",
"content",
"=",
"b64encode",
"(",
"content",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"data",
"=",
"{",
"'message'",
":",
"message",
",",
"'content'",
":",
"content",
",",
"'branch'",
":",
"branch",
",",
"'sha'",
":",
"self",
".",
"sha",
",",
"'committer'",
":",
"validate_commmitter",
"(",
"committer",
")",
",",
"'author'",
":",
"validate_commmitter",
"(",
"author",
")",
"}",
"self",
".",
"_remove_none",
"(",
"data",
")",
"json",
"=",
"self",
".",
"_json",
"(",
"self",
".",
"_put",
"(",
"self",
".",
"_api",
",",
"data",
"=",
"dumps",
"(",
"data",
")",
")",
",",
"200",
")",
"if",
"'content'",
"in",
"json",
"and",
"'commit'",
"in",
"json",
":",
"self",
".",
"__init__",
"(",
"json",
"[",
"'content'",
"]",
",",
"self",
")",
"json",
"=",
"Commit",
"(",
"json",
"[",
"'commit'",
"]",
",",
"self",
")",
"return",
"json"
] | 47.323529 | 21.470588 |
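The encode step in update() above exists because the GitHub contents API expects file content as a base64 string inside the JSON payload. A quick round-trip check:

from base64 import b64encode, b64decode

raw = b'print("hello")\n'
payload_content = b64encode(raw).decode('utf-8')  # what gets sent as 'content'
assert b64decode(payload_content) == raw          # lossless round trip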
def to_bytes(s):
'''Return s as a bytes type, using utf-8 encoding if necessary.
@param s string or bytes
@return bytes
'''
if isinstance(s, six.binary_type):
return s
if isinstance(s, six.string_types):
return s.encode('utf-8')
raise TypeError('want string or bytes, got {}'.format(type(s)))
|
[
"def",
"to_bytes",
"(",
"s",
")",
":",
"if",
"isinstance",
"(",
"s",
",",
"six",
".",
"binary_type",
")",
":",
"return",
"s",
"if",
"isinstance",
"(",
"s",
",",
"six",
".",
"string_types",
")",
":",
"return",
"s",
".",
"encode",
"(",
"'utf-8'",
")",
"raise",
"TypeError",
"(",
"'want string or bytes, got {}'",
",",
"type",
"(",
"s",
")",
")"
] | 32 | 17.4 |
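A behavior check for the helper above (it assumes six is installed; on Python 3, six.binary_type is bytes and six.string_types covers str):

assert to_bytes(b'abc') == b'abc'        # bytes pass through untouched
assert to_bytes(u'\xe9') == b'\xc3\xa9'  # text is UTF-8 encoded
try:
    to_bytes(42)
except TypeError as exc:
    print(exc)  # want string or bytes, got <class 'int'>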
def get_xref_graph(ont):
"""
Creates a basic graph object corresponding to a remote ontology
"""
g = networkx.MultiGraph()
for (c,x) in fetchall_xrefs(ont):
g.add_edge(c,x,source=c)
return g
|
[
"def",
"get_xref_graph",
"(",
"ont",
")",
":",
"g",
"=",
"networkx",
".",
"MultiGraph",
"(",
")",
"for",
"(",
"c",
",",
"x",
")",
"in",
"fetchall_xrefs",
"(",
"ont",
")",
":",
"g",
".",
"add_edge",
"(",
"c",
",",
"x",
",",
"source",
"=",
"c",
")",
"return",
"g"
] | 26.875 | 11.625 |
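A sketch of the same construction with an in-memory stand-in for fetchall_xrefs (which presumably queries the remote ontology); the GO/MESH identifiers are illustrative:

import networkx

xrefs = [('GO:0008150', 'Wikipedia:Biological_process'),
         ('GO:0008150', 'MESH:D001686')]
g = networkx.MultiGraph()
for c, x in xrefs:
    g.add_edge(c, x, source=c)  # the edge records which side is the class
print(g.number_of_edges())      # 2
print(list(g.neighbors('GO:0008150')))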
def serialize(self, value, entity=None, request=None):
""" Validate and serialize the value.
This is the default implementation
"""
ret = self.from_python(value)
self.validate(ret)
self.run_validators(value)
return ret
|
[
"def",
"serialize",
"(",
"self",
",",
"value",
",",
"entity",
"=",
"None",
",",
"request",
"=",
"None",
")",
":",
"ret",
"=",
"self",
".",
"from_python",
"(",
"value",
")",
"self",
".",
"validate",
"(",
"ret",
")",
"self",
".",
"run_validators",
"(",
"value",
")",
"return",
"ret"
] | 23.9 | 15.7 |
def append(self, position, array):
"""Append an array to the end of the map. The position
must be greater than any positions in the map"""
if not Gauged.map_append(self.ptr, position, array.ptr):
raise MemoryError
|
[
"def",
"append",
"(",
"self",
",",
"position",
",",
"array",
")",
":",
"if",
"not",
"Gauged",
".",
"map_append",
"(",
"self",
".",
"ptr",
",",
"position",
",",
"array",
".",
"ptr",
")",
":",
"raise",
"MemoryError"
] | 49 | 8.2 |
def write(self, message, autoerase=True):
"""Send something for stdout and erased after delay"""
super(Animation, self).write(message)
self.last_message = message
if autoerase:
time.sleep(self.interval)
self.erase(message)
|
[
"def",
"write",
"(",
"self",
",",
"message",
",",
"autoerase",
"=",
"True",
")",
":",
"super",
"(",
"Animation",
",",
"self",
")",
".",
"write",
"(",
"message",
")",
"self",
".",
"last_message",
"=",
"message",
"if",
"autoerase",
":",
"time",
".",
"sleep",
"(",
"self",
".",
"interval",
")",
"self",
".",
"erase",
"(",
"message",
")"
] | 38.857143 | 6 |
def Write(self, grr_message):
"""Write the message into the transaction log.
Args:
grr_message: A GrrMessage instance.
"""
grr_message = grr_message.SerializeToString()
try:
winreg.SetValueEx(_GetServiceKey(), "Transaction", 0, winreg.REG_BINARY,
grr_message)
self._synced = False
except OSError:
pass
|
[
"def",
"Write",
"(",
"self",
",",
"grr_message",
")",
":",
"grr_message",
"=",
"grr_message",
".",
"SerializeToString",
"(",
")",
"try",
":",
"winreg",
".",
"SetValueEx",
"(",
"_GetServiceKey",
"(",
")",
",",
"\"Transaction\"",
",",
"0",
",",
"winreg",
".",
"REG_BINARY",
",",
"grr_message",
")",
"self",
".",
"_synced",
"=",
"False",
"except",
"OSError",
":",
"pass"
] | 27.846154 | 17.769231 |
def _homogenize_waves(wave_a, wave_b):
"""
Generate combined independent variable vector.
The combination is from two waveforms and the (possibly interpolated)
dependent variable vectors of these two waveforms
"""
indep_vector = _get_indep_vector(wave_a, wave_b)
dep_vector_a = _interp_dep_vector(wave_a, indep_vector)
dep_vector_b = _interp_dep_vector(wave_b, indep_vector)
return (indep_vector, dep_vector_a, dep_vector_b)
|
[
"def",
"_homogenize_waves",
"(",
"wave_a",
",",
"wave_b",
")",
":",
"indep_vector",
"=",
"_get_indep_vector",
"(",
"wave_a",
",",
"wave_b",
")",
"dep_vector_a",
"=",
"_interp_dep_vector",
"(",
"wave_a",
",",
"indep_vector",
")",
"dep_vector_b",
"=",
"_interp_dep_vector",
"(",
"wave_b",
",",
"indep_vector",
")",
"return",
"(",
"indep_vector",
",",
"dep_vector_a",
",",
"dep_vector_b",
")"
] | 41 | 14.636364 |
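A self-contained sketch of the homogenization idea with numpy standing in for the library's private helpers (_get_indep_vector and _interp_dep_vector): merge the two independent-variable grids, then interpolate each dependent vector onto the combined grid. Note that np.interp clamps outside each waveform's range rather than extrapolating.

import numpy as np

indep_a, dep_a = np.array([0.0, 1.0, 2.0]), np.array([0.0, 10.0, 20.0])
indep_b, dep_b = np.array([0.5, 1.5, 2.0]), np.array([5.0, 15.0, 20.0])

indep = np.union1d(indep_a, indep_b)        # combined grid
dep_a_i = np.interp(indep, indep_a, dep_a)  # dependent data on that grid
dep_b_i = np.interp(indep, indep_b, dep_b)
print(indep)    # [0.  0.5 1.  1.5 2. ]
print(dep_a_i)  # [ 0.  5. 10. 15. 20.]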
def ensure_tuple(obj):
"""Try and make the given argument into a tuple."""
if obj is None:
return tuple()
if isinstance(obj, Iterable) and not isinstance(obj, six.string_types):
return tuple(obj)
return obj,
|
[
"def",
"ensure_tuple",
"(",
"obj",
")",
":",
"if",
"obj",
"is",
"None",
":",
"return",
"tuple",
"(",
")",
"if",
"isinstance",
"(",
"obj",
",",
"Iterable",
")",
"and",
"not",
"isinstance",
"(",
"obj",
",",
"six",
".",
"string_types",
")",
":",
"return",
"tuple",
"(",
"obj",
")",
"return",
"obj",
","
] | 33.285714 | 18.857143 |
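Behavior check for the helper above; note that strings are deliberately not exploded character by character even though they are iterable:

assert ensure_tuple(None) == ()
assert ensure_tuple([1, 2]) == (1, 2)
assert ensure_tuple('abc') == ('abc',)   # not ('a', 'b', 'c')
assert ensure_tuple(5) == (5,)           # non-iterables are wrapped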
def _lincomb(self, a, x1, b, x2, out):
"""Raw linear combination."""
self.tspace._lincomb(a, x1.tensor, b, x2.tensor, out.tensor)
|
[
"def",
"_lincomb",
"(",
"self",
",",
"a",
",",
"x1",
",",
"b",
",",
"x2",
",",
"out",
")",
":",
"self",
".",
"tspace",
".",
"_lincomb",
"(",
"a",
",",
"x1",
".",
"tensor",
",",
"b",
",",
"x2",
".",
"tensor",
",",
"out",
".",
"tensor",
")"
] | 47.666667 | 10 |
def natural_sorted(l):
""" sorts a sortable in human order (0 < 20 < 100) """
ll = copy(l)
ll.sort(key=_natural_keys)
return ll
|
[
"def",
"natural_sorted",
"(",
"l",
")",
":",
"ll",
"=",
"copy",
"(",
"l",
")",
"ll",
".",
"sort",
"(",
"key",
"=",
"_natural_keys",
")",
"return",
"ll"
] | 27.8 | 15.8 |
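The helper above delegates to a private _natural_keys; one common way to build such a key (shown here as a sketch, not the library's actual implementation) is to split each string into digit and non-digit runs so that numeric chunks compare as integers:

import re

def _natural_keys_sketch(s):
    # 'item20' -> ['item', 20, ''] so numeric runs sort numerically.
    return [int(tok) if tok.isdigit() else tok.lower()
            for tok in re.split(r'(\d+)', s)]

print(sorted(['item100', 'item20', 'item3'], key=_natural_keys_sketch))
# ['item3', 'item20', 'item100']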
def _es_margin(settings):
"""
Extract margin formating related subset of widget settings.
"""
return {k: settings[k] for k in (ConsoleWidget.SETTING_MARGIN,
ConsoleWidget.SETTING_MARGIN_LEFT,
ConsoleWidget.SETTING_MARGIN_RIGHT,
ConsoleWidget.SETTING_MARGIN_CHAR)}
|
[
"def",
"_es_margin",
"(",
"settings",
")",
":",
"return",
"{",
"k",
":",
"settings",
"[",
"k",
"]",
"for",
"k",
"in",
"(",
"ConsoleWidget",
".",
"SETTING_MARGIN",
",",
"ConsoleWidget",
".",
"SETTING_MARGIN_LEFT",
",",
"ConsoleWidget",
".",
"SETTING_MARGIN_RIGHT",
",",
"ConsoleWidget",
".",
"SETTING_MARGIN_CHAR",
")",
"}"
] | 51.375 | 22.375 |
def get_time_variables(ds):
'''
Returns a list of variables describing the time coordinate
:param netCDF4.Dataset ds: An open netCDF4 Dataset
'''
time_variables = set()
for variable in ds.get_variables_by_attributes(standard_name='time'):
time_variables.add(variable.name)
for variable in ds.get_variables_by_attributes(axis='T'):
if variable.name not in time_variables:
time_variables.add(variable.name)
regx = r'^(?:day|d|hour|hr|h|minute|min|second|s)s? since .*$'
for variable in ds.get_variables_by_attributes(units=lambda x: isinstance(x, basestring)):
if re.match(regx, variable.units) and variable.name not in time_variables:
time_variables.add(variable.name)
return time_variables
|
[
"def",
"get_time_variables",
"(",
"ds",
")",
":",
"time_variables",
"=",
"set",
"(",
")",
"for",
"variable",
"in",
"ds",
".",
"get_variables_by_attributes",
"(",
"standard_name",
"=",
"'time'",
")",
":",
"time_variables",
".",
"add",
"(",
"variable",
".",
"name",
")",
"for",
"variable",
"in",
"ds",
".",
"get_variables_by_attributes",
"(",
"axis",
"=",
"'T'",
")",
":",
"if",
"variable",
".",
"name",
"not",
"in",
"time_variables",
":",
"time_variables",
".",
"add",
"(",
"variable",
".",
"name",
")",
"regx",
"=",
"r'^(?:day|d|hour|hr|h|minute|min|second|s)s? since .*$'",
"for",
"variable",
"in",
"ds",
".",
"get_variables_by_attributes",
"(",
"units",
"=",
"lambda",
"x",
":",
"isinstance",
"(",
"x",
",",
"basestring",
")",
")",
":",
"if",
"re",
".",
"match",
"(",
"regx",
",",
"variable",
".",
"units",
")",
"and",
"variable",
".",
"name",
"not",
"in",
"time_variables",
":",
"time_variables",
".",
"add",
"(",
"variable",
".",
"name",
")",
"return",
"time_variables"
] | 38.1 | 24.9 |
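The regex above targets CF-style 'units since epoch' strings; a quick demonstration of what it does and does not match:

import re

regx = r'^(?:day|d|hour|hr|h|minute|min|second|s)s? since .*$'
print(bool(re.match(regx, 'seconds since 1970-01-01T00:00:00Z')))  # True
print(bool(re.match(regx, 'days since 2000-01-01')))               # True
print(bool(re.match(regx, 'degrees_north')))                       # False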
def getStore(self) :
"""get the inner store as dictionary"""
res = {}
res.update(self.store)
for k, v in self.subStores.items() :
res[k] = v.getStore()
return res
|
[
"def",
"getStore",
"(",
"self",
")",
":",
"res",
"=",
"{",
"}",
"res",
".",
"update",
"(",
"self",
".",
"store",
")",
"for",
"k",
",",
"v",
"in",
"self",
".",
"subStores",
".",
"items",
"(",
")",
":",
"res",
"[",
"k",
"]",
"=",
"v",
".",
"getStore",
"(",
")",
"return",
"res"
] | 27 | 14.875 |
def delete(self, ids):
"""
Delete IPv4 addresses by their ids.
:param ids: Identifiers of the IPv4 addresses
:return: None
"""
url = build_uri_with_ids('api/v4/ipv4/%s/', ids)
return super(ApiV4IPv4, self).delete(url)
|
[
"def",
"delete",
"(",
"self",
",",
"ids",
")",
":",
"url",
"=",
"build_uri_with_ids",
"(",
"'api/v4/ipv4/%s/'",
",",
"ids",
")",
"return",
"super",
"(",
"ApiV4IPv4",
",",
"self",
")",
".",
"delete",
"(",
"url",
")"
] | 25.5 | 14.7 |
def retryable_http_error( e ):
"""
Determine if an error encountered during an HTTP download is likely to go away if we try again.
"""
if isinstance( e, urllib.error.HTTPError ) and e.code in (503, 408, 500):
# The server returned one of:
# 503 Service Unavailable
# 408 Request Timeout
# 500 Internal Server Error
return True
if isinstance( e, BadStatusLine ):
# The server didn't return a valid response at all
return True
return False
|
[
"def",
"retryable_http_error",
"(",
"e",
")",
":",
"if",
"isinstance",
"(",
"e",
",",
"urllib",
".",
"error",
".",
"HTTPError",
")",
"and",
"e",
".",
"code",
"in",
"(",
"'503'",
",",
"'408'",
",",
"'500'",
")",
":",
"# The server returned one of:",
"# 503 Service Unavailable",
"# 408 Request Timeout",
"# 500 Internal Server Error",
"return",
"True",
"if",
"isinstance",
"(",
"e",
",",
"BadStatusLine",
")",
":",
"# The server didn't return a valid response at all",
"return",
"True",
"return",
"False"
] | 36.428571 | 16 |
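A sketch of how such a predicate is typically consumed, assuming retryable_http_error is in scope; the attempt count and backoff values are illustrative:

import time
import urllib.request

def fetch_with_retries(url, attempts=3):
    for attempt in range(attempts):
        try:
            return urllib.request.urlopen(url).read()
        except Exception as e:
            if attempt + 1 == attempts or not retryable_http_error(e):
                raise
            time.sleep(2 ** attempt)  # simple exponential backoff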
def from_fp(self, file_pointer, comment_lead=['c']):
"""
Read a CNF+ formula from a file pointer. A file pointer should be
specified as an argument. The only default argument is
``comment_lead``, which can be used for parsing specific comment
lines.
:param file_pointer: a file pointer to read the formula from.
:param comment_lead: a list of characters leading comment lines
:type file_pointer: file pointer
:type comment_lead: list(str)
Usage example:
.. code-block:: python
>>> with open('some-file.cnf+', 'r') as fp:
... cnf1 = CNFPlus()
... cnf1.from_fp(fp)
>>>
>>> with open('another-file.cnf+', 'r') as fp:
... cnf2 = CNFPlus(from_fp=fp)
"""
self.nv = 0
self.clauses = []
self.atmosts = []
self.comments = []
comment_lead = tuple('p') + tuple(comment_lead)
for line in file_pointer:
line = line.strip()
if line:
if line[0] not in comment_lead:
if line[-1] == '0': # normal clause
cl = [int(l) for l in line.split()[:-1]]
self.nv = max([abs(l) for l in cl] + [self.nv])
self.clauses.append(cl)
else: # atmost/atleast constraint
items = [i for i in line.split()]
lits = [int(l) for l in items[:-2]]
rhs = int(items[-1])
self.nv = max([abs(l) for l in lits] + [self.nv])
if items[-2][0] == '>':
lits = list(map(lambda l: -l, lits))
rhs = len(lits) - rhs
self.atmosts.append([lits, rhs])
elif not line.startswith('p cnf+ '):
self.comments.append(line)
|
[
"def",
"from_fp",
"(",
"self",
",",
"file_pointer",
",",
"comment_lead",
"=",
"[",
"'c'",
"]",
")",
":",
"self",
".",
"nv",
"=",
"0",
"self",
".",
"clauses",
"=",
"[",
"]",
"self",
".",
"atmosts",
"=",
"[",
"]",
"self",
".",
"comments",
"=",
"[",
"]",
"comment_lead",
"=",
"tuple",
"(",
"'p'",
")",
"+",
"tuple",
"(",
"comment_lead",
")",
"for",
"line",
"in",
"file_pointer",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"line",
":",
"if",
"line",
"[",
"0",
"]",
"not",
"in",
"comment_lead",
":",
"if",
"line",
"[",
"-",
"1",
"]",
"==",
"'0'",
":",
"# normal clause",
"cl",
"=",
"[",
"int",
"(",
"l",
")",
"for",
"l",
"in",
"line",
".",
"split",
"(",
")",
"[",
":",
"-",
"1",
"]",
"]",
"self",
".",
"nv",
"=",
"max",
"(",
"[",
"abs",
"(",
"l",
")",
"for",
"l",
"in",
"cl",
"]",
"+",
"[",
"self",
".",
"nv",
"]",
")",
"self",
".",
"clauses",
".",
"append",
"(",
"cl",
")",
"else",
":",
"# atmost/atleast constraint",
"items",
"=",
"[",
"i",
"for",
"i",
"in",
"line",
".",
"split",
"(",
")",
"]",
"lits",
"=",
"[",
"int",
"(",
"l",
")",
"for",
"l",
"in",
"items",
"[",
":",
"-",
"2",
"]",
"]",
"rhs",
"=",
"int",
"(",
"items",
"[",
"-",
"1",
"]",
")",
"self",
".",
"nv",
"=",
"max",
"(",
"[",
"abs",
"(",
"l",
")",
"for",
"l",
"in",
"lits",
"]",
"+",
"[",
"self",
".",
"nv",
"]",
")",
"if",
"items",
"[",
"-",
"2",
"]",
"[",
"0",
"]",
"==",
"'>'",
":",
"lits",
"=",
"list",
"(",
"map",
"(",
"lambda",
"l",
":",
"-",
"l",
",",
"lits",
")",
")",
"rhs",
"=",
"len",
"(",
"lits",
")",
"-",
"rhs",
"self",
".",
"atmosts",
".",
"append",
"(",
"[",
"lits",
",",
"rhs",
"]",
")",
"elif",
"not",
"line",
".",
"startswith",
"(",
"'p cnf+ '",
")",
":",
"self",
".",
"comments",
".",
"append",
"(",
"line",
")"
] | 37.622642 | 20 |
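A tiny CNF+ document exercising both parser branches above, assuming this is PySAT's CNFPlus: a normal clause terminated by 0, and an atmost constraint written with '<=':

import io

dimacs_plus = io.StringIO(
    'c example formula\n'
    'p cnf+ 3 2\n'
    '1 -2 0\n'        # ordinary clause: x1 or not x2
    '1 2 3 <= 2\n'    # at most two of x1, x2, x3
)
cnf = CNFPlus(from_fp=dimacs_plus)
print(cnf.clauses)  # [[1, -2]]
print(cnf.atmosts)  # [[[1, 2, 3], 2]]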
def update_hook(self, auth, repo_name, hook_id, update, organization=None):
"""
Updates hook with id ``hook_id`` according to ``update``.
:param auth.Authentication auth: authentication object
:param str repo_name: repo of the hook to update
:param int hook_id: id of the hook to update
:param GogsHookUpdate update: a ``GogsHookUpdate`` object describing the requested update
:param str organization: name of associated organization, if applicable
:return: the updated hook
:rtype: GogsRepo.Hook
:raises NetworkFailure: if there is an error communicating with the server
:raises ApiFailure: if the request cannot be serviced
"""
if organization is not None:
path = "/repos/{o}/{r}/hooks/{i}".format(o=organization, r=repo_name, i=hook_id)
else:
path = "/repos/{r}/hooks/{i}".format(r=repo_name, i=hook_id)
response = self._patch(path, auth=auth, data=update.as_dict())
return GogsRepo.Hook.from_json(response.json())
|
[
"def",
"update_hook",
"(",
"self",
",",
"auth",
",",
"repo_name",
",",
"hook_id",
",",
"update",
",",
"organization",
"=",
"None",
")",
":",
"if",
"organization",
"is",
"not",
"None",
":",
"path",
"=",
"\"/repos/{o}/{r}/hooks/{i}\"",
".",
"format",
"(",
"o",
"=",
"organization",
",",
"r",
"=",
"repo_name",
",",
"i",
"=",
"hook_id",
")",
"else",
":",
"path",
"=",
"\"/repos/{r}/hooks/{i}\"",
".",
"format",
"(",
"r",
"=",
"repo_name",
",",
"i",
"=",
"hook_id",
")",
"response",
"=",
"self",
".",
"_patch",
"(",
"path",
",",
"auth",
"=",
"auth",
",",
"data",
"=",
"update",
".",
"as_dict",
"(",
")",
")",
"return",
"GogsRepo",
".",
"Hook",
".",
"from_json",
"(",
"response",
".",
"json",
"(",
")",
")"
] | 52.55 | 24.35 |
def spur(image, mask=None, iterations=1):
'''Remove spur pixels from an image
0 0 0      0 0 0
0 1 0  ->  0 0 0
0 0 1      0 0 ?
'''
global spur_table_1,spur_table_2
if mask is None:
masked_image = image
else:
masked_image = image.astype(bool).copy()
masked_image[~mask] = False
index_i, index_j, masked_image = prepare_for_index_lookup(masked_image,
False)
if iterations is None:
iterations = len(index_i)
for i in range(iterations):
for table in (spur_table_1, spur_table_2):
index_i, index_j = index_lookup(index_i, index_j,
masked_image, table, 1)
masked_image = extract_from_image_lookup(image, index_i, index_j)
if not mask is None:
masked_image[~mask] = image[~mask]
return masked_image
|
[
"def",
"spur",
"(",
"image",
",",
"mask",
"=",
"None",
",",
"iterations",
"=",
"1",
")",
":",
"global",
"spur_table_1",
",",
"spur_table_2",
"if",
"mask",
"is",
"None",
":",
"masked_image",
"=",
"image",
"else",
":",
"masked_image",
"=",
"image",
".",
"astype",
"(",
"bool",
")",
".",
"copy",
"(",
")",
"masked_image",
"[",
"~",
"mask",
"]",
"=",
"False",
"index_i",
",",
"index_j",
",",
"masked_image",
"=",
"prepare_for_index_lookup",
"(",
"masked_image",
",",
"False",
")",
"if",
"iterations",
"is",
"None",
":",
"iterations",
"=",
"len",
"(",
"index_i",
")",
"for",
"i",
"in",
"range",
"(",
"iterations",
")",
":",
"for",
"table",
"in",
"(",
"spur_table_1",
",",
"spur_table_2",
")",
":",
"index_i",
",",
"index_j",
"=",
"index_lookup",
"(",
"index_i",
",",
"index_j",
",",
"masked_image",
",",
"table",
",",
"1",
")",
"masked_image",
"=",
"extract_from_image_lookup",
"(",
"image",
",",
"index_i",
",",
"index_j",
")",
"if",
"not",
"mask",
"is",
"None",
":",
"masked_image",
"[",
"~",
"mask",
"]",
"=",
"image",
"[",
"~",
"mask",
"]",
"return",
"masked_image"
] | 35.68 | 17.36 |
def profile(self, query_id, timeout=10):
"""
Get the profile of the query that has the given queryid.
:param query_id: The UUID of the query in standard UUID format that Drill assigns to each query.
:param timeout: int
:return: pydrill.client.Result
"""
result = Result(*self.perform_request(**{
'method': 'GET',
'url': '/profiles/{0}.json'.format(query_id),
'params': {
'request_timeout': timeout
}
}))
return result
|
[
"def",
"profile",
"(",
"self",
",",
"query_id",
",",
"timeout",
"=",
"10",
")",
":",
"result",
"=",
"Result",
"(",
"*",
"self",
".",
"perform_request",
"(",
"*",
"*",
"{",
"'method'",
":",
"'GET'",
",",
"'url'",
":",
"'/profiles/{0}.json'",
".",
"format",
"(",
"query_id",
")",
",",
"'params'",
":",
"{",
"'request_timeout'",
":",
"timeout",
"}",
"}",
")",
")",
"return",
"result"
] | 33.6875 | 17.1875 |
def get_message(zelf):
"""
Get one message if available, else return None.
If a message is available, return the result of handler(message).
Does not block!
If you would like to call your handler manually, this is the way to
go: don't pass a handler to Listener(), and the default handler will
log and return the message for your own manual processing.
"""
try:
message = zelf.r.master.rpoplpush(zelf.lijst, zelf._processing)
if message:
# NOTE(tr3buchet): got a message, process it
LOG.debug('received: |%s|' % message)
return zelf._call_handler(message)
except zelf.r.generic_error:
LOG.exception('')
|
[
"def",
"get_message",
"(",
"zelf",
")",
":",
"try",
":",
"message",
"=",
"zelf",
".",
"r",
".",
"master",
".",
"rpoplpush",
"(",
"zelf",
".",
"lijst",
",",
"zelf",
".",
"_processing",
")",
"if",
"message",
":",
"# NOTE(tr3buchet): got a message, process it",
"LOG",
".",
"debug",
"(",
"'received: |%s|'",
"%",
"message",
")",
"return",
"zelf",
".",
"_call_handler",
"(",
"message",
")",
"except",
"zelf",
".",
"r",
".",
"generic_error",
":",
"LOG",
".",
"exception",
"(",
"''",
")"
] | 41.444444 | 19.666667 |
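The rpoplpush move above is the classic Redis "reliable queue" pattern: the message is atomically moved onto a processing list, so a crashed worker leaves it recoverable instead of lost. A minimal sketch with redis-py, assuming a server on localhost; the list names are illustrative:

import redis

r = redis.Redis()
r.lpush('work', 'job-1')                          # producer side
message = r.rpoplpush('work', 'work:processing')  # claim one message
# ... handle the message ...
r.lrem('work:processing', 1, message)             # ack after success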
def _setChoiceDict(self):
"""Create dictionary for choice list"""
# value is name of choice parameter (same as key)
self.choiceDict = {}
for c in self.choice: self.choiceDict[c] = c
|
[
"def",
"_setChoiceDict",
"(",
"self",
")",
":",
"# value is name of choice parameter (same as key)",
"self",
".",
"choiceDict",
"=",
"{",
"}",
"for",
"c",
"in",
"self",
".",
"choice",
":",
"self",
".",
"choiceDict",
"[",
"c",
"]",
"=",
"c"
] | 41.8 | 11.2 |
def figure(self,forceNew=False):
"""make sure a figure is ready."""
if plt._pylab_helpers.Gcf.get_num_fig_managers()>0 and forceNew is False:
self.log.debug("figure already seen, not creating one.")
return
if self.subplot:
self.log.debug("subplot mode enabled, not creating new figure")
else:
self.log.debug("creating new figure")
plt.figure(figsize=(self.figure_width,self.figure_height))
|
[
"def",
"figure",
"(",
"self",
",",
"forceNew",
"=",
"False",
")",
":",
"if",
"plt",
".",
"_pylab_helpers",
".",
"Gcf",
".",
"get_num_fig_managers",
"(",
")",
">",
"0",
"and",
"forceNew",
"is",
"False",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"figure already seen, not creating one.\"",
")",
"return",
"if",
"self",
".",
"subplot",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"subplot mode enabled, not creating new figure\"",
")",
"else",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"creating new figure\"",
")",
"plt",
".",
"figure",
"(",
"figsize",
"=",
"(",
"self",
".",
"figure_width",
",",
"self",
".",
"figure_height",
")",
")"
] | 42.909091 | 23.272727 |
def mnl_utility_transform(sys_utility_array, *args, **kwargs):
"""
Parameters
----------
sys_utility_array : ndarray.
Should be 1D or 2D. Should have been created by the dot product of a
design matrix and an array of index coefficients.
Returns
-------
systematic_utilities : 2D ndarray.
The input systematic utilities. If `sys_utility_array` is 2D, then
`sys_utility_array` is returned. Else, returns
`sys_utility_array[:, None]`.
"""
# Return a 2D array of systematic utility values
if len(sys_utility_array.shape) == 1:
systematic_utilities = sys_utility_array[:, np.newaxis]
else:
systematic_utilities = sys_utility_array
return systematic_utilities
|
[
"def",
"mnl_utility_transform",
"(",
"sys_utility_array",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Return a 2D array of systematic utility values",
"if",
"len",
"(",
"sys_utility_array",
".",
"shape",
")",
"==",
"1",
":",
"systematic_utilities",
"=",
"sys_utility_array",
"[",
":",
",",
"np",
".",
"newaxis",
"]",
"else",
":",
"systematic_utilities",
"=",
"sys_utility_array",
"return",
"systematic_utilities"
] | 34.363636 | 19 |
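A shape check of the transform above, assuming the function is importable: 1D index values are promoted to a single-column 2D array, while 2D input passes through unchanged.

import numpy as np

v = np.array([1.5, -0.2, 0.7])
print(mnl_utility_transform(v).shape)           # (3, 1): promoted to 2D
print(mnl_utility_transform(v[:, None]).shape)  # (3, 1): already 2D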