text
stringlengths 89
104k
| code_tokens
list | avg_line_len
float64 7.91
980
| score
float64 0
630
|
---|---|---|---|
def wait_until_alert_is_present(self, timeout=None):
    """
    Block until a javascript alert becomes present, or time out.

    @type timeout: int
    @param timeout: maximum number of seconds to wait; falls back to the
        driver's configured default timeout when omitted
    @rtype: webdriverwrapper.WebElementWrapper
    @return: the result of the wait (the alert that became present)
    """
    if timeout is None:
        timeout = self.timeout

    def poll_for_alert():
        """Executor callback: delegate the polling to WebDriverWait."""
        return WebDriverWait(self.driver, timeout).until(EC.alert_is_present())

    # No locator applies to alerts, hence the explicit None.
    return self.execute_and_handle_webdriver_exceptions(
        poll_for_alert, timeout, None,
        'Timeout waiting for alert to be present')
|
[
"def",
"wait_until_alert_is_present",
"(",
"self",
",",
"timeout",
"=",
"None",
")",
":",
"timeout",
"=",
"timeout",
"if",
"timeout",
"is",
"not",
"None",
"else",
"self",
".",
"timeout",
"locator",
"=",
"None",
"def",
"wait",
"(",
")",
":",
"'''\n Wait function passed to executor\n '''",
"return",
"WebDriverWait",
"(",
"self",
".",
"driver",
",",
"timeout",
")",
".",
"until",
"(",
"EC",
".",
"alert_is_present",
"(",
")",
")",
"return",
"self",
".",
"execute_and_handle_webdriver_exceptions",
"(",
"wait",
",",
"timeout",
",",
"locator",
",",
"'Timeout waiting for alert to be present'",
")"
] | 36.857143 | 23.809524 |
def settings(**kwargs):
    """
    Build the ParameterSet of bundle-level settings.

    Generally, this will automatically be added to a newly initialized
    :class:`phoebe.frontend.bundle.Bundle`

    :parameter **kwargs: defaults for the values of any of the parameters
    :return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
        created :class:`phoebe.parameters.parameters.Parameter`s
    """
    params = [
        StringParameter(qualifier='phoebe_version',
                        value=kwargs.get('phoebe_version', __version__),
                        description='Version of PHOEBE - change with caution'),
        BoolParameter(qualifier='log_history',
                      value=kwargs.get('log_history', False),
                      description='Whether to log history (undo/redo)'),
        DictParameter(qualifier='dict_filter',
                      value=kwargs.get('dict_filter', {}),
                      description='Filters to use when using dictionary access'),
        BoolParameter(qualifier='dict_set_all',
                      value=kwargs.get('dict_set_all', False),
                      description='Whether to set all values for dictionary access that returns more than 1 result'),
    ]
    # NOTE: plotting_backend, try_sympy and run_constraints settings are
    # intentionally disabled pending design decisions (try_sympy cannot be
    # honored during initialization; toggling run_constraints would require
    # re-running all constraints when re-enabled).
    return ParameterSet(params)
|
[
"def",
"settings",
"(",
"*",
"*",
"kwargs",
")",
":",
"params",
"=",
"[",
"]",
"params",
"+=",
"[",
"StringParameter",
"(",
"qualifier",
"=",
"'phoebe_version'",
",",
"value",
"=",
"kwargs",
".",
"get",
"(",
"'phoebe_version'",
",",
"__version__",
")",
",",
"description",
"=",
"'Version of PHOEBE - change with caution'",
")",
"]",
"params",
"+=",
"[",
"BoolParameter",
"(",
"qualifier",
"=",
"'log_history'",
",",
"value",
"=",
"kwargs",
".",
"get",
"(",
"'log_history'",
",",
"False",
")",
",",
"description",
"=",
"'Whether to log history (undo/redo)'",
")",
"]",
"params",
"+=",
"[",
"DictParameter",
"(",
"qualifier",
"=",
"'dict_filter'",
",",
"value",
"=",
"kwargs",
".",
"get",
"(",
"'dict_filter'",
",",
"{",
"}",
")",
",",
"description",
"=",
"'Filters to use when using dictionary access'",
")",
"]",
"params",
"+=",
"[",
"BoolParameter",
"(",
"qualifier",
"=",
"'dict_set_all'",
",",
"value",
"=",
"kwargs",
".",
"get",
"(",
"'dict_set_all'",
",",
"False",
")",
",",
"description",
"=",
"'Whether to set all values for dictionary access that returns more than 1 result'",
")",
"]",
"# params += [ChoiceParameter(qualifier='plotting_backend', value=kwargs.get('plotting_backend', 'mpl'), choices=['mpl', 'mpld3', 'mpl2bokeh', 'bokeh'] if conf.devel else ['mpl'], description='Default backend to use for plotting')]",
"# problem with try_sympy parameter: it can't be used during initialization... so this may need to be a phoebe-level setting",
"# params += [BoolParameter(qualifier='try_sympy', value=kwargs.get('try_sympy', True), description='Whether to use sympy if installed for constraints')]",
"# This could be complicated - because then we'll have to specifically watch to see when its enabled and then run all constraints - not sure if that is worth the time savings",
"# params += [BoolParameter(qualifier='run_constraints', value=kwargs.get('run_constraints', True), description='Whether to run_constraints whenever a parameter changes (warning: turning off will disable constraints until enabled at which point all constraints will be run)')]",
"return",
"ParameterSet",
"(",
"params",
")"
] | 78.115385 | 66.115385 |
def _which_ip_protocol(element):
"""
Validate the protocol addresses for the element. Most elements can
have an IPv4 or IPv6 address assigned on the same element. This
allows elements to be validated and placed on the right network.
:return: boolean tuple
:rtype: tuple(ipv4, ipv6)
"""
try:
if element.typeof in ('host', 'router'):
return getattr(element, 'address', False), getattr(element, 'ipv6_address', False)
elif element.typeof == 'netlink':
gateway = element.gateway
if gateway.typeof == 'router':
return getattr(gateway, 'address', False), getattr(gateway, 'ipv6_address', False)
# It's an engine, return true
elif element.typeof == 'network':
return getattr(element, 'ipv4_network', False), getattr(element, 'ipv6_network', False)
except AttributeError:
pass
# Always return true so that the calling function assumes the element
# is valid for the routing node. This could fail when submitting but
# we don't want to prevent adding elements yet since this could change
return True, True
|
[
"def",
"_which_ip_protocol",
"(",
"element",
")",
":",
"try",
":",
"if",
"element",
".",
"typeof",
"in",
"(",
"'host'",
",",
"'router'",
")",
":",
"return",
"getattr",
"(",
"element",
",",
"'address'",
",",
"False",
")",
",",
"getattr",
"(",
"element",
",",
"'ipv6_address'",
",",
"False",
")",
"elif",
"element",
".",
"typeof",
"==",
"'netlink'",
":",
"gateway",
"=",
"element",
".",
"gateway",
"if",
"gateway",
".",
"typeof",
"==",
"'router'",
":",
"return",
"getattr",
"(",
"gateway",
",",
"'address'",
",",
"False",
")",
",",
"getattr",
"(",
"gateway",
",",
"'ipv6_address'",
",",
"False",
")",
"# It's an engine, return true",
"elif",
"element",
".",
"typeof",
"==",
"'network'",
":",
"return",
"getattr",
"(",
"element",
",",
"'ipv4_network'",
",",
"False",
")",
",",
"getattr",
"(",
"element",
",",
"'ipv6_network'",
",",
"False",
")",
"except",
"AttributeError",
":",
"pass",
"# Always return true so that the calling function assumes the element",
"# is valid for the routing node. This could fail when submitting but",
"# we don't want to prevent adding elements yet since this could change",
"return",
"True",
",",
"True"
] | 45.48 | 21.32 |
def generate(self):
"""generate tar file
..Usage::
>>> tarfile = b"".join(data for data in tg.generate())
"""
if self._tar_buffer.tell():
self._tar_buffer.seek(0, 0)
yield self._tar_buffer.read()
for fname in self._files_to_add:
last = self._tar_buffer.tell()
self._tar_obj.add(fname)
self._tar_buffer.seek(last, os.SEEK_SET)
data = self._tar_buffer.read()
yield data
for info, content in self._ios_to_add.items():
last = self._tar_buffer.tell()
self._tar_obj.addfile(info, content)
self._tar_buffer.seek(last, os.SEEK_SET)
data = self._tar_buffer.read()
yield data
self._tar_obj.close()
yield self._tar_buffer.read()
self._generated = True
|
[
"def",
"generate",
"(",
"self",
")",
":",
"if",
"self",
".",
"_tar_buffer",
".",
"tell",
"(",
")",
":",
"self",
".",
"_tar_buffer",
".",
"seek",
"(",
"0",
",",
"0",
")",
"yield",
"self",
".",
"_tar_buffer",
".",
"read",
"(",
")",
"for",
"fname",
"in",
"self",
".",
"_files_to_add",
":",
"last",
"=",
"self",
".",
"_tar_buffer",
".",
"tell",
"(",
")",
"self",
".",
"_tar_obj",
".",
"add",
"(",
"fname",
")",
"self",
".",
"_tar_buffer",
".",
"seek",
"(",
"last",
",",
"os",
".",
"SEEK_SET",
")",
"data",
"=",
"self",
".",
"_tar_buffer",
".",
"read",
"(",
")",
"yield",
"data",
"for",
"info",
",",
"content",
"in",
"self",
".",
"_ios_to_add",
".",
"items",
"(",
")",
":",
"last",
"=",
"self",
".",
"_tar_buffer",
".",
"tell",
"(",
")",
"self",
".",
"_tar_obj",
".",
"addfile",
"(",
"info",
",",
"content",
")",
"self",
".",
"_tar_buffer",
".",
"seek",
"(",
"last",
",",
"os",
".",
"SEEK_SET",
")",
"data",
"=",
"self",
".",
"_tar_buffer",
".",
"read",
"(",
")",
"yield",
"data",
"self",
".",
"_tar_obj",
".",
"close",
"(",
")",
"yield",
"self",
".",
"_tar_buffer",
".",
"read",
"(",
")",
"self",
".",
"_generated",
"=",
"True"
] | 30.214286 | 14.107143 |
def _parse_args(args):
    """
    Interpret command line arguments.

    :param args: `sys.argv`
    :return: The populated argparse namespace.
    """
    description = ('Speed, distance and time calculations around '
                   'quantities of digital information.')
    parser = argparse.ArgumentParser(prog='nibble', description=description)
    parser.add_argument('-V', '--version',
                        action='version',
                        version='%(prog)s ' + nibble.__version__)
    parser.add_argument('-v', '--verbosity',
                        action='count',
                        default=0,
                        help='increase output verbosity')
    parser.add_argument('expression',
                        nargs='+',
                        type=util.decode_cli_arg,
                        help='the calculation to execute')
    # Drop argv[0] (the program name) before parsing.
    return parser.parse_args(args[1:])
|
[
"def",
"_parse_args",
"(",
"args",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"prog",
"=",
"'nibble'",
",",
"description",
"=",
"'Speed, distance and time '",
"'calculations around '",
"'quantities of digital '",
"'information.'",
")",
"parser",
".",
"add_argument",
"(",
"'-V'",
",",
"'--version'",
",",
"action",
"=",
"'version'",
",",
"version",
"=",
"'%(prog)s '",
"+",
"nibble",
".",
"__version__",
")",
"parser",
".",
"add_argument",
"(",
"'-v'",
",",
"'--verbosity'",
",",
"help",
"=",
"'increase output verbosity'",
",",
"action",
"=",
"'count'",
",",
"default",
"=",
"0",
")",
"parser",
".",
"add_argument",
"(",
"'expression'",
",",
"type",
"=",
"util",
".",
"decode_cli_arg",
",",
"nargs",
"=",
"'+'",
",",
"help",
"=",
"'the calculation to execute'",
")",
"return",
"parser",
".",
"parse_args",
"(",
"args",
"[",
"1",
":",
"]",
")"
] | 40.76 | 13.96 |
def allocate_tcp_port():
    """Return an (integer) available TCP port on localhost. This briefly
    listens on the port in question, then closes it right away."""
    # We want to bind() the socket but not listen(). Twisted (in
    # tcp.Port.createInternetSocket) would do several other things:
    # non-blocking, close-on-exec, and SO_REUSEADDR. We don't need
    # non-blocking because we never listen on it, and we don't need
    # close-on-exec because we close it right away. So just add SO_REUSEADDR.
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    on_posix = platformType == "posix" and sys.platform != "cygwin"
    if on_posix:
        probe.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # Binding to port 0 lets the OS pick a free ephemeral port.
    probe.bind(("127.0.0.1", 0))
    allocated = probe.getsockname()[1]
    probe.close()
    return allocated
|
[
"def",
"allocate_tcp_port",
"(",
")",
":",
"# We want to bind() the socket but not listen(). Twisted (in",
"# tcp.Port.createInternetSocket) would do several other things:",
"# non-blocking, close-on-exec, and SO_REUSEADDR. We don't need",
"# non-blocking because we never listen on it, and we don't need",
"# close-on-exec because we close it right away. So just add SO_REUSEADDR.",
"s",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET",
",",
"socket",
".",
"SOCK_STREAM",
")",
"if",
"platformType",
"==",
"\"posix\"",
"and",
"sys",
".",
"platform",
"!=",
"\"cygwin\"",
":",
"s",
".",
"setsockopt",
"(",
"socket",
".",
"SOL_SOCKET",
",",
"socket",
".",
"SO_REUSEADDR",
",",
"1",
")",
"s",
".",
"bind",
"(",
"(",
"\"127.0.0.1\"",
",",
"0",
")",
")",
"port",
"=",
"s",
".",
"getsockname",
"(",
")",
"[",
"1",
"]",
"s",
".",
"close",
"(",
")",
"return",
"port"
] | 51.2 | 19.466667 |
def list_all_table_rate_rules(cls, **kwargs):
    """List TableRateRules

    Return a list of TableRateRules
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.list_all_table_rate_rules(async=True)
    >>> result = thread.get()

    :param async bool
    :param int page: page number
    :param int size: page size
    :param str sort: page order
    :return: page[TableRateRule]
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('async'):
        # Synchronous path: unwrap the data from the response.
        (data) = cls._list_all_table_rate_rules_with_http_info(**kwargs)
        return data
    # Asynchronous path: hand the request thread straight back.
    return cls._list_all_table_rate_rules_with_http_info(**kwargs)
|
[
"def",
"list_all_table_rate_rules",
"(",
"cls",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'async'",
")",
":",
"return",
"cls",
".",
"_list_all_table_rate_rules_with_http_info",
"(",
"*",
"*",
"kwargs",
")",
"else",
":",
"(",
"data",
")",
"=",
"cls",
".",
"_list_all_table_rate_rules_with_http_info",
"(",
"*",
"*",
"kwargs",
")",
"return",
"data"
] | 38.347826 | 15.26087 |
def xtob(data, sep=''):
    """Interpret the hex encoding of a blob (string).

    All non-hex characters (including any ``sep`` delimiters) are
    stripped before decoding; note ``sep`` itself is accepted but not
    otherwise consulted.
    """
    # Keep only hex digits, then decode them pairwise into bytes.
    hex_digits = re.sub("[^0-9a-fA-F]", '', data)
    return binascii.unhexlify(hex_digits)
|
[
"def",
"xtob",
"(",
"data",
",",
"sep",
"=",
"''",
")",
":",
"# remove the non-hex characters",
"data",
"=",
"re",
".",
"sub",
"(",
"\"[^0-9a-fA-F]\"",
",",
"''",
",",
"data",
")",
"# interpret the hex",
"return",
"binascii",
".",
"unhexlify",
"(",
"data",
")"
] | 30.714286 | 12.428571 |
def decodeGsm7(encodedText):
    """ GSM-7 text decoding algorithm

    Decodes the specified GSM-7-encoded string into a plaintext string.

    :param encodedText: the text string to encode
    :type encodedText: bytearray or str
    :return: A string containing the decoded text
    :rtype: str
    """
    if type(encodedText) == str:
        encodedText = rawStrToByteArray(encodedText)  # bytearray(encodedText)
    decoded = []
    byteIter = iter(encodedText)
    for octet in byteIter:
        if octet != 0x1B:
            decoded.append(GSM7_BASIC[octet])
            continue
        # ESC: the next octet selects a character from the extended table.
        target = chr(next(byteIter))
        for char, value in dictItemsIter(GSM7_EXTENDED):
            if target == value:
                decoded.append(char)
                break
    return ''.join(decoded)
|
[
"def",
"decodeGsm7",
"(",
"encodedText",
")",
":",
"result",
"=",
"[",
"]",
"if",
"type",
"(",
"encodedText",
")",
"==",
"str",
":",
"encodedText",
"=",
"rawStrToByteArray",
"(",
"encodedText",
")",
"#bytearray(encodedText)",
"iterEncoded",
"=",
"iter",
"(",
"encodedText",
")",
"for",
"b",
"in",
"iterEncoded",
":",
"if",
"b",
"==",
"0x1B",
":",
"# ESC - switch to extended table",
"c",
"=",
"chr",
"(",
"next",
"(",
"iterEncoded",
")",
")",
"for",
"char",
",",
"value",
"in",
"dictItemsIter",
"(",
"GSM7_EXTENDED",
")",
":",
"if",
"c",
"==",
"value",
":",
"result",
".",
"append",
"(",
"char",
")",
"break",
"else",
":",
"result",
".",
"append",
"(",
"GSM7_BASIC",
"[",
"b",
"]",
")",
"return",
"''",
".",
"join",
"(",
"result",
")"
] | 32.6 | 15.48 |
def check_lon_extents(self, ds):
    '''
    Check that the values of geospatial_lon_min/geospatial_lon_max
    approximately match the data.

    Locates candidate longitude variables using the CF 4.2 criteria
    (units, standard_name, axis), then compares the dataset-wide
    min/max of those variables against the global attributes.

    :param netCDF4.Dataset ds: An open netCDF dataset
    :return: a Result scored out of 2 (min match, max match)
    '''
    # Both global attributes must exist before anything can be compared.
    if not (hasattr(ds, 'geospatial_lon_min') and hasattr(ds, 'geospatial_lon_max')):
        return Result(BaseCheck.MEDIUM,
                      False,
                      'geospatial_lon_extents_match',
                      ['geospatial_lon_min/max attribute not found, CF-1.6 spec chapter 4.1'])
    try:  # type cast: attributes may be stored as strings
        lon_min = float(ds.geospatial_lon_min)
        lon_max = float(ds.geospatial_lon_max)
    except ValueError:
        return Result(BaseCheck.MEDIUM,
                      False,
                      'geospatial_lon_extents_match',
                      ['Could not convert one of geospatial_lon_min ({}) or max ({}) to float see CF-1.6 spec chapter 4.1'
                       ''.format(ds.geospatial_lon_min, ds.geospatial_lon_max)])
    # identify lon var(s) as per CF 4.2
    lon_vars = {}  # var -> number of criteria passed
    for name, var in ds.variables.items():
        # must have units to be a candidate at all
        if not hasattr(var, 'units'):
            continue
        lon_vars[var] = 0
        # criterion 1: units in the recognized longitude-unit set
        if var.units in _possiblexunits:
            lon_vars[var] += 1
        # criterion 2: standard name of "longitude"
        if hasattr(var, 'standard_name') and var.standard_name == 'longitude':
            lon_vars[var] += 1
        # criterion 3: axis of "X" (the longitude axis)
        if hasattr(var, 'axis') and var.axis == 'X':
            lon_vars[var] += 1
    # trim out any zeros: keep only variables matching >= 1 criterion
    lon_vars = {k: v for k, v in lon_vars.items() if v > 0}
    if len(lon_vars) == 0:
        return Result(BaseCheck.MEDIUM,
                      False,
                      'geospatial_lon_extents_match',
                      ['Could not find lon variable to test extent of geospatial_lon_min/max, see CF-1.6 spec chapter 4.2'])
    # sort by criteria passed, best candidates first
    final_lons = sorted(lon_vars, key=lambda x: lon_vars[x], reverse=True)
    # Per-variable observed extremes; all-NaN variables are skipped.
    obs_mins = {var._name: np.nanmin(var) for var in final_lons if not np.isnan(var).all()}
    obs_maxs = {var._name: np.nanmax(var) for var in final_lons if not np.isnan(var).all()}
    # The attribute passes if it is close to ANY candidate's extreme.
    min_pass = any((np.isclose(lon_min, min_val) for min_val in obs_mins.values()))
    max_pass = any((np.isclose(lon_max, max_val) for max_val in obs_maxs.values()))
    allpass = sum((min_pass, max_pass))
    msgs = []
    if not min_pass:
        msgs.append("Data for possible longitude variables (%s) did not match geospatial_lon_min value (%s)" % (obs_mins, lon_min))
    if not max_pass:
        msgs.append("Data for possible longitude variables (%s) did not match geospatial_lon_max value (%s)" % (obs_maxs, lon_max))
    return Result(BaseCheck.MEDIUM,
                  (allpass, 2),
                  'geospatial_lon_extents_match',
                  msgs)
|
[
"def",
"check_lon_extents",
"(",
"self",
",",
"ds",
")",
":",
"if",
"not",
"(",
"hasattr",
"(",
"ds",
",",
"'geospatial_lon_min'",
")",
"and",
"hasattr",
"(",
"ds",
",",
"'geospatial_lon_max'",
")",
")",
":",
"return",
"Result",
"(",
"BaseCheck",
".",
"MEDIUM",
",",
"False",
",",
"'geospatial_lon_extents_match'",
",",
"[",
"'geospatial_lon_min/max attribute not found, CF-1.6 spec chapter 4.1'",
"]",
")",
"try",
":",
"# type cast",
"lon_min",
"=",
"float",
"(",
"ds",
".",
"geospatial_lon_min",
")",
"lon_max",
"=",
"float",
"(",
"ds",
".",
"geospatial_lon_max",
")",
"except",
"ValueError",
":",
"return",
"Result",
"(",
"BaseCheck",
".",
"MEDIUM",
",",
"False",
",",
"'geospatial_lon_extents_match'",
",",
"[",
"'Could not convert one of geospatial_lon_min ({}) or max ({}) to float see CF-1.6 spec chapter 4.1'",
"''",
".",
"format",
"(",
"ds",
".",
"geospatial_lon_min",
",",
"ds",
".",
"geospatial_lon_max",
")",
"]",
")",
"# identify lon var(s) as per CF 4.2",
"lon_vars",
"=",
"{",
"}",
"# var -> number of criteria passed",
"for",
"name",
",",
"var",
"in",
"ds",
".",
"variables",
".",
"items",
"(",
")",
":",
"# must have units",
"if",
"not",
"hasattr",
"(",
"var",
",",
"'units'",
")",
":",
"continue",
"lon_vars",
"[",
"var",
"]",
"=",
"0",
"# units in this set",
"if",
"var",
".",
"units",
"in",
"_possiblexunits",
":",
"lon_vars",
"[",
"var",
"]",
"+=",
"1",
"# standard name of \"longitude\"",
"if",
"hasattr",
"(",
"var",
",",
"'standard_name'",
")",
"and",
"var",
".",
"standard_name",
"==",
"'longitude'",
":",
"lon_vars",
"[",
"var",
"]",
"+=",
"1",
"# axis of \"Y\"",
"if",
"hasattr",
"(",
"var",
",",
"'axis'",
")",
"and",
"var",
".",
"axis",
"==",
"'X'",
":",
"lon_vars",
"[",
"var",
"]",
"+=",
"1",
"# trim out any zeros",
"lon_vars",
"=",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"lon_vars",
".",
"items",
"(",
")",
"if",
"v",
">",
"0",
"}",
"if",
"len",
"(",
"lon_vars",
")",
"==",
"0",
":",
"return",
"Result",
"(",
"BaseCheck",
".",
"MEDIUM",
",",
"False",
",",
"'geospatial_lon_extents_match'",
",",
"[",
"'Could not find lon variable to test extent of geospatial_lon_min/max, see CF-1.6 spec chapter 4.2'",
"]",
")",
"# sort by criteria passed",
"final_lons",
"=",
"sorted",
"(",
"lon_vars",
",",
"key",
"=",
"lambda",
"x",
":",
"lon_vars",
"[",
"x",
"]",
",",
"reverse",
"=",
"True",
")",
"obs_mins",
"=",
"{",
"var",
".",
"_name",
":",
"np",
".",
"nanmin",
"(",
"var",
")",
"for",
"var",
"in",
"final_lons",
"if",
"not",
"np",
".",
"isnan",
"(",
"var",
")",
".",
"all",
"(",
")",
"}",
"obs_maxs",
"=",
"{",
"var",
".",
"_name",
":",
"np",
".",
"nanmax",
"(",
"var",
")",
"for",
"var",
"in",
"final_lons",
"if",
"not",
"np",
".",
"isnan",
"(",
"var",
")",
".",
"all",
"(",
")",
"}",
"min_pass",
"=",
"any",
"(",
"(",
"np",
".",
"isclose",
"(",
"lon_min",
",",
"min_val",
")",
"for",
"min_val",
"in",
"obs_mins",
".",
"values",
"(",
")",
")",
")",
"max_pass",
"=",
"any",
"(",
"(",
"np",
".",
"isclose",
"(",
"lon_max",
",",
"max_val",
")",
"for",
"max_val",
"in",
"obs_maxs",
".",
"values",
"(",
")",
")",
")",
"allpass",
"=",
"sum",
"(",
"(",
"min_pass",
",",
"max_pass",
")",
")",
"msgs",
"=",
"[",
"]",
"if",
"not",
"min_pass",
":",
"msgs",
".",
"append",
"(",
"\"Data for possible longitude variables (%s) did not match geospatial_lon_min value (%s)\"",
"%",
"(",
"obs_mins",
",",
"lon_min",
")",
")",
"if",
"not",
"max_pass",
":",
"msgs",
".",
"append",
"(",
"\"Data for possible longitude variables (%s) did not match geospatial_lon_max value (%s)\"",
"%",
"(",
"obs_maxs",
",",
"lon_max",
")",
")",
"return",
"Result",
"(",
"BaseCheck",
".",
"MEDIUM",
",",
"(",
"allpass",
",",
"2",
")",
",",
"'geospatial_lon_extents_match'",
",",
"msgs",
")"
] | 40.421053 | 26.368421 |
def make_diff(current, revision):
    """Create the difference between the current revision and a previous version.

    :param current: the latest revision (object with a ``field_dict`` mapping)
    :param revision: an older revision to compare against
    :return: sorted list of ``(field_name, html_patch)`` tuples, one per
        changed field; unchanged fields are omitted
    """
    the_diff = []
    dmp = diff_match_patch()
    # Union of field names so fields added or removed between revisions
    # still show up in the diff.
    for field in (set(current.field_dict.keys()) | set(revision.field_dict.keys())):
        # These exclusions really should be configurable
        if field == 'id' or field.endswith('_rendered'):
            continue
        # KeyError's may happen if the database structure changes
        # between the creation of revisions. This isn't ideal,
        # but should not be a fatal error.
        # Log this?
        missing_field = False
        try:
            # `or ""` normalizes None values to empty strings for diffing.
            cur_val = current.field_dict[field] or ""
        except KeyError:
            cur_val = "No such field in latest version\n"
            missing_field = True
        try:
            old_val = revision.field_dict[field] or ""
        except KeyError:
            old_val = "No such field in old version\n"
            missing_field = True
        if missing_field:
            # Ensure that the complete texts are marked as changed
            # so new entries containing any of the marker words
            # don't show up as differences
            diffs = [(dmp.DIFF_DELETE, old_val), (dmp.DIFF_INSERT, cur_val)]
            patch = dmp.diff_prettyHtml(diffs)
        elif isinstance(cur_val, Markup):
            # we roll our own diff here, so we can compare of the raw
            # markdown, rather than the rendered result.
            # NOTE(review): this assumes old_val is also a Markup (has .raw)
            # whenever cur_val is -- confirm this holds when a field's type
            # changed between revisions.
            if cur_val.raw == old_val.raw:
                continue
            diffs = dmp.diff_main(old_val.raw, cur_val.raw)
            patch = dmp.diff_prettyHtml(diffs)
        elif cur_val == old_val:
            continue
        else:
            # Compare the actual field values
            diffs = dmp.diff_main(force_text(old_val), force_text(cur_val))
            patch = dmp.diff_prettyHtml(diffs)
        the_diff.append((field, patch))
    # Sort by field name for a stable, predictable display order.
    the_diff.sort()
    return the_diff
|
[
"def",
"make_diff",
"(",
"current",
",",
"revision",
")",
":",
"the_diff",
"=",
"[",
"]",
"dmp",
"=",
"diff_match_patch",
"(",
")",
"for",
"field",
"in",
"(",
"set",
"(",
"current",
".",
"field_dict",
".",
"keys",
"(",
")",
")",
"|",
"set",
"(",
"revision",
".",
"field_dict",
".",
"keys",
"(",
")",
")",
")",
":",
"# These exclusions really should be configurable",
"if",
"field",
"==",
"'id'",
"or",
"field",
".",
"endswith",
"(",
"'_rendered'",
")",
":",
"continue",
"# KeyError's may happen if the database structure changes",
"# between the creation of revisions. This isn't ideal,",
"# but should not be a fatal error.",
"# Log this?",
"missing_field",
"=",
"False",
"try",
":",
"cur_val",
"=",
"current",
".",
"field_dict",
"[",
"field",
"]",
"or",
"\"\"",
"except",
"KeyError",
":",
"cur_val",
"=",
"\"No such field in latest version\\n\"",
"missing_field",
"=",
"True",
"try",
":",
"old_val",
"=",
"revision",
".",
"field_dict",
"[",
"field",
"]",
"or",
"\"\"",
"except",
"KeyError",
":",
"old_val",
"=",
"\"No such field in old version\\n\"",
"missing_field",
"=",
"True",
"if",
"missing_field",
":",
"# Ensure that the complete texts are marked as changed",
"# so new entries containing any of the marker words",
"# don't show up as differences",
"diffs",
"=",
"[",
"(",
"dmp",
".",
"DIFF_DELETE",
",",
"old_val",
")",
",",
"(",
"dmp",
".",
"DIFF_INSERT",
",",
"cur_val",
")",
"]",
"patch",
"=",
"dmp",
".",
"diff_prettyHtml",
"(",
"diffs",
")",
"elif",
"isinstance",
"(",
"cur_val",
",",
"Markup",
")",
":",
"# we roll our own diff here, so we can compare of the raw",
"# markdown, rather than the rendered result.",
"if",
"cur_val",
".",
"raw",
"==",
"old_val",
".",
"raw",
":",
"continue",
"diffs",
"=",
"dmp",
".",
"diff_main",
"(",
"old_val",
".",
"raw",
",",
"cur_val",
".",
"raw",
")",
"patch",
"=",
"dmp",
".",
"diff_prettyHtml",
"(",
"diffs",
")",
"elif",
"cur_val",
"==",
"old_val",
":",
"continue",
"else",
":",
"# Compare the actual field values",
"diffs",
"=",
"dmp",
".",
"diff_main",
"(",
"force_text",
"(",
"old_val",
")",
",",
"force_text",
"(",
"cur_val",
")",
")",
"patch",
"=",
"dmp",
".",
"diff_prettyHtml",
"(",
"diffs",
")",
"the_diff",
".",
"append",
"(",
"(",
"field",
",",
"patch",
")",
")",
"the_diff",
".",
"sort",
"(",
")",
"return",
"the_diff"
] | 40.702128 | 17.106383 |
def contextMenuEvent(self, event):
    """
    Handles the default menu options for the orb widget.

    :param event | <QContextMenuEvent>
    """
    use_default_menu = self.contextMenuPolicy() == Qt.DefaultContextMenu
    if not use_default_menu:
        # Any non-default policy defers to the base class behavior.
        super(XOrbTreeWidget, self).contextMenuEvent(event)
    else:
        self.showMenu(event.pos())
|
[
"def",
"contextMenuEvent",
"(",
"self",
",",
"event",
")",
":",
"if",
"self",
".",
"contextMenuPolicy",
"(",
")",
"==",
"Qt",
".",
"DefaultContextMenu",
":",
"self",
".",
"showMenu",
"(",
"event",
".",
"pos",
"(",
")",
")",
"else",
":",
"super",
"(",
"XOrbTreeWidget",
",",
"self",
")",
".",
"contextMenuEvent",
"(",
"event",
")"
] | 35.5 | 13.7 |
def _build_resolver(cls, session: AppSession):
    '''Build resolver.'''
    args = session.args

    # A global timeout, when set, overrides the DNS-specific one.
    dns_timeout = args.timeout if args.timeout else args.dns_timeout

    # Map the CLI family flags onto an IPFamilyPreference value.
    if args.inet_family == 'IPv4':
        family = IPFamilyPreference.ipv4_only
    elif args.inet_family == 'IPv6':
        family = IPFamilyPreference.ipv6_only
    elif args.prefer_family == 'IPv6':
        family = IPFamilyPreference.prefer_ipv6
    elif args.prefer_family == 'IPv4':
        family = IPFamilyPreference.prefer_ipv4
    else:
        family = IPFamilyPreference.any

    if args.dns_cache:
        cache = session.factory.class_map['Resolver'].new_cache()
    else:
        cache = None

    return session.factory.new(
        'Resolver',
        family=family,
        timeout=dns_timeout,
        rotate=args.rotate_dns,
        cache=cache,
    )
|
[
"def",
"_build_resolver",
"(",
"cls",
",",
"session",
":",
"AppSession",
")",
":",
"args",
"=",
"session",
".",
"args",
"dns_timeout",
"=",
"args",
".",
"dns_timeout",
"if",
"args",
".",
"timeout",
":",
"dns_timeout",
"=",
"args",
".",
"timeout",
"if",
"args",
".",
"inet_family",
"==",
"'IPv4'",
":",
"family",
"=",
"IPFamilyPreference",
".",
"ipv4_only",
"elif",
"args",
".",
"inet_family",
"==",
"'IPv6'",
":",
"family",
"=",
"IPFamilyPreference",
".",
"ipv6_only",
"elif",
"args",
".",
"prefer_family",
"==",
"'IPv6'",
":",
"family",
"=",
"IPFamilyPreference",
".",
"prefer_ipv6",
"elif",
"args",
".",
"prefer_family",
"==",
"'IPv4'",
":",
"family",
"=",
"IPFamilyPreference",
".",
"prefer_ipv4",
"else",
":",
"family",
"=",
"IPFamilyPreference",
".",
"any",
"return",
"session",
".",
"factory",
".",
"new",
"(",
"'Resolver'",
",",
"family",
"=",
"family",
",",
"timeout",
"=",
"dns_timeout",
",",
"rotate",
"=",
"args",
".",
"rotate_dns",
",",
"cache",
"=",
"session",
".",
"factory",
".",
"class_map",
"[",
"'Resolver'",
"]",
".",
"new_cache",
"(",
")",
"if",
"args",
".",
"dns_cache",
"else",
"None",
",",
")"
] | 33.692308 | 14.692308 |
def _zip(self) -> ObjectValue:
    """Zip the receiver into an object and return it."""
    # Start from a copy of the siblings so the receiver is not mutated.
    siblings_copy = self.siblings.copy()
    zipped = ObjectValue(siblings_copy, self.timestamp)
    zipped[self.name] = self.value
    return zipped
|
[
"def",
"_zip",
"(",
"self",
")",
"->",
"ObjectValue",
":",
"res",
"=",
"ObjectValue",
"(",
"self",
".",
"siblings",
".",
"copy",
"(",
")",
",",
"self",
".",
"timestamp",
")",
"res",
"[",
"self",
".",
"name",
"]",
"=",
"self",
".",
"value",
"return",
"res"
] | 41.2 | 12 |
def save(self) -> None:
    """
    Save the training trace to :py:attr:`CXF_TRACE_FILE` file under the specified directory.

    :raise ValueError: if no output directory was specified
    """
    out_dir = self._output_dir
    if out_dir is None:
        raise ValueError('Can not save TrainingTrace without output dir.')
    yaml_to_file(self._trace, out_dir, CXF_TRACE_FILE)
|
[
"def",
"save",
"(",
"self",
")",
"->",
"None",
":",
"if",
"self",
".",
"_output_dir",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Can not save TrainingTrace without output dir.'",
")",
"yaml_to_file",
"(",
"self",
".",
"_trace",
",",
"self",
".",
"_output_dir",
",",
"CXF_TRACE_FILE",
")"
] | 42.777778 | 22.777778 |
def update_cluster_admin_password(self, username, new_password):
    """Update cluster admin password.

    Issues a POST to the ``cluster_admins/<username>`` endpoint and
    returns True once a 200 response is confirmed by ``request``.
    """
    self.request(
        url="cluster_admins/{0}".format(username),
        method='POST',
        data={'password': new_password},
        expected_response_code=200
    )
    return True
|
[
"def",
"update_cluster_admin_password",
"(",
"self",
",",
"username",
",",
"new_password",
")",
":",
"url",
"=",
"\"cluster_admins/{0}\"",
".",
"format",
"(",
"username",
")",
"data",
"=",
"{",
"'password'",
":",
"new_password",
"}",
"self",
".",
"request",
"(",
"url",
"=",
"url",
",",
"method",
"=",
"'POST'",
",",
"data",
"=",
"data",
",",
"expected_response_code",
"=",
"200",
")",
"return",
"True"
] | 23.4375 | 21.1875 |
def get_optimization_coordinates(self):
    """Return the coordinates of the geometries at each point in the optimization"""
    coordinates = self.fields.get("Opt point 1 Geometries")
    if coordinates is None:
        # No optimization geometry data present in this file.
        return []
    # One (num_atoms x 3) frame per optimization step.
    num_atoms = len(self.molecule.numbers)
    return np.reshape(coordinates, (-1, num_atoms, 3))
|
[
"def",
"get_optimization_coordinates",
"(",
"self",
")",
":",
"coor_array",
"=",
"self",
".",
"fields",
".",
"get",
"(",
"\"Opt point 1 Geometries\"",
")",
"if",
"coor_array",
"is",
"None",
":",
"return",
"[",
"]",
"else",
":",
"return",
"np",
".",
"reshape",
"(",
"coor_array",
",",
"(",
"-",
"1",
",",
"len",
"(",
"self",
".",
"molecule",
".",
"numbers",
")",
",",
"3",
")",
")"
] | 48.142857 | 17.571429 |
def fetchmany(self, size=None):
    """Fetch up to *size* rows (defaults to the cursor's arraysize)."""
    self._check_executed()
    count = self.arraysize if size is None else size
    fetched = []
    for _ in range_type(count):
        row = self.read_next()
        # A None row signals the result set is exhausted.
        if row is None:
            break
        fetched.append(row)
        self.rownumber += 1
    return fetched
|
[
"def",
"fetchmany",
"(",
"self",
",",
"size",
"=",
"None",
")",
":",
"self",
".",
"_check_executed",
"(",
")",
"if",
"size",
"is",
"None",
":",
"size",
"=",
"self",
".",
"arraysize",
"rows",
"=",
"[",
"]",
"for",
"i",
"in",
"range_type",
"(",
"size",
")",
":",
"row",
"=",
"self",
".",
"read_next",
"(",
")",
"if",
"row",
"is",
"None",
":",
"break",
"rows",
".",
"append",
"(",
"row",
")",
"self",
".",
"rownumber",
"+=",
"1",
"return",
"rows"
] | 25.214286 | 13.642857 |
def get_polarization_change_norm(self, convert_to_muC_per_cm2=True, all_in_polar=True):
    """Return the magnitude of the same-branch polarization change.

    The polarization difference vector is expanded along the unit lattice
    vectors of the polar (final) structure and its Euclidean norm returned.
    """
    lattice_rows = self.structures[-1].lattice.matrix
    units = [row / np.linalg.norm(row) for row in lattice_rows]
    delta = self.get_polarization_change(
        convert_to_muC_per_cm2=convert_to_muC_per_cm2,
        all_in_polar=all_in_polar).ravel()
    combined = units[0] * delta[0] + units[1] * delta[1] + units[2] * delta[2]
    return np.linalg.norm(combined)
|
[
"def",
"get_polarization_change_norm",
"(",
"self",
",",
"convert_to_muC_per_cm2",
"=",
"True",
",",
"all_in_polar",
"=",
"True",
")",
":",
"polar",
"=",
"self",
".",
"structures",
"[",
"-",
"1",
"]",
"a",
",",
"b",
",",
"c",
"=",
"polar",
".",
"lattice",
".",
"matrix",
"a",
",",
"b",
",",
"c",
"=",
"a",
"/",
"np",
".",
"linalg",
".",
"norm",
"(",
"a",
")",
",",
"b",
"/",
"np",
".",
"linalg",
".",
"norm",
"(",
"b",
")",
",",
"c",
"/",
"np",
".",
"linalg",
".",
"norm",
"(",
"c",
")",
"P",
"=",
"self",
".",
"get_polarization_change",
"(",
"convert_to_muC_per_cm2",
"=",
"convert_to_muC_per_cm2",
",",
"all_in_polar",
"=",
"all_in_polar",
")",
".",
"ravel",
"(",
")",
"P_norm",
"=",
"np",
".",
"linalg",
".",
"norm",
"(",
"a",
"*",
"P",
"[",
"0",
"]",
"+",
"b",
"*",
"P",
"[",
"1",
"]",
"+",
"c",
"*",
"P",
"[",
"2",
"]",
")",
"return",
"P_norm"
] | 47.692308 | 19.538462 |
def plot_connectivity_surrogate(self, measure_name, repeats=100, fig=None):
    """Plot the 95th percentile of surrogate (phase-randomized) connectivity.

    Repeatedly samples *measure_name* from phase-randomized data, which
    estimates the connectivity distribution under the null hypothesis of no
    causal structure, and plots the upper 95% bound.

    Parameters
    ----------
    measure_name : str
        Connectivity measure to compute (see :class:`Connectivity`).
    repeats : int, optional
        Number of surrogate samples to draw.
    fig : Figure or None, optional
        Figure to draw into; a new one is created when ``None``.

    Returns
    -------
    Figure
        The figure that was drawn into.
    """
    surrogate = self.get_surrogate_connectivity(measure_name, repeats)
    self._prepare_plots(True, False)
    upper_bound = np.percentile(surrogate, 95, axis=0)
    return self.plotting.plot_connectivity_spectrum(
        [upper_bound], self.fs_, freq_range=self.plot_f_range, fig=fig)
|
[
"def",
"plot_connectivity_surrogate",
"(",
"self",
",",
"measure_name",
",",
"repeats",
"=",
"100",
",",
"fig",
"=",
"None",
")",
":",
"cb",
"=",
"self",
".",
"get_surrogate_connectivity",
"(",
"measure_name",
",",
"repeats",
")",
"self",
".",
"_prepare_plots",
"(",
"True",
",",
"False",
")",
"cu",
"=",
"np",
".",
"percentile",
"(",
"cb",
",",
"95",
",",
"axis",
"=",
"0",
")",
"fig",
"=",
"self",
".",
"plotting",
".",
"plot_connectivity_spectrum",
"(",
"[",
"cu",
"]",
",",
"self",
".",
"fs_",
",",
"freq_range",
"=",
"self",
".",
"plot_f_range",
",",
"fig",
"=",
"fig",
")",
"return",
"fig"
] | 39.266667 | 28.166667 |
def set_dict_item(dct, name_string, set_to):
    """Assign *set_to* to the nested dict entry named by *name_string*.

    *name_string* is a '-->'-joined key path as produced by flatten_dict.
    The new value is coerced to the type of the entry it replaces, so the
    original entry's type is preserved.
    """
    *parent_keys, leaf_key = str(name_string).split('-->')
    target = dct
    for key in parent_keys:
        target = target[key]
    original_type = type(target[leaf_key])
    target[leaf_key] = original_type(set_to)
|
[
"def",
"set_dict_item",
"(",
"dct",
",",
"name_string",
",",
"set_to",
")",
":",
"key_strings",
"=",
"str",
"(",
"name_string",
")",
".",
"split",
"(",
"'-->'",
")",
"d",
"=",
"dct",
"for",
"ks",
"in",
"key_strings",
"[",
":",
"-",
"1",
"]",
":",
"d",
"=",
"d",
"[",
"ks",
"]",
"item_type",
"=",
"type",
"(",
"d",
"[",
"key_strings",
"[",
"-",
"1",
"]",
"]",
")",
"d",
"[",
"key_strings",
"[",
"-",
"1",
"]",
"]",
"=",
"item_type",
"(",
"set_to",
")"
] | 33.071429 | 17.142857 |
def magic_fields(self):
    """Return only the schema's magic fields (names prefixed with '_')."""
    magic = {}
    for name, value in self.fields.items():
        if name.startswith('_'):
            magic[name] = value
    return magic
|
[
"def",
"magic_fields",
"(",
"self",
")",
":",
"return",
"{",
"f",
":",
"v",
"for",
"f",
",",
"v",
"in",
"self",
".",
"fields",
".",
"items",
"(",
")",
"if",
"f",
".",
"startswith",
"(",
"'_'",
")",
"}"
] | 47 | 16.666667 |
def avl_join_dir_recursive(t1, t2, node, direction):
    """
    Recursive version of join_left and join_right
    TODO: make this iterative using a stack

    Joins two AVL trees around ``node``.  ``direction`` selects which input
    is the larger tree (0: t2 is large, 1: t1 is large); the function walks
    down the larger tree's spine until its height is within one of the
    smaller tree, splices with ``avl_new_top``, and rebalances with single
    rotations on the way back out.
    """
    other_side = 1 - direction
    if _DEBUG_JOIN_DIR:
        print('--JOIN DIR (dir=%r) --' % (direction,))
        ascii_tree(t1, 't1')
        ascii_tree(t2, 't2')
    # Decide which input plays the "large" role based on join direction.
    if direction == 0:
        large, small = t2, t1
    elif direction == 1:
        large, small = t1, t2
    else:
        assert False
    # Follow the spine of the larger tree
    spine = large[direction]
    rest = large[other_side]
    # k_, v_ = large.key, large.value
    hsmall = height(small)
    hspine = height(spine)
    hrest = height(rest)
    if _DEBUG_JOIN_DIR:
        ascii_tree(spine, 'spine')
        ascii_tree(rest, 'rest')
        ascii_tree(small, 'small')
    if hspine <= hsmall + 1:
        # Base case: spine height is close enough to the small tree —
        # splice the small tree onto the spine at this level.
        t_ = avl_new_top(small, spine, node, direction)
        if _DEBUG_JOIN_DIR:
            print('JOIN DIR (BASE)')
            ascii_tree(t_, 't_')
        if height(t_) <= hrest + 1:
            if _DEBUG_JOIN_DIR:
                print('JOIN DIR (Case 1)')
            return avl_new_top(t_, rest, large, direction)
        else:
            # Double rotation, but with a new node
            if _DEBUG_JOIN_DIR:
                print('JOIN DIR (Case 2)')
            t_rotate = avl_rotate_single(t_, direction)
            if _DEBUG_JOIN_DIR:
                ascii_tree(t_rotate, 't_rotate')
            # NOTE(review): these _assert_nodes sanity checks run
            # unconditionally (they are NOT under _DEBUG_JOIN_DIR) —
            # confirm whether that cost is intended on the hot path.
            EulerTourTree(root=t_rotate)._assert_nodes('t_rotate')
            t_merge = avl_new_top(rest, t_rotate, large, other_side)
            if _DEBUG_JOIN_DIR:
                ascii_tree(t_merge, 't_merge')
            EulerTourTree(root=t_merge)._assert_nodes('t_merge')
            new_root = avl_rotate_single(t_merge, other_side)
            if _DEBUG_JOIN_DIR:
                ascii_tree(new_root, 'new_root')
            EulerTourTree(root=new_root)._assert_nodes('new_root')
            return new_root
    else:
        # Traverse down the spine in the appropriate direction
        if _DEBUG_JOIN_DIR:
            print('JOIN DIR (RECURSE)')
        if direction == 0:
            t_ = avl_join_dir_recursive(small, spine, node, direction)
        elif direction == 1:
            # Here t2 is the small tree (direction == 1), so this mirrors
            # the (small, spine) call above with argument order swapped.
            t_ = avl_join_dir_recursive(spine, t2, node, direction)
        else:
            raise AssertionError('invalid direction')
        t__ = avl_new_top(t_, rest, large, direction)
        if height(t_) <= hrest + 1:
            if _DEBUG_JOIN_DIR:
                print('JOIN DIR (Case 3)')
            return t__
        else:
            if _DEBUG_JOIN_DIR:
                print('JOIN DIR (Case 4)')
            return avl_rotate_single(t__, other_side)
    assert False, 'should never get here'
|
[
"def",
"avl_join_dir_recursive",
"(",
"t1",
",",
"t2",
",",
"node",
",",
"direction",
")",
":",
"other_side",
"=",
"1",
"-",
"direction",
"if",
"_DEBUG_JOIN_DIR",
":",
"print",
"(",
"'--JOIN DIR (dir=%r) --'",
"%",
"(",
"direction",
",",
")",
")",
"ascii_tree",
"(",
"t1",
",",
"'t1'",
")",
"ascii_tree",
"(",
"t2",
",",
"'t2'",
")",
"if",
"direction",
"==",
"0",
":",
"large",
",",
"small",
"=",
"t2",
",",
"t1",
"elif",
"direction",
"==",
"1",
":",
"large",
",",
"small",
"=",
"t1",
",",
"t2",
"else",
":",
"assert",
"False",
"# Follow the spine of the larger tree",
"spine",
"=",
"large",
"[",
"direction",
"]",
"rest",
"=",
"large",
"[",
"other_side",
"]",
"# k_, v_ = large.key, large.value",
"hsmall",
"=",
"height",
"(",
"small",
")",
"hspine",
"=",
"height",
"(",
"spine",
")",
"hrest",
"=",
"height",
"(",
"rest",
")",
"if",
"_DEBUG_JOIN_DIR",
":",
"ascii_tree",
"(",
"spine",
",",
"'spine'",
")",
"ascii_tree",
"(",
"rest",
",",
"'rest'",
")",
"ascii_tree",
"(",
"small",
",",
"'small'",
")",
"if",
"hspine",
"<=",
"hsmall",
"+",
"1",
":",
"t_",
"=",
"avl_new_top",
"(",
"small",
",",
"spine",
",",
"node",
",",
"direction",
")",
"if",
"_DEBUG_JOIN_DIR",
":",
"print",
"(",
"'JOIN DIR (BASE)'",
")",
"ascii_tree",
"(",
"t_",
",",
"'t_'",
")",
"if",
"height",
"(",
"t_",
")",
"<=",
"hrest",
"+",
"1",
":",
"if",
"_DEBUG_JOIN_DIR",
":",
"print",
"(",
"'JOIN DIR (Case 1)'",
")",
"return",
"avl_new_top",
"(",
"t_",
",",
"rest",
",",
"large",
",",
"direction",
")",
"else",
":",
"# Double rotation, but with a new node",
"if",
"_DEBUG_JOIN_DIR",
":",
"print",
"(",
"'JOIN DIR (Case 2)'",
")",
"t_rotate",
"=",
"avl_rotate_single",
"(",
"t_",
",",
"direction",
")",
"if",
"_DEBUG_JOIN_DIR",
":",
"ascii_tree",
"(",
"t_rotate",
",",
"'t_rotate'",
")",
"EulerTourTree",
"(",
"root",
"=",
"t_rotate",
")",
".",
"_assert_nodes",
"(",
"'t_rotate'",
")",
"t_merge",
"=",
"avl_new_top",
"(",
"rest",
",",
"t_rotate",
",",
"large",
",",
"other_side",
")",
"if",
"_DEBUG_JOIN_DIR",
":",
"ascii_tree",
"(",
"t_merge",
",",
"'t_merge'",
")",
"EulerTourTree",
"(",
"root",
"=",
"t_merge",
")",
".",
"_assert_nodes",
"(",
"'t_merge'",
")",
"new_root",
"=",
"avl_rotate_single",
"(",
"t_merge",
",",
"other_side",
")",
"if",
"_DEBUG_JOIN_DIR",
":",
"ascii_tree",
"(",
"new_root",
",",
"'new_root'",
")",
"EulerTourTree",
"(",
"root",
"=",
"new_root",
")",
".",
"_assert_nodes",
"(",
"'new_root'",
")",
"return",
"new_root",
"else",
":",
"# Traverse down the spine in the appropriate direction",
"if",
"_DEBUG_JOIN_DIR",
":",
"print",
"(",
"'JOIN DIR (RECURSE)'",
")",
"if",
"direction",
"==",
"0",
":",
"t_",
"=",
"avl_join_dir_recursive",
"(",
"small",
",",
"spine",
",",
"node",
",",
"direction",
")",
"elif",
"direction",
"==",
"1",
":",
"t_",
"=",
"avl_join_dir_recursive",
"(",
"spine",
",",
"t2",
",",
"node",
",",
"direction",
")",
"else",
":",
"raise",
"AssertionError",
"(",
"'invalid direction'",
")",
"t__",
"=",
"avl_new_top",
"(",
"t_",
",",
"rest",
",",
"large",
",",
"direction",
")",
"if",
"height",
"(",
"t_",
")",
"<=",
"hrest",
"+",
"1",
":",
"if",
"_DEBUG_JOIN_DIR",
":",
"print",
"(",
"'JOIN DIR (Case 3)'",
")",
"return",
"t__",
"else",
":",
"if",
"_DEBUG_JOIN_DIR",
":",
"print",
"(",
"'JOIN DIR (Case 4)'",
")",
"return",
"avl_rotate_single",
"(",
"t__",
",",
"other_side",
")",
"assert",
"False",
",",
"'should never get here'"
] | 34.333333 | 14.641026 |
def sanitize(self):
    '''
    Check if the current settings conform to the RFC and fix where possible

    Raises ValueError when either port is not an integer in [0, 65536).
    '''
    # Both ports must be integral and fit an unsigned 16-bit field.
    for label, port in (('source', self.source_port),
                        ('destination', self.destination_port)):
        if not isinstance(port, numbers.Integral) or not 0 <= port < 2 ** 16:
            raise ValueError('Invalid %s port' % label)
|
[
"def",
"sanitize",
"(",
"self",
")",
":",
"# Check ports",
"if",
"not",
"isinstance",
"(",
"self",
".",
"source_port",
",",
"numbers",
".",
"Integral",
")",
"or",
"self",
".",
"source_port",
"<",
"0",
"or",
"self",
".",
"source_port",
">=",
"2",
"**",
"16",
":",
"raise",
"ValueError",
"(",
"'Invalid source port'",
")",
"if",
"not",
"isinstance",
"(",
"self",
".",
"destination_port",
",",
"numbers",
".",
"Integral",
")",
"or",
"self",
".",
"destination_port",
"<",
"0",
"or",
"self",
".",
"destination_port",
">=",
"2",
"**",
"16",
":",
"raise",
"ValueError",
"(",
"'Invalid destination port'",
")"
] | 38.071429 | 19.214286 |
def check_unknown_attachment_in_space(confluence, space_key):
    """
    Scan every page of a space and print links with unknown-attachment errors.

    :param confluence: Confluence client used for the page queries
    :param space_key: key of the space whose pages are reviewed
    :return: None; findings are printed to stdout
    """
    page_ids = get_all_pages_ids(confluence, space_key)
    print("Start review pages {} in {}".format(len(page_ids), space_key))
    for current_id in page_ids:
        error_link = confluence.has_unknown_attachment_error(current_id)
        if len(error_link) > 0:
            print(error_link)
|
[
"def",
"check_unknown_attachment_in_space",
"(",
"confluence",
",",
"space_key",
")",
":",
"page_ids",
"=",
"get_all_pages_ids",
"(",
"confluence",
",",
"space_key",
")",
"print",
"(",
"\"Start review pages {} in {}\"",
".",
"format",
"(",
"len",
"(",
"page_ids",
")",
",",
"space_key",
")",
")",
"for",
"page_id",
"in",
"page_ids",
":",
"link",
"=",
"confluence",
".",
"has_unknown_attachment_error",
"(",
"page_id",
")",
"if",
"len",
"(",
"link",
")",
">",
"0",
":",
"print",
"(",
"link",
")"
] | 33.461538 | 16.384615 |
def save_base_map(filename, grouped_by_text):
    """Dump a list of agents along with groundings and counts into a csv file

    Each output row is [text, db, db_id, count, name], where name is the
    UniProt mnemonic for 'UP' groundings and empty otherwise.

    Parameters
    ----------
    filename : str
        Filepath for output file
    grouped_by_text : list of tuple
        List of tuples of the form output by agent_texts_with_grounding
    """
    rows = []
    for group in grouped_by_text:
        text_string = group[0]
        for db, db_id, count in group[1]:
            if db == 'UP':
                # Resolve the UniProt ID to its human-readable mnemonic.
                name = uniprot_client.get_mnemonic(db_id)
            else:
                name = ''
            row = [text_string, db, db_id, count, name]
            rows.append(row)
    # CRLF line terminator and minimal quoting, per csv conventions.
    write_unicode_csv(filename, rows, delimiter=',', quotechar='"',
                      quoting=csv.QUOTE_MINIMAL, lineterminator='\r\n')
|
[
"def",
"save_base_map",
"(",
"filename",
",",
"grouped_by_text",
")",
":",
"rows",
"=",
"[",
"]",
"for",
"group",
"in",
"grouped_by_text",
":",
"text_string",
"=",
"group",
"[",
"0",
"]",
"for",
"db",
",",
"db_id",
",",
"count",
"in",
"group",
"[",
"1",
"]",
":",
"if",
"db",
"==",
"'UP'",
":",
"name",
"=",
"uniprot_client",
".",
"get_mnemonic",
"(",
"db_id",
")",
"else",
":",
"name",
"=",
"''",
"row",
"=",
"[",
"text_string",
",",
"db",
",",
"db_id",
",",
"count",
",",
"name",
"]",
"rows",
".",
"append",
"(",
"row",
")",
"write_unicode_csv",
"(",
"filename",
",",
"rows",
",",
"delimiter",
"=",
"','",
",",
"quotechar",
"=",
"'\"'",
",",
"quoting",
"=",
"csv",
".",
"QUOTE_MINIMAL",
",",
"lineterminator",
"=",
"'\\r\\n'",
")"
] | 33.73913 | 17.478261 |
def get_joined_filters(self, filters):
    """
    Build a new Filters object whose active filters and values are the
    concatenation of this instance's and *filters*' (self's first).
    """
    joined = Filters(self.filter_converter, self.datamodel)
    joined.filters = self.filters + filters.filters
    joined.values = self.values + filters.values
    return joined
|
[
"def",
"get_joined_filters",
"(",
"self",
",",
"filters",
")",
":",
"retfilters",
"=",
"Filters",
"(",
"self",
".",
"filter_converter",
",",
"self",
".",
"datamodel",
")",
"retfilters",
".",
"filters",
"=",
"self",
".",
"filters",
"+",
"filters",
".",
"filters",
"retfilters",
".",
"values",
"=",
"self",
".",
"values",
"+",
"filters",
".",
"values",
"return",
"retfilters"
] | 41.625 | 13.125 |
def resolve_topic(topic):
    """Return the class described by *topic*.

    A topic is a string of the form 'module.path#ClassName'.

    Args:
        topic: A string describing a class.

    Returns:
        A class.

    Raises:
        TopicResolutionError: If the module cannot be imported or the
            class attribute does not exist.
    """
    module_name, _, class_name = topic.partition('#')
    try:
        module = importlib.import_module(module_name)
    except ImportError as err:
        raise TopicResolutionError("{}: {}".format(topic, err))
    try:
        cls = resolve_attr(module, class_name)
    except AttributeError as err:
        raise TopicResolutionError("{}: {}".format(topic, err))
    return cls
|
[
"def",
"resolve_topic",
"(",
"topic",
")",
":",
"try",
":",
"module_name",
",",
"_",
",",
"class_name",
"=",
"topic",
".",
"partition",
"(",
"'#'",
")",
"module",
"=",
"importlib",
".",
"import_module",
"(",
"module_name",
")",
"except",
"ImportError",
"as",
"e",
":",
"raise",
"TopicResolutionError",
"(",
"\"{}: {}\"",
".",
"format",
"(",
"topic",
",",
"e",
")",
")",
"try",
":",
"cls",
"=",
"resolve_attr",
"(",
"module",
",",
"class_name",
")",
"except",
"AttributeError",
"as",
"e",
":",
"raise",
"TopicResolutionError",
"(",
"\"{}: {}\"",
".",
"format",
"(",
"topic",
",",
"e",
")",
")",
"return",
"cls"
] | 26.863636 | 20.681818 |
def api_payload(self):
    """Build the API submission payload from the attributes listed in ``_api_payload``.

    Attributes that are unset or ``None`` are omitted.

    Raises:
        NotImplementedError: if the subclass defines no ``_api_payload`` list.
    """
    if not self._api_payload:
        raise NotImplementedError()
    return {name: getattr(self, name, None)
            for name in self._api_payload
            if getattr(self, name, None) is not None}
|
[
"def",
"api_payload",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_api_payload",
":",
"raise",
"NotImplementedError",
"(",
")",
"payload",
"=",
"{",
"}",
"for",
"attr_name",
"in",
"self",
".",
"_api_payload",
":",
"value",
"=",
"getattr",
"(",
"self",
",",
"attr_name",
",",
"None",
")",
"if",
"value",
"is",
"not",
"None",
":",
"payload",
"[",
"attr_name",
"]",
"=",
"value",
"return",
"payload"
] | 39.7 | 8.6 |
def assertNoTMDiffs(tms):
    """
    Check for diffs among the TM instances in the passed in tms dict and
    raise an assert if any are detected

    Parameters:
    ---------------------------------------------------------------------
    tms: dict of TM instances
    """
    if len(tms) == 1:
        return
    if len(tms) > 2:
        # Bug fix: the original raised a plain string, which is a TypeError
        # in Python 3 (string exceptions were removed); raise a real
        # exception type instead.
        raise NotImplementedError("Not implemented for more than 2 TMs")
    same = fdrutils.tmDiff2(tms.values(), verbosity=VERBOSITY)
    assert(same)
    return
|
[
"def",
"assertNoTMDiffs",
"(",
"tms",
")",
":",
"if",
"len",
"(",
"tms",
")",
"==",
"1",
":",
"return",
"if",
"len",
"(",
"tms",
")",
">",
"2",
":",
"raise",
"\"Not implemented for more than 2 TMs\"",
"same",
"=",
"fdrutils",
".",
"tmDiff2",
"(",
"tms",
".",
"values",
"(",
")",
",",
"verbosity",
"=",
"VERBOSITY",
")",
"assert",
"(",
"same",
")",
"return"
] | 24.777778 | 21.555556 |
def osCopy(self):
    """Send the OS "copy" keyboard shortcut (Ctrl held while typing 'c')."""
    keyboard = Keyboard()
    keyboard.keyDown("{CTRL}")
    keyboard.type("c")
    keyboard.keyUp("{CTRL}")
|
[
"def",
"osCopy",
"(",
"self",
")",
":",
"k",
"=",
"Keyboard",
"(",
")",
"k",
".",
"keyDown",
"(",
"\"{CTRL}\"",
")",
"k",
".",
"type",
"(",
"\"c\"",
")",
"k",
".",
"keyUp",
"(",
"\"{CTRL}\"",
")"
] | 27.666667 | 15 |
def patch(self, client=None):
    """Send all locally-changed properties to the backend in one PATCH request.

    ``_properties`` is refreshed from the API response.  If
    :attr:`user_project` is set, the API request is billed to that project.

    :type client: :class:`~google.cloud.storage.client.Client` or
                  ``NoneType``
    :param client: the client to use.  If not passed, falls back to the
                   ``client`` stored on the current object.
    """
    client = self._require_client(client)
    query_params = self._query_params
    # 'PATCH' is documented not to work properly with 'noAcl', so force
    # the full projection.
    query_params["projection"] = "full"
    changed = {name: self._properties[name] for name in self._changes}
    response = client._connection.api_request(
        method="PATCH",
        path=self.path,
        data=changed,
        query_params=query_params,
        _target_object=self,
    )
    self._set_properties(response)
|
[
"def",
"patch",
"(",
"self",
",",
"client",
"=",
"None",
")",
":",
"client",
"=",
"self",
".",
"_require_client",
"(",
"client",
")",
"query_params",
"=",
"self",
".",
"_query_params",
"# Pass '?projection=full' here because 'PATCH' documented not",
"# to work properly w/ 'noAcl'.",
"query_params",
"[",
"\"projection\"",
"]",
"=",
"\"full\"",
"update_properties",
"=",
"{",
"key",
":",
"self",
".",
"_properties",
"[",
"key",
"]",
"for",
"key",
"in",
"self",
".",
"_changes",
"}",
"# Make the API call.",
"api_response",
"=",
"client",
".",
"_connection",
".",
"api_request",
"(",
"method",
"=",
"\"PATCH\"",
",",
"path",
"=",
"self",
".",
"path",
",",
"data",
"=",
"update_properties",
",",
"query_params",
"=",
"query_params",
",",
"_target_object",
"=",
"self",
",",
")",
"self",
".",
"_set_properties",
"(",
"api_response",
")"
] | 39.214286 | 18.357143 |
def jobs(self):
    """
    Wrap each job of this stage in a JobInstance.

    :return: arrays of jobs.
    :rtype: list of yagocd.resources.job.JobInstance
    """
    return [
        JobInstance(session=self._session, data=item, stage=self)
        for item in self.data.jobs
    ]
|
[
"def",
"jobs",
"(",
"self",
")",
":",
"jobs",
"=",
"list",
"(",
")",
"for",
"data",
"in",
"self",
".",
"data",
".",
"jobs",
":",
"jobs",
".",
"append",
"(",
"JobInstance",
"(",
"session",
"=",
"self",
".",
"_session",
",",
"data",
"=",
"data",
",",
"stage",
"=",
"self",
")",
")",
"return",
"jobs"
] | 27.833333 | 19 |
def refine_MIDDLEWARE_CLASSES(original):
    """
    Insert LocaleMiddleware directly after SessionMiddleware.

    Django docs say that the LocaleMiddleware should come after the
    SessionMiddleware, so this verifies SessionMiddleware is enabled and
    places LocaleMiddleware at the correct position.  Be careful with the
    order when refining the middleware classes with following features.

    :param original: mutable sequence of middleware class paths
    :return: the same sequence with LocaleMiddleware inserted
    :raises LookupError: if SessionMiddleware is not enabled
    """
    try:
        # Only index() can raise ValueError; keep the try body minimal.
        index = original.index('django.contrib.sessions.middleware.SessionMiddleware')
    except ValueError:
        # Bug fix: the original message used a line continuation inside the
        # string literal, embedding a run of indentation spaces into the
        # user-facing error text.
        raise LookupError(
            'SessionMiddleware not found! Please make sure you have enabled '
            'the SessionMiddleware in your settings '
            '(django.contrib.sessions.middleware.SessionMiddleware).')
    original.insert(index + 1, 'django.middleware.locale.LocaleMiddleware')
    return original
|
[
"def",
"refine_MIDDLEWARE_CLASSES",
"(",
"original",
")",
":",
"try",
":",
"session_middleware_index",
"=",
"original",
".",
"index",
"(",
"'django.contrib.sessions.middleware.SessionMiddleware'",
")",
"original",
".",
"insert",
"(",
"session_middleware_index",
"+",
"1",
",",
"'django.middleware.locale.LocaleMiddleware'",
")",
"return",
"original",
"except",
"ValueError",
":",
"raise",
"LookupError",
"(",
"'SessionMiddleware not found! Please make sure you have enabled the \\\n SessionMiddleware in your settings (django.contrib.sessions.middleware.SessionMiddleware).'",
")"
] | 52.375 | 30.875 |
def new(
        name: str,
        arity: Arity,
        class_name: str=None,
        *,
        associative: bool=False,
        commutative: bool=False,
        one_identity: bool=False,
        infix: bool=False
) -> Type['Operation']:
    """Utility method to create a new operation type.

    Example:

        >>> Times = Operation.new('*', Arity.polyadic, 'Times', associative=True, commutative=True, one_identity=True)
        >>> Times
        Times['*', Arity(min_count=2, fixed_size=False), associative, commutative, one_identity]
        >>> str(Times(Symbol('a'), Symbol('b')))
        '*(a, b)'

    Args:
        name:
            Name or symbol for the operator. Will be used as name for the new
            class if `class_name` is not specified.
        arity:
            The arity of the operator as explained in the documentation of
            `Operation`.
        class_name:
            Name for the new operation class to be used instead of name. This
            argument is required if `name` is not a valid python identifier.

    Keyword Args:
        associative:
            See :attr:`~Operation.associative`.
        commutative:
            See :attr:`~Operation.commutative`.
        one_identity:
            See :attr:`~Operation.one_identity`.
        infix:
            See :attr:`~Operation.infix`.

    Raises:
        ValueError: if the class name of the operation is not a valid class
            identifier.
    """
    identifier = class_name or name
    if keyword.iskeyword(identifier) or not identifier.isidentifier():
        raise ValueError("Invalid identifier for new operator class.")
    attributes = {
        'name': name,
        'arity': arity,
        'associative': associative,
        'commutative': commutative,
        'one_identity': one_identity,
        'infix': infix,
    }
    return type(identifier, (Operation, ), attributes)
|
[
"def",
"new",
"(",
"name",
":",
"str",
",",
"arity",
":",
"Arity",
",",
"class_name",
":",
"str",
"=",
"None",
",",
"*",
",",
"associative",
":",
"bool",
"=",
"False",
",",
"commutative",
":",
"bool",
"=",
"False",
",",
"one_identity",
":",
"bool",
"=",
"False",
",",
"infix",
":",
"bool",
"=",
"False",
")",
"->",
"Type",
"[",
"'Operation'",
"]",
":",
"class_name",
"=",
"class_name",
"or",
"name",
"if",
"not",
"class_name",
".",
"isidentifier",
"(",
")",
"or",
"keyword",
".",
"iskeyword",
"(",
"class_name",
")",
":",
"raise",
"ValueError",
"(",
"\"Invalid identifier for new operator class.\"",
")",
"return",
"type",
"(",
"class_name",
",",
"(",
"Operation",
",",
")",
",",
"{",
"'name'",
":",
"name",
",",
"'arity'",
":",
"arity",
",",
"'associative'",
":",
"associative",
",",
"'commutative'",
":",
"commutative",
",",
"'one_identity'",
":",
"one_identity",
",",
"'infix'",
":",
"infix",
"}",
")"
] | 35.315789 | 21.982456 |
def _help_workbench(self):
    """Build the top-level Workbench help text.

    Returns a colorized multi-line string that points at the more specific
    help topics ('basic', 'workers', 'commands', or any topic name).
    """
    # NOTE(review): the local name 'help' shadows the builtin — left as-is.
    help = '%sWelcome to Workbench Help:%s' % (color.Yellow, color.Normal)
    help += '\n\t%s- workbench.help(\'basic\') %s for getting started help' % (color.Green, color.LightBlue)
    help += '\n\t%s- workbench.help(\'workers\') %s for help on available workers' % (color.Green, color.LightBlue)
    help += '\n\t%s- workbench.help(\'commands\') %s for help on workbench commands' % (color.Green, color.LightBlue)
    help += '\n\t%s- workbench.help(topic) %s where topic can be a help, command or worker' % (color.Green, color.LightBlue)
    help += '\n\n%sSee http://github.com/SuperCowPowers/workbench for more information\n%s' % (color.Yellow, color.Normal)
    return help
|
[
"def",
"_help_workbench",
"(",
"self",
")",
":",
"help",
"=",
"'%sWelcome to Workbench Help:%s'",
"%",
"(",
"color",
".",
"Yellow",
",",
"color",
".",
"Normal",
")",
"help",
"+=",
"'\\n\\t%s- workbench.help(\\'basic\\') %s for getting started help'",
"%",
"(",
"color",
".",
"Green",
",",
"color",
".",
"LightBlue",
")",
"help",
"+=",
"'\\n\\t%s- workbench.help(\\'workers\\') %s for help on available workers'",
"%",
"(",
"color",
".",
"Green",
",",
"color",
".",
"LightBlue",
")",
"help",
"+=",
"'\\n\\t%s- workbench.help(\\'commands\\') %s for help on workbench commands'",
"%",
"(",
"color",
".",
"Green",
",",
"color",
".",
"LightBlue",
")",
"help",
"+=",
"'\\n\\t%s- workbench.help(topic) %s where topic can be a help, command or worker'",
"%",
"(",
"color",
".",
"Green",
",",
"color",
".",
"LightBlue",
")",
"help",
"+=",
"'\\n\\n%sSee http://github.com/SuperCowPowers/workbench for more information\\n%s'",
"%",
"(",
"color",
".",
"Yellow",
",",
"color",
".",
"Normal",
")",
"return",
"help"
] | 84.666667 | 53.222222 |
def Shape(docs, drop=0.0):
    """Return the shape-feature ID of every token across *docs*.

    Produces a flat int array with one entry per token, in document order,
    paired with ``None`` (no backprop callback).  *drop* is accepted for API
    compatibility and ignored.
    """
    shapes = [token.shape for doc in docs for token in doc]
    return numpy.asarray(shapes, dtype="i"), None
|
[
"def",
"Shape",
"(",
"docs",
",",
"drop",
"=",
"0.0",
")",
":",
"ids",
"=",
"numpy",
".",
"zeros",
"(",
"(",
"sum",
"(",
"len",
"(",
"doc",
")",
"for",
"doc",
"in",
"docs",
")",
",",
")",
",",
"dtype",
"=",
"\"i\"",
")",
"i",
"=",
"0",
"for",
"doc",
"in",
"docs",
":",
"for",
"token",
"in",
"doc",
":",
"ids",
"[",
"i",
"]",
"=",
"token",
".",
"shape",
"i",
"+=",
"1",
"return",
"ids",
",",
"None"
] | 26.888889 | 17.333333 |
def run_sync(self, func, timeout=None):
    """Starts the `IOLoop`, runs the given function, and stops the loop.

    The function must return either a yieldable object or
    ``None``. If the function returns a yieldable object, the
    `IOLoop` will run until the yieldable is resolved (and
    `run_sync()` will return the yieldable's result). If it raises
    an exception, the `IOLoop` will stop and the exception will be
    re-raised to the caller.

    The keyword-only argument ``timeout`` may be used to set
    a maximum duration for the function. If the timeout expires,
    a `tornado.util.TimeoutError` is raised.

    This method is useful in conjunction with `tornado.gen.coroutine`
    to allow asynchronous calls in a ``main()`` function::

        @gen.coroutine
        def main():
            # do stuff...

        if __name__ == '__main__':
            IOLoop.current().run_sync(main)

    .. versionchanged:: 4.3
       Returning a non-``None``, non-yieldable value is now an error.
    """
    # One-element list used as a mutable cell so the nested closure can
    # store the result future.
    future_cell = [None]

    def run():
        try:
            result = func()
            if result is not None:
                # Imported lazily to avoid a circular import at module load.
                from .gen import convert_yielded
                result = convert_yielded(result)
        except Exception:
            # Capture the exception (with traceback) in a future so it is
            # re-raised to the caller via future_cell[0].result() below.
            future_cell[0] = TracebackFuture()
            future_cell[0].set_exc_info(sys.exc_info())
        else:
            if is_future(result):
                future_cell[0] = result
            else:
                # Plain (non-future) result: wrap it so the completion
                # callback below fires immediately.
                future_cell[0] = TracebackFuture()
                future_cell[0].set_result(result)
        # Stop the loop as soon as the result future resolves.
        self.add_future(future_cell[0], lambda future: self.stop())
    self.add_callback(run)
    if timeout is not None:
        # On timeout we just stop the loop; the unresolved future is
        # detected below and converted into a TimeoutError.
        timeout_handle = self.add_timeout(self.time() + timeout, self.stop)
    self.start()
    if timeout is not None:
        self.remove_timeout(timeout_handle)
    if not future_cell[0].done():
        raise TimeoutError('Operation timed out after %s seconds' % timeout)
    return future_cell[0].result()
|
[
"def",
"run_sync",
"(",
"self",
",",
"func",
",",
"timeout",
"=",
"None",
")",
":",
"future_cell",
"=",
"[",
"None",
"]",
"def",
"run",
"(",
")",
":",
"try",
":",
"result",
"=",
"func",
"(",
")",
"if",
"result",
"is",
"not",
"None",
":",
"from",
".",
"gen",
"import",
"convert_yielded",
"result",
"=",
"convert_yielded",
"(",
"result",
")",
"except",
"Exception",
":",
"future_cell",
"[",
"0",
"]",
"=",
"TracebackFuture",
"(",
")",
"future_cell",
"[",
"0",
"]",
".",
"set_exc_info",
"(",
"sys",
".",
"exc_info",
"(",
")",
")",
"else",
":",
"if",
"is_future",
"(",
"result",
")",
":",
"future_cell",
"[",
"0",
"]",
"=",
"result",
"else",
":",
"future_cell",
"[",
"0",
"]",
"=",
"TracebackFuture",
"(",
")",
"future_cell",
"[",
"0",
"]",
".",
"set_result",
"(",
"result",
")",
"self",
".",
"add_future",
"(",
"future_cell",
"[",
"0",
"]",
",",
"lambda",
"future",
":",
"self",
".",
"stop",
"(",
")",
")",
"self",
".",
"add_callback",
"(",
"run",
")",
"if",
"timeout",
"is",
"not",
"None",
":",
"timeout_handle",
"=",
"self",
".",
"add_timeout",
"(",
"self",
".",
"time",
"(",
")",
"+",
"timeout",
",",
"self",
".",
"stop",
")",
"self",
".",
"start",
"(",
")",
"if",
"timeout",
"is",
"not",
"None",
":",
"self",
".",
"remove_timeout",
"(",
"timeout_handle",
")",
"if",
"not",
"future_cell",
"[",
"0",
"]",
".",
"done",
"(",
")",
":",
"raise",
"TimeoutError",
"(",
"'Operation timed out after %s seconds'",
"%",
"timeout",
")",
"return",
"future_cell",
"[",
"0",
"]",
".",
"result",
"(",
")"
] | 39.462963 | 18.592593 |
def get_reqs(which="main"):
    """Build pip-style requirement strings (with version bounds) for the
    *which* group of all_reqs.

    Strictly-versioned packages additionally get an upper bound one patch
    release above their minimum version.
    """
    specs = []
    for name in all_reqs[which]:
        spec = name + ">=" + ver_tuple_to_str(min_versions[name])
        if name in version_strictly:
            ver = min_versions[name]
            next_patch = ver_tuple_to_str(ver[:-1]) + "." + str(ver[-1] + 1)
            spec += ",<" + next_patch
        specs.append(spec)
    return specs
|
[
"def",
"get_reqs",
"(",
"which",
"=",
"\"main\"",
")",
":",
"reqs",
"=",
"[",
"]",
"for",
"req",
"in",
"all_reqs",
"[",
"which",
"]",
":",
"req_str",
"=",
"req",
"+",
"\">=\"",
"+",
"ver_tuple_to_str",
"(",
"min_versions",
"[",
"req",
"]",
")",
"if",
"req",
"in",
"version_strictly",
":",
"req_str",
"+=",
"\",<\"",
"+",
"ver_tuple_to_str",
"(",
"min_versions",
"[",
"req",
"]",
"[",
":",
"-",
"1",
"]",
")",
"+",
"\".\"",
"+",
"str",
"(",
"min_versions",
"[",
"req",
"]",
"[",
"-",
"1",
"]",
"+",
"1",
")",
"reqs",
".",
"append",
"(",
"req_str",
")",
"return",
"reqs"
] | 42.222222 | 20.666667 |
def show_matrix(es, fs, t, a):
    '''
    Render the alignment matrix for the sentence pair (es, fs) with the
    viterbi alignment marked, e.g.::

           fs
        -------------
       e|x| | | |
       s| |x| | |
        | | | |x|
        | | |x| |
        -------------
    '''
    alignment = viterbi_alignment(es, fs, t, a).items()
    return matrix(len(es), len(fs), alignment, es, fs)
|
[
"def",
"show_matrix",
"(",
"es",
",",
"fs",
",",
"t",
",",
"a",
")",
":",
"max_a",
"=",
"viterbi_alignment",
"(",
"es",
",",
"fs",
",",
"t",
",",
"a",
")",
".",
"items",
"(",
")",
"m",
"=",
"len",
"(",
"es",
")",
"n",
"=",
"len",
"(",
"fs",
")",
"return",
"matrix",
"(",
"m",
",",
"n",
",",
"max_a",
",",
"es",
",",
"fs",
")"
] | 20.875 | 22.25 |
def get_transformation(self, server_hardware_type_uri, enclosure_group_uri):
    """
    Transforms an existing profile template by supplying a new server hardware
    type and enclosure group or both.  A profile template will be returned with
    a new configuration based on the capabilities of the supplied server
    hardware type and/or enclosure group.  All configured connections will have
    their port assignments set to 'Auto.'

    The new profile template can subsequently be used in the update method, but
    is not guaranteed to pass validation.  Any incompatibilities will be
    flagged when the transformed server profile template is submitted.

    Note:
        This method is available for API version 300 or later.

    Args:
        server_hardware_type_uri: The URI of the new server hardware type.
        enclosure_group_uri: The URI of the new enclosure group.

    Returns:
        dict: The server profile template resource.
    """
    # Pass the arguments explicitly instead of format(**locals()).
    query = self.TRANSFORMATION_PATH.format(
        server_hardware_type_uri=server_hardware_type_uri,
        enclosure_group_uri=enclosure_group_uri)
    return self._helper.do_get("{}{}".format(self.data["uri"], query))
|
[
"def",
"get_transformation",
"(",
"self",
",",
"server_hardware_type_uri",
",",
"enclosure_group_uri",
")",
":",
"query_params",
"=",
"self",
".",
"TRANSFORMATION_PATH",
".",
"format",
"(",
"*",
"*",
"locals",
"(",
")",
")",
"uri",
"=",
"\"{}{}\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"\"uri\"",
"]",
",",
"query_params",
")",
"return",
"self",
".",
"_helper",
".",
"do_get",
"(",
"uri",
")"
] | 52.545455 | 34.909091 |
def _rule_block(self):
""" Parses the production rule::
block : NAME '{' option* '}'
Returns tuple (name, options_list).
"""
name = self._get_token(self.RE_NAME)
self._expect_token('{')
# consume additional options if available
options = []
while self._lookahead_token() != '}':
options.append(self._rule_option())
self._expect_token('}')
return [name, options]
|
[
"def",
"_rule_block",
"(",
"self",
")",
":",
"name",
"=",
"self",
".",
"_get_token",
"(",
"self",
".",
"RE_NAME",
")",
"self",
".",
"_expect_token",
"(",
"'{'",
")",
"# consume additional options if available",
"options",
"=",
"[",
"]",
"while",
"self",
".",
"_lookahead_token",
"(",
")",
"!=",
"'}'",
":",
"options",
".",
"append",
"(",
"self",
".",
"_rule_option",
"(",
")",
")",
"self",
".",
"_expect_token",
"(",
"'}'",
")",
"return",
"[",
"name",
",",
"options",
"]"
] | 27.352941 | 15.411765 |
def setMLPrefix(self, sMeshLocalPrefix):
"""set mesh local prefix"""
print '%s call setMLPrefix' % self.port
try:
cmd = 'dataset meshlocalprefix %s' % sMeshLocalPrefix
self.hasActiveDatasetToCommit = True
return self.__sendCommand(cmd)[0] == 'Done'
except Exception, e:
ModuleHelper.WriteIntoDebugLogger("setMLPrefix() Error: " + str(e))
|
[
"def",
"setMLPrefix",
"(",
"self",
",",
"sMeshLocalPrefix",
")",
":",
"print",
"'%s call setMLPrefix'",
"%",
"self",
".",
"port",
"try",
":",
"cmd",
"=",
"'dataset meshlocalprefix %s'",
"%",
"sMeshLocalPrefix",
"self",
".",
"hasActiveDatasetToCommit",
"=",
"True",
"return",
"self",
".",
"__sendCommand",
"(",
"cmd",
")",
"[",
"0",
"]",
"==",
"'Done'",
"except",
"Exception",
",",
"e",
":",
"ModuleHelper",
".",
"WriteIntoDebugLogger",
"(",
"\"setMLPrefix() Error: \"",
"+",
"str",
"(",
"e",
")",
")"
] | 45.444444 | 14.888889 |
def _allocate_ids_async(cls, size=None, max=None, parent=None,
**ctx_options):
"""Allocates a range of key IDs for this model class.
This is the asynchronous version of Model._allocate_ids().
"""
from . import tasklets
ctx = tasklets.get_context()
cls._pre_allocate_ids_hook(size, max, parent)
key = Key(cls._get_kind(), None, parent=parent)
fut = ctx.allocate_ids(key, size=size, max=max, **ctx_options)
post_hook = cls._post_allocate_ids_hook
if not cls._is_default_hook(Model._default_post_allocate_ids_hook,
post_hook):
fut.add_immediate_callback(post_hook, size, max, parent, fut)
return fut
|
[
"def",
"_allocate_ids_async",
"(",
"cls",
",",
"size",
"=",
"None",
",",
"max",
"=",
"None",
",",
"parent",
"=",
"None",
",",
"*",
"*",
"ctx_options",
")",
":",
"from",
".",
"import",
"tasklets",
"ctx",
"=",
"tasklets",
".",
"get_context",
"(",
")",
"cls",
".",
"_pre_allocate_ids_hook",
"(",
"size",
",",
"max",
",",
"parent",
")",
"key",
"=",
"Key",
"(",
"cls",
".",
"_get_kind",
"(",
")",
",",
"None",
",",
"parent",
"=",
"parent",
")",
"fut",
"=",
"ctx",
".",
"allocate_ids",
"(",
"key",
",",
"size",
"=",
"size",
",",
"max",
"=",
"max",
",",
"*",
"*",
"ctx_options",
")",
"post_hook",
"=",
"cls",
".",
"_post_allocate_ids_hook",
"if",
"not",
"cls",
".",
"_is_default_hook",
"(",
"Model",
".",
"_default_post_allocate_ids_hook",
",",
"post_hook",
")",
":",
"fut",
".",
"add_immediate_callback",
"(",
"post_hook",
",",
"size",
",",
"max",
",",
"parent",
",",
"fut",
")",
"return",
"fut"
] | 43.125 | 15.125 |
def detect_registry_url(client, auto_login=True):
"""Return a URL of the Docker registry."""
repo = client.repo
config = repo.config_reader()
# Find registry URL in .git/config
remote_url = None
try:
registry_url = config.get_value('renku', 'registry', None)
except NoSectionError:
registry_url = None
remote_branch = repo.head.reference.tracking_branch()
if remote_branch is not None:
remote_name = remote_branch.remote_name
config_section = 'renku "{remote_name}"'.format(
remote_name=remote_name
)
try:
registry_url = config.get_value(
config_section, 'registry', registry_url
)
except NoSectionError:
pass
remote_url = repo.remotes[remote_name].url
if registry_url:
# Look in [renku] and [renku "{remote_name}"] for registry_url key.
url = GitURL.parse(registry_url)
elif remote_url:
# Use URL based on remote configuration.
url = GitURL.parse(remote_url)
# Replace gitlab. with registry. unless running on gitlab.com.
hostname_parts = url.hostname.split('.')
if len(hostname_parts) > 2 and hostname_parts[0] == 'gitlab':
hostname_parts = hostname_parts[1:]
hostname = '.'.join(['registry'] + hostname_parts)
url = attr.evolve(url, hostname=hostname)
else:
raise errors.ConfigurationError(
'Configure renku.repository_url or Git remote.'
)
if auto_login and url.username and url.password:
try:
subprocess.run([
'docker',
'login',
url.hostname,
'-u',
url.username,
'--password-stdin',
],
check=True,
input=url.password.encode('utf-8'))
except subprocess.CalledProcessError:
raise errors.AuthenticationError(
'Check configuration of password or token in the registry URL'
)
return url
|
[
"def",
"detect_registry_url",
"(",
"client",
",",
"auto_login",
"=",
"True",
")",
":",
"repo",
"=",
"client",
".",
"repo",
"config",
"=",
"repo",
".",
"config_reader",
"(",
")",
"# Find registry URL in .git/config",
"remote_url",
"=",
"None",
"try",
":",
"registry_url",
"=",
"config",
".",
"get_value",
"(",
"'renku'",
",",
"'registry'",
",",
"None",
")",
"except",
"NoSectionError",
":",
"registry_url",
"=",
"None",
"remote_branch",
"=",
"repo",
".",
"head",
".",
"reference",
".",
"tracking_branch",
"(",
")",
"if",
"remote_branch",
"is",
"not",
"None",
":",
"remote_name",
"=",
"remote_branch",
".",
"remote_name",
"config_section",
"=",
"'renku \"{remote_name}\"'",
".",
"format",
"(",
"remote_name",
"=",
"remote_name",
")",
"try",
":",
"registry_url",
"=",
"config",
".",
"get_value",
"(",
"config_section",
",",
"'registry'",
",",
"registry_url",
")",
"except",
"NoSectionError",
":",
"pass",
"remote_url",
"=",
"repo",
".",
"remotes",
"[",
"remote_name",
"]",
".",
"url",
"if",
"registry_url",
":",
"# Look in [renku] and [renku \"{remote_name}\"] for registry_url key.",
"url",
"=",
"GitURL",
".",
"parse",
"(",
"registry_url",
")",
"elif",
"remote_url",
":",
"# Use URL based on remote configuration.",
"url",
"=",
"GitURL",
".",
"parse",
"(",
"remote_url",
")",
"# Replace gitlab. with registry. unless running on gitlab.com.",
"hostname_parts",
"=",
"url",
".",
"hostname",
".",
"split",
"(",
"'.'",
")",
"if",
"len",
"(",
"hostname_parts",
")",
">",
"2",
"and",
"hostname_parts",
"[",
"0",
"]",
"==",
"'gitlab'",
":",
"hostname_parts",
"=",
"hostname_parts",
"[",
"1",
":",
"]",
"hostname",
"=",
"'.'",
".",
"join",
"(",
"[",
"'registry'",
"]",
"+",
"hostname_parts",
")",
"url",
"=",
"attr",
".",
"evolve",
"(",
"url",
",",
"hostname",
"=",
"hostname",
")",
"else",
":",
"raise",
"errors",
".",
"ConfigurationError",
"(",
"'Configure renku.repository_url or Git remote.'",
")",
"if",
"auto_login",
"and",
"url",
".",
"username",
"and",
"url",
".",
"password",
":",
"try",
":",
"subprocess",
".",
"run",
"(",
"[",
"'docker'",
",",
"'login'",
",",
"url",
".",
"hostname",
",",
"'-u'",
",",
"url",
".",
"username",
",",
"'--password-stdin'",
",",
"]",
",",
"check",
"=",
"True",
",",
"input",
"=",
"url",
".",
"password",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"except",
"subprocess",
".",
"CalledProcessError",
":",
"raise",
"errors",
".",
"AuthenticationError",
"(",
"'Check configuration of password or token in the registry URL'",
")",
"return",
"url"
] | 32.650794 | 18.555556 |
def relation_get(attribute=None, unit=None, rid=None):
"""Attempt to use leader-get if supported in the current version of Juju,
otherwise falls back on relation-get.
Note that we only attempt to use leader-get if the provided rid is a peer
relation id or no relation id is provided (in which case we assume we are
within the peer relation context).
"""
try:
if rid in relation_ids('cluster'):
return leader_get(attribute, rid)
else:
raise NotImplementedError
except NotImplementedError:
return _relation_get(attribute=attribute, rid=rid, unit=unit)
|
[
"def",
"relation_get",
"(",
"attribute",
"=",
"None",
",",
"unit",
"=",
"None",
",",
"rid",
"=",
"None",
")",
":",
"try",
":",
"if",
"rid",
"in",
"relation_ids",
"(",
"'cluster'",
")",
":",
"return",
"leader_get",
"(",
"attribute",
",",
"rid",
")",
"else",
":",
"raise",
"NotImplementedError",
"except",
"NotImplementedError",
":",
"return",
"_relation_get",
"(",
"attribute",
"=",
"attribute",
",",
"rid",
"=",
"rid",
",",
"unit",
"=",
"unit",
")"
] | 41.066667 | 15.866667 |
def simple_in_memory_settings(cls):
"""
Decorator that returns a class that "persists" data in-memory. Mostly
useful for testing
:param cls: the class whose features should be persisted in-memory
:return: A new class that will persist features in memory
"""
class Settings(ff.PersistenceSettings):
id_provider = ff.UuidProvider()
key_builder = ff.StringDelimitedKeyBuilder()
database = ff.InMemoryDatabase(key_builder=key_builder)
class Model(cls, Settings):
pass
Model.__name__ = cls.__name__
Model.__module__ = cls.__module__
return Model
|
[
"def",
"simple_in_memory_settings",
"(",
"cls",
")",
":",
"class",
"Settings",
"(",
"ff",
".",
"PersistenceSettings",
")",
":",
"id_provider",
"=",
"ff",
".",
"UuidProvider",
"(",
")",
"key_builder",
"=",
"ff",
".",
"StringDelimitedKeyBuilder",
"(",
")",
"database",
"=",
"ff",
".",
"InMemoryDatabase",
"(",
"key_builder",
"=",
"key_builder",
")",
"class",
"Model",
"(",
"cls",
",",
"Settings",
")",
":",
"pass",
"Model",
".",
"__name__",
"=",
"cls",
".",
"__name__",
"Model",
".",
"__module__",
"=",
"cls",
".",
"__module__",
"return",
"Model"
] | 31.736842 | 17.736842 |
def _filter_child_model_fields(cls, fields):
""" Keep only related model fields.
Example: Inherited models: A -> B -> C
B has one-to-many relationship to BMany.
after inspection BMany would have links to B and C. Keep only B. Parent
model A could not be used (It would not be in fields)
:param list fields: model fields.
:return list fields: filtered fields.
"""
indexes_to_remove = set([])
for index1, field1 in enumerate(fields):
for index2, field2 in enumerate(fields):
if index1 < index2 and index1 not in indexes_to_remove and\
index2 not in indexes_to_remove:
if issubclass(field1.related_model, field2.related_model):
indexes_to_remove.add(index1)
if issubclass(field2.related_model, field1.related_model):
indexes_to_remove.add(index2)
fields = [field for index, field in enumerate(fields)
if index not in indexes_to_remove]
return fields
|
[
"def",
"_filter_child_model_fields",
"(",
"cls",
",",
"fields",
")",
":",
"indexes_to_remove",
"=",
"set",
"(",
"[",
"]",
")",
"for",
"index1",
",",
"field1",
"in",
"enumerate",
"(",
"fields",
")",
":",
"for",
"index2",
",",
"field2",
"in",
"enumerate",
"(",
"fields",
")",
":",
"if",
"index1",
"<",
"index2",
"and",
"index1",
"not",
"in",
"indexes_to_remove",
"and",
"index2",
"not",
"in",
"indexes_to_remove",
":",
"if",
"issubclass",
"(",
"field1",
".",
"related_model",
",",
"field2",
".",
"related_model",
")",
":",
"indexes_to_remove",
".",
"add",
"(",
"index1",
")",
"if",
"issubclass",
"(",
"field2",
".",
"related_model",
",",
"field1",
".",
"related_model",
")",
":",
"indexes_to_remove",
".",
"add",
"(",
"index2",
")",
"fields",
"=",
"[",
"field",
"for",
"index",
",",
"field",
"in",
"enumerate",
"(",
"fields",
")",
"if",
"index",
"not",
"in",
"indexes_to_remove",
"]",
"return",
"fields"
] | 40 | 20.518519 |
def get_float(self,
key,
is_list=False,
is_optional=False,
is_secret=False,
is_local=False,
default=None,
options=None):
"""
Get a the value corresponding to the key and converts it to `float`/`list(float)`.
Args:
key: the dict key.
is_list: If this is one element or a list of elements.
is_optional: To raise an error if key was not found.
is_secret: If the key is a secret.
is_local: If the key is a local to this service.
default: default value if is_optional is True.
options: list/tuple if provided, the value must be one of these values.
Returns:
`float`: value corresponding to the key.
"""
if is_list:
return self._get_typed_list_value(key=key,
target_type=float,
type_convert=float,
is_optional=is_optional,
is_secret=is_secret,
is_local=is_local,
default=default,
options=options)
return self._get_typed_value(key=key,
target_type=float,
type_convert=float,
is_optional=is_optional,
is_secret=is_secret,
is_local=is_local,
default=default,
options=options)
|
[
"def",
"get_float",
"(",
"self",
",",
"key",
",",
"is_list",
"=",
"False",
",",
"is_optional",
"=",
"False",
",",
"is_secret",
"=",
"False",
",",
"is_local",
"=",
"False",
",",
"default",
"=",
"None",
",",
"options",
"=",
"None",
")",
":",
"if",
"is_list",
":",
"return",
"self",
".",
"_get_typed_list_value",
"(",
"key",
"=",
"key",
",",
"target_type",
"=",
"float",
",",
"type_convert",
"=",
"float",
",",
"is_optional",
"=",
"is_optional",
",",
"is_secret",
"=",
"is_secret",
",",
"is_local",
"=",
"is_local",
",",
"default",
"=",
"default",
",",
"options",
"=",
"options",
")",
"return",
"self",
".",
"_get_typed_value",
"(",
"key",
"=",
"key",
",",
"target_type",
"=",
"float",
",",
"type_convert",
"=",
"float",
",",
"is_optional",
"=",
"is_optional",
",",
"is_secret",
"=",
"is_secret",
",",
"is_local",
"=",
"is_local",
",",
"default",
"=",
"default",
",",
"options",
"=",
"options",
")"
] | 43.926829 | 19.146341 |
def _renorm(args: Dict[str, Any]):
"""Renormalizes the state using the norm arg."""
state = _state_shard(args)
# If our gate is so bad that we have norm of zero, we have bigger problems.
state /= np.sqrt(args['norm_squared'])
|
[
"def",
"_renorm",
"(",
"args",
":",
"Dict",
"[",
"str",
",",
"Any",
"]",
")",
":",
"state",
"=",
"_state_shard",
"(",
"args",
")",
"# If our gate is so bad that we have norm of zero, we have bigger problems.",
"state",
"/=",
"np",
".",
"sqrt",
"(",
"args",
"[",
"'norm_squared'",
"]",
")"
] | 47.4 | 11.4 |
def create_api_gateway_routes( self,
lambda_arn,
api_name=None,
api_key_required=False,
authorization_type='NONE',
authorizer=None,
cors_options=None,
description=None,
endpoint_configuration=None
):
"""
Create the API Gateway for this Zappa deployment.
Returns the new RestAPI CF resource.
"""
restapi = troposphere.apigateway.RestApi('Api')
restapi.Name = api_name or lambda_arn.split(':')[-1]
if not description:
description = 'Created automatically by Zappa.'
restapi.Description = description
endpoint_configuration = [] if endpoint_configuration is None else endpoint_configuration
if self.boto_session.region_name == "us-gov-west-1":
endpoint_configuration.append("REGIONAL")
if endpoint_configuration:
endpoint = troposphere.apigateway.EndpointConfiguration()
endpoint.Types = list(set(endpoint_configuration))
restapi.EndpointConfiguration = endpoint
if self.apigateway_policy:
restapi.Policy = json.loads(self.apigateway_policy)
self.cf_template.add_resource(restapi)
root_id = troposphere.GetAtt(restapi, 'RootResourceId')
invocation_prefix = "aws" if self.boto_session.region_name != "us-gov-west-1" else "aws-us-gov"
invocations_uri = 'arn:' + invocation_prefix + ':apigateway:' + self.boto_session.region_name + ':lambda:path/2015-03-31/functions/' + lambda_arn + '/invocations'
##
# The Resources
##
authorizer_resource = None
if authorizer:
authorizer_lambda_arn = authorizer.get('arn', lambda_arn)
lambda_uri = 'arn:{invocation_prefix}:apigateway:{region_name}:lambda:path/2015-03-31/functions/{lambda_arn}/invocations'.format(
invocation_prefix=invocation_prefix,
region_name=self.boto_session.region_name,
lambda_arn=authorizer_lambda_arn
)
authorizer_resource = self.create_authorizer(
restapi, lambda_uri, authorizer
)
self.create_and_setup_methods( restapi,
root_id,
api_key_required,
invocations_uri,
authorization_type,
authorizer_resource,
0
)
if cors_options:
self.create_and_setup_cors( restapi,
root_id,
invocations_uri,
0,
cors_options
)
resource = troposphere.apigateway.Resource('ResourceAnyPathSlashed')
self.cf_api_resources.append(resource.title)
resource.RestApiId = troposphere.Ref(restapi)
resource.ParentId = root_id
resource.PathPart = "{proxy+}"
self.cf_template.add_resource(resource)
self.create_and_setup_methods( restapi,
resource,
api_key_required,
invocations_uri,
authorization_type,
authorizer_resource,
1
) # pragma: no cover
if cors_options:
self.create_and_setup_cors( restapi,
resource,
invocations_uri,
1,
cors_options
) # pragma: no cover
return restapi
|
[
"def",
"create_api_gateway_routes",
"(",
"self",
",",
"lambda_arn",
",",
"api_name",
"=",
"None",
",",
"api_key_required",
"=",
"False",
",",
"authorization_type",
"=",
"'NONE'",
",",
"authorizer",
"=",
"None",
",",
"cors_options",
"=",
"None",
",",
"description",
"=",
"None",
",",
"endpoint_configuration",
"=",
"None",
")",
":",
"restapi",
"=",
"troposphere",
".",
"apigateway",
".",
"RestApi",
"(",
"'Api'",
")",
"restapi",
".",
"Name",
"=",
"api_name",
"or",
"lambda_arn",
".",
"split",
"(",
"':'",
")",
"[",
"-",
"1",
"]",
"if",
"not",
"description",
":",
"description",
"=",
"'Created automatically by Zappa.'",
"restapi",
".",
"Description",
"=",
"description",
"endpoint_configuration",
"=",
"[",
"]",
"if",
"endpoint_configuration",
"is",
"None",
"else",
"endpoint_configuration",
"if",
"self",
".",
"boto_session",
".",
"region_name",
"==",
"\"us-gov-west-1\"",
":",
"endpoint_configuration",
".",
"append",
"(",
"\"REGIONAL\"",
")",
"if",
"endpoint_configuration",
":",
"endpoint",
"=",
"troposphere",
".",
"apigateway",
".",
"EndpointConfiguration",
"(",
")",
"endpoint",
".",
"Types",
"=",
"list",
"(",
"set",
"(",
"endpoint_configuration",
")",
")",
"restapi",
".",
"EndpointConfiguration",
"=",
"endpoint",
"if",
"self",
".",
"apigateway_policy",
":",
"restapi",
".",
"Policy",
"=",
"json",
".",
"loads",
"(",
"self",
".",
"apigateway_policy",
")",
"self",
".",
"cf_template",
".",
"add_resource",
"(",
"restapi",
")",
"root_id",
"=",
"troposphere",
".",
"GetAtt",
"(",
"restapi",
",",
"'RootResourceId'",
")",
"invocation_prefix",
"=",
"\"aws\"",
"if",
"self",
".",
"boto_session",
".",
"region_name",
"!=",
"\"us-gov-west-1\"",
"else",
"\"aws-us-gov\"",
"invocations_uri",
"=",
"'arn:'",
"+",
"invocation_prefix",
"+",
"':apigateway:'",
"+",
"self",
".",
"boto_session",
".",
"region_name",
"+",
"':lambda:path/2015-03-31/functions/'",
"+",
"lambda_arn",
"+",
"'/invocations'",
"##",
"# The Resources",
"##",
"authorizer_resource",
"=",
"None",
"if",
"authorizer",
":",
"authorizer_lambda_arn",
"=",
"authorizer",
".",
"get",
"(",
"'arn'",
",",
"lambda_arn",
")",
"lambda_uri",
"=",
"'arn:{invocation_prefix}:apigateway:{region_name}:lambda:path/2015-03-31/functions/{lambda_arn}/invocations'",
".",
"format",
"(",
"invocation_prefix",
"=",
"invocation_prefix",
",",
"region_name",
"=",
"self",
".",
"boto_session",
".",
"region_name",
",",
"lambda_arn",
"=",
"authorizer_lambda_arn",
")",
"authorizer_resource",
"=",
"self",
".",
"create_authorizer",
"(",
"restapi",
",",
"lambda_uri",
",",
"authorizer",
")",
"self",
".",
"create_and_setup_methods",
"(",
"restapi",
",",
"root_id",
",",
"api_key_required",
",",
"invocations_uri",
",",
"authorization_type",
",",
"authorizer_resource",
",",
"0",
")",
"if",
"cors_options",
":",
"self",
".",
"create_and_setup_cors",
"(",
"restapi",
",",
"root_id",
",",
"invocations_uri",
",",
"0",
",",
"cors_options",
")",
"resource",
"=",
"troposphere",
".",
"apigateway",
".",
"Resource",
"(",
"'ResourceAnyPathSlashed'",
")",
"self",
".",
"cf_api_resources",
".",
"append",
"(",
"resource",
".",
"title",
")",
"resource",
".",
"RestApiId",
"=",
"troposphere",
".",
"Ref",
"(",
"restapi",
")",
"resource",
".",
"ParentId",
"=",
"root_id",
"resource",
".",
"PathPart",
"=",
"\"{proxy+}\"",
"self",
".",
"cf_template",
".",
"add_resource",
"(",
"resource",
")",
"self",
".",
"create_and_setup_methods",
"(",
"restapi",
",",
"resource",
",",
"api_key_required",
",",
"invocations_uri",
",",
"authorization_type",
",",
"authorizer_resource",
",",
"1",
")",
"# pragma: no cover",
"if",
"cors_options",
":",
"self",
".",
"create_and_setup_cors",
"(",
"restapi",
",",
"resource",
",",
"invocations_uri",
",",
"1",
",",
"cors_options",
")",
"# pragma: no cover",
"return",
"restapi"
] | 45.369565 | 19.23913 |
def read_numeric(fmt, buff, byteorder='big'):
"""Read a numeric value from a file-like object."""
try:
fmt = fmt[byteorder]
return fmt.unpack(buff.read(fmt.size))[0]
except StructError:
return 0
except KeyError as exc:
raise ValueError('Invalid byte order') from exc
|
[
"def",
"read_numeric",
"(",
"fmt",
",",
"buff",
",",
"byteorder",
"=",
"'big'",
")",
":",
"try",
":",
"fmt",
"=",
"fmt",
"[",
"byteorder",
"]",
"return",
"fmt",
".",
"unpack",
"(",
"buff",
".",
"read",
"(",
"fmt",
".",
"size",
")",
")",
"[",
"0",
"]",
"except",
"StructError",
":",
"return",
"0",
"except",
"KeyError",
"as",
"exc",
":",
"raise",
"ValueError",
"(",
"'Invalid byte order'",
")",
"from",
"exc"
] | 34.888889 | 13.777778 |
def _parse_out_variable(self):
"""Internal method to parse the tc_playbook_out_variable arg.
**Example Variable Format**::
#App:1234:status!String,#App:1234:status_code!String
"""
self._out_variables = {}
self._out_variables_type = {}
if self.tcex.default_args.tc_playbook_out_variables:
variables = self.tcex.default_args.tc_playbook_out_variables.strip()
for o in variables.split(','):
# parse the variable to get individual parts
parsed_key = self.parse_variable(o)
variable_name = parsed_key['name']
variable_type = parsed_key['type']
# store the variables in dict by name (e.g. "status_code")
self._out_variables[variable_name] = {'variable': o}
# store the variables in dict by name-type (e.g. "status_code-String")
vt_key = '{}-{}'.format(variable_name, variable_type)
self._out_variables_type[vt_key] = {'variable': o}
|
[
"def",
"_parse_out_variable",
"(",
"self",
")",
":",
"self",
".",
"_out_variables",
"=",
"{",
"}",
"self",
".",
"_out_variables_type",
"=",
"{",
"}",
"if",
"self",
".",
"tcex",
".",
"default_args",
".",
"tc_playbook_out_variables",
":",
"variables",
"=",
"self",
".",
"tcex",
".",
"default_args",
".",
"tc_playbook_out_variables",
".",
"strip",
"(",
")",
"for",
"o",
"in",
"variables",
".",
"split",
"(",
"','",
")",
":",
"# parse the variable to get individual parts",
"parsed_key",
"=",
"self",
".",
"parse_variable",
"(",
"o",
")",
"variable_name",
"=",
"parsed_key",
"[",
"'name'",
"]",
"variable_type",
"=",
"parsed_key",
"[",
"'type'",
"]",
"# store the variables in dict by name (e.g. \"status_code\")",
"self",
".",
"_out_variables",
"[",
"variable_name",
"]",
"=",
"{",
"'variable'",
":",
"o",
"}",
"# store the variables in dict by name-type (e.g. \"status_code-String\")",
"vt_key",
"=",
"'{}-{}'",
".",
"format",
"(",
"variable_name",
",",
"variable_type",
")",
"self",
".",
"_out_variables_type",
"[",
"vt_key",
"]",
"=",
"{",
"'variable'",
":",
"o",
"}"
] | 49.333333 | 19.238095 |
def cont(self, event = None):
"""
Resumes execution after processing a debug event.
@see: dispatch(), loop(), wait()
@type event: L{Event}
@param event: (Optional) Event object returned by L{wait}.
@raise WindowsError: Raises an exception on error.
"""
# If no event object was given, use the last event.
if event is None:
event = self.lastEvent
# Ignore dummy events.
if not event:
return
# Get the event continue status information.
dwProcessId = event.get_pid()
dwThreadId = event.get_tid()
dwContinueStatus = event.continueStatus
# Check if the process is still being debugged.
if self.is_debugee(dwProcessId):
# Try to flush the instruction cache.
try:
if self.system.has_process(dwProcessId):
aProcess = self.system.get_process(dwProcessId)
else:
aProcess = Process(dwProcessId)
aProcess.flush_instruction_cache()
except WindowsError:
pass
# XXX TODO
#
# Try to execute the UnhandledExceptionFilter for second chance
# exceptions, at least when in hostile mode (in normal mode it
# would be breaking compatibility, as users may actually expect
# second chance exceptions to be raised again).
#
# Reportedly in Windows 7 (maybe in Vista too) this seems to be
# happening already. In XP and below the UnhandledExceptionFilter
# was never called for processes being debugged.
# Continue execution of the debugee.
win32.ContinueDebugEvent(dwProcessId, dwThreadId, dwContinueStatus)
# If the event is the last event, forget it.
if event == self.lastEvent:
self.lastEvent = None
|
[
"def",
"cont",
"(",
"self",
",",
"event",
"=",
"None",
")",
":",
"# If no event object was given, use the last event.",
"if",
"event",
"is",
"None",
":",
"event",
"=",
"self",
".",
"lastEvent",
"# Ignore dummy events.",
"if",
"not",
"event",
":",
"return",
"# Get the event continue status information.",
"dwProcessId",
"=",
"event",
".",
"get_pid",
"(",
")",
"dwThreadId",
"=",
"event",
".",
"get_tid",
"(",
")",
"dwContinueStatus",
"=",
"event",
".",
"continueStatus",
"# Check if the process is still being debugged.",
"if",
"self",
".",
"is_debugee",
"(",
"dwProcessId",
")",
":",
"# Try to flush the instruction cache.",
"try",
":",
"if",
"self",
".",
"system",
".",
"has_process",
"(",
"dwProcessId",
")",
":",
"aProcess",
"=",
"self",
".",
"system",
".",
"get_process",
"(",
"dwProcessId",
")",
"else",
":",
"aProcess",
"=",
"Process",
"(",
"dwProcessId",
")",
"aProcess",
".",
"flush_instruction_cache",
"(",
")",
"except",
"WindowsError",
":",
"pass",
"# XXX TODO",
"#",
"# Try to execute the UnhandledExceptionFilter for second chance",
"# exceptions, at least when in hostile mode (in normal mode it",
"# would be breaking compatibility, as users may actually expect",
"# second chance exceptions to be raised again).",
"#",
"# Reportedly in Windows 7 (maybe in Vista too) this seems to be",
"# happening already. In XP and below the UnhandledExceptionFilter",
"# was never called for processes being debugged.",
"# Continue execution of the debugee.",
"win32",
".",
"ContinueDebugEvent",
"(",
"dwProcessId",
",",
"dwThreadId",
",",
"dwContinueStatus",
")",
"# If the event is the last event, forget it.",
"if",
"event",
"==",
"self",
".",
"lastEvent",
":",
"self",
".",
"lastEvent",
"=",
"None"
] | 34.890909 | 20.963636 |
def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs):
"""
`Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
"""
args = (arg1, arg2, arg3, arg4) + args
return self.ThenAt(5, f, *args, **kwargs)
|
[
"def",
"Then5",
"(",
"self",
",",
"f",
",",
"arg1",
",",
"arg2",
",",
"arg3",
",",
"arg4",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"args",
"=",
"(",
"arg1",
",",
"arg2",
",",
"arg3",
",",
"arg4",
")",
"+",
"args",
"return",
"self",
".",
"ThenAt",
"(",
"5",
",",
"f",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 48.333333 | 18 |
def K2onSilicon_main(args=None):
"""Function called when `K2onSilicon` is executed on the command line."""
import argparse
parser = argparse.ArgumentParser(
description="Run K2onSilicon to find which targets in a "
"list call on active silicon for a given K2 campaign.")
parser.add_argument('csv_file', type=str,
help="Name of input csv file with targets, column are "
"Ra_degrees, Dec_degrees, Kepmag")
parser.add_argument('campaign', type=int, help='K2 Campaign number')
args = parser.parse_args(args)
K2onSilicon(args.csv_file, args.campaign)
|
[
"def",
"K2onSilicon_main",
"(",
"args",
"=",
"None",
")",
":",
"import",
"argparse",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"\"Run K2onSilicon to find which targets in a \"",
"\"list call on active silicon for a given K2 campaign.\"",
")",
"parser",
".",
"add_argument",
"(",
"'csv_file'",
",",
"type",
"=",
"str",
",",
"help",
"=",
"\"Name of input csv file with targets, column are \"",
"\"Ra_degrees, Dec_degrees, Kepmag\"",
")",
"parser",
".",
"add_argument",
"(",
"'campaign'",
",",
"type",
"=",
"int",
",",
"help",
"=",
"'K2 Campaign number'",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
"args",
")",
"K2onSilicon",
"(",
"args",
".",
"csv_file",
",",
"args",
".",
"campaign",
")"
] | 53.583333 | 16.833333 |
def add_rule(self, name, callable_):
"""Makes rule 'name' available to all subsequently loaded Jamfiles.
Calling that rule wil relay to 'callable'."""
assert isinstance(name, basestring)
assert callable(callable_)
self.project_rules_.add_rule(name, callable_)
|
[
"def",
"add_rule",
"(",
"self",
",",
"name",
",",
"callable_",
")",
":",
"assert",
"isinstance",
"(",
"name",
",",
"basestring",
")",
"assert",
"callable",
"(",
"callable_",
")",
"self",
".",
"project_rules_",
".",
"add_rule",
"(",
"name",
",",
"callable_",
")"
] | 42 | 9.428571 |
def find_template(repo_dir):
"""Determine which child directory of `repo_dir` is the project template.
:param repo_dir: Local directory of newly cloned repo.
:returns project_template: Relative path to project template.
"""
logger.debug('Searching {} for the project template.'.format(repo_dir))
repo_dir_contents = os.listdir(repo_dir)
project_template = None
for item in repo_dir_contents:
if 'cookiecutter' in item and '{{' in item and '}}' in item:
project_template = item
break
if project_template:
project_template = os.path.join(repo_dir, project_template)
logger.debug(
'The project template appears to be {}'.format(project_template)
)
return project_template
else:
raise NonTemplatedInputDirException
|
[
"def",
"find_template",
"(",
"repo_dir",
")",
":",
"logger",
".",
"debug",
"(",
"'Searching {} for the project template.'",
".",
"format",
"(",
"repo_dir",
")",
")",
"repo_dir_contents",
"=",
"os",
".",
"listdir",
"(",
"repo_dir",
")",
"project_template",
"=",
"None",
"for",
"item",
"in",
"repo_dir_contents",
":",
"if",
"'cookiecutter'",
"in",
"item",
"and",
"'{{'",
"in",
"item",
"and",
"'}}'",
"in",
"item",
":",
"project_template",
"=",
"item",
"break",
"if",
"project_template",
":",
"project_template",
"=",
"os",
".",
"path",
".",
"join",
"(",
"repo_dir",
",",
"project_template",
")",
"logger",
".",
"debug",
"(",
"'The project template appears to be {}'",
".",
"format",
"(",
"project_template",
")",
")",
"return",
"project_template",
"else",
":",
"raise",
"NonTemplatedInputDirException"
] | 33.958333 | 20.875 |
def create(self, **kwargs):
"""Custom _create method to accommodate for issue 11.5.4 and 12.1.1,
Where creation of an object would return 404, despite the object
being created.
"""
tmos_v = self._meta_data['bigip']._meta_data['tmos_version']
if LooseVersion(tmos_v) == LooseVersion('11.5.4') or LooseVersion(
tmos_v) == LooseVersion('12.1.1'):
if 'uri' in self._meta_data:
error = "There was an attempt to assign a new uri to this " \
"resource, the _meta_data['uri'] is %s and it should" \
" not be changed." % (self._meta_data['uri'])
raise URICreationCollision(error)
self._check_exclusive_parameters(**kwargs)
requests_params = self._handle_requests_params(kwargs)
self._check_create_parameters(**kwargs)
# Reduce boolean pairs as specified by the meta_data entry below
for key1, key2 in self._meta_data['reduction_forcing_pairs']:
kwargs = self._reduce_boolean_pair(kwargs, key1, key2)
# Make convenience variable with short names for this method.
_create_uri = self._meta_data['container']._meta_data['uri']
session = self._meta_data['bigip']._meta_data['icr_session']
# We using try/except just in case some HF will fix
# this in 11.5.4
try:
response = session.post(
_create_uri, json=kwargs, **requests_params)
except HTTPError as err:
if err.response.status_code != 404:
raise
if err.response.status_code == 404:
return self._return_object(self._meta_data['container'],
kwargs['name'])
# Make new instance of self
return self._produce_instance(response)
else:
return self._create(**kwargs)
|
[
"def",
"create",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"tmos_v",
"=",
"self",
".",
"_meta_data",
"[",
"'bigip'",
"]",
".",
"_meta_data",
"[",
"'tmos_version'",
"]",
"if",
"LooseVersion",
"(",
"tmos_v",
")",
"==",
"LooseVersion",
"(",
"'11.5.4'",
")",
"or",
"LooseVersion",
"(",
"tmos_v",
")",
"==",
"LooseVersion",
"(",
"'12.1.1'",
")",
":",
"if",
"'uri'",
"in",
"self",
".",
"_meta_data",
":",
"error",
"=",
"\"There was an attempt to assign a new uri to this \"",
"\"resource, the _meta_data['uri'] is %s and it should\"",
"\" not be changed.\"",
"%",
"(",
"self",
".",
"_meta_data",
"[",
"'uri'",
"]",
")",
"raise",
"URICreationCollision",
"(",
"error",
")",
"self",
".",
"_check_exclusive_parameters",
"(",
"*",
"*",
"kwargs",
")",
"requests_params",
"=",
"self",
".",
"_handle_requests_params",
"(",
"kwargs",
")",
"self",
".",
"_check_create_parameters",
"(",
"*",
"*",
"kwargs",
")",
"# Reduce boolean pairs as specified by the meta_data entry below",
"for",
"key1",
",",
"key2",
"in",
"self",
".",
"_meta_data",
"[",
"'reduction_forcing_pairs'",
"]",
":",
"kwargs",
"=",
"self",
".",
"_reduce_boolean_pair",
"(",
"kwargs",
",",
"key1",
",",
"key2",
")",
"# Make convenience variable with short names for this method.",
"_create_uri",
"=",
"self",
".",
"_meta_data",
"[",
"'container'",
"]",
".",
"_meta_data",
"[",
"'uri'",
"]",
"session",
"=",
"self",
".",
"_meta_data",
"[",
"'bigip'",
"]",
".",
"_meta_data",
"[",
"'icr_session'",
"]",
"# We using try/except just in case some HF will fix",
"# this in 11.5.4",
"try",
":",
"response",
"=",
"session",
".",
"post",
"(",
"_create_uri",
",",
"json",
"=",
"kwargs",
",",
"*",
"*",
"requests_params",
")",
"except",
"HTTPError",
"as",
"err",
":",
"if",
"err",
".",
"response",
".",
"status_code",
"!=",
"404",
":",
"raise",
"if",
"err",
".",
"response",
".",
"status_code",
"==",
"404",
":",
"return",
"self",
".",
"_return_object",
"(",
"self",
".",
"_meta_data",
"[",
"'container'",
"]",
",",
"kwargs",
"[",
"'name'",
"]",
")",
"# Make new instance of self",
"return",
"self",
".",
"_produce_instance",
"(",
"response",
")",
"else",
":",
"return",
"self",
".",
"_create",
"(",
"*",
"*",
"kwargs",
")"
] | 47.071429 | 21.857143 |
def hexdump(x, dump=False):
"""Build a tcpdump like hexadecimal view
:param x: a Packet
:param dump: define if the result must be printed or returned in a variable
:returns: a String only when dump=True
"""
s = ""
x = bytes_encode(x)
x_len = len(x)
i = 0
while i < x_len:
s += "%04x " % i
for j in range(16):
if i + j < x_len:
s += "%02X " % orb(x[i + j])
else:
s += " "
s += " %s\n" % sane_color(x[i:i + 16])
i += 16
# remove trailing \n
s = s[:-1] if s.endswith("\n") else s
if dump:
return s
else:
print(s)
|
[
"def",
"hexdump",
"(",
"x",
",",
"dump",
"=",
"False",
")",
":",
"s",
"=",
"\"\"",
"x",
"=",
"bytes_encode",
"(",
"x",
")",
"x_len",
"=",
"len",
"(",
"x",
")",
"i",
"=",
"0",
"while",
"i",
"<",
"x_len",
":",
"s",
"+=",
"\"%04x \"",
"%",
"i",
"for",
"j",
"in",
"range",
"(",
"16",
")",
":",
"if",
"i",
"+",
"j",
"<",
"x_len",
":",
"s",
"+=",
"\"%02X \"",
"%",
"orb",
"(",
"x",
"[",
"i",
"+",
"j",
"]",
")",
"else",
":",
"s",
"+=",
"\" \"",
"s",
"+=",
"\" %s\\n\"",
"%",
"sane_color",
"(",
"x",
"[",
"i",
":",
"i",
"+",
"16",
"]",
")",
"i",
"+=",
"16",
"# remove trailing \\n",
"s",
"=",
"s",
"[",
":",
"-",
"1",
"]",
"if",
"s",
".",
"endswith",
"(",
"\"\\n\"",
")",
"else",
"s",
"if",
"dump",
":",
"return",
"s",
"else",
":",
"print",
"(",
"s",
")"
] | 24.923077 | 17.961538 |
def get_drill_bits_d_imperial():
"""Return array of possible drill diameters in imperial."""
step_32nd = np.arange(0.03125, 0.25, 0.03125)
step_8th = np.arange(0.25, 1.0, 0.125)
step_4th = np.arange(1.0, 2.0, 0.25)
maximum = [2.0]
return np.concatenate((step_32nd,
step_8th,
step_4th,
maximum)) * u.inch
|
[
"def",
"get_drill_bits_d_imperial",
"(",
")",
":",
"step_32nd",
"=",
"np",
".",
"arange",
"(",
"0.03125",
",",
"0.25",
",",
"0.03125",
")",
"step_8th",
"=",
"np",
".",
"arange",
"(",
"0.25",
",",
"1.0",
",",
"0.125",
")",
"step_4th",
"=",
"np",
".",
"arange",
"(",
"1.0",
",",
"2.0",
",",
"0.25",
")",
"maximum",
"=",
"[",
"2.0",
"]",
"return",
"np",
".",
"concatenate",
"(",
"(",
"step_32nd",
",",
"step_8th",
",",
"step_4th",
",",
"maximum",
")",
")",
"*",
"u",
".",
"inch"
] | 36.272727 | 8.727273 |
def load_page(self, payload):
"""
Parses the collection of records out of a list payload.
:param dict payload: The JSON-loaded content.
:return list: The list of records.
"""
if 'meta' in payload and 'key' in payload['meta']:
return payload[payload['meta']['key']]
else:
keys = set(payload.keys())
key = keys - self.META_KEYS
if len(key) == 1:
return payload[key.pop()]
raise TwilioException('Page Records can not be deserialized')
|
[
"def",
"load_page",
"(",
"self",
",",
"payload",
")",
":",
"if",
"'meta'",
"in",
"payload",
"and",
"'key'",
"in",
"payload",
"[",
"'meta'",
"]",
":",
"return",
"payload",
"[",
"payload",
"[",
"'meta'",
"]",
"[",
"'key'",
"]",
"]",
"else",
":",
"keys",
"=",
"set",
"(",
"payload",
".",
"keys",
"(",
")",
")",
"key",
"=",
"keys",
"-",
"self",
".",
"META_KEYS",
"if",
"len",
"(",
"key",
")",
"==",
"1",
":",
"return",
"payload",
"[",
"key",
".",
"pop",
"(",
")",
"]",
"raise",
"TwilioException",
"(",
"'Page Records can not be deserialized'",
")"
] | 34.125 | 14.25 |
def run_model(self, model_run, run_url):
"""Execute the given model run.
Throws a ValueError if the given run specifies an unknown model or if
the model connector is invalid. An EngineException is thrown if running
the model (i.e., communication with the backend) fails.
Parameters
----------
model_run : ModelRunHandle
Handle to model run
run_url : string
URL for model run information
"""
# Get model to verify that it exists and to get connector information
model = self.get_model(model_run.model_id)
if model is None:
raise ValueError('unknown model: ' + model_run.model_id)
# By now there is only one connector. Use the buffered connector to
# avoid closed connection exceptions
RabbitMQConnector(model.connector).run_model(model_run, run_url)
|
[
"def",
"run_model",
"(",
"self",
",",
"model_run",
",",
"run_url",
")",
":",
"# Get model to verify that it exists and to get connector information",
"model",
"=",
"self",
".",
"get_model",
"(",
"model_run",
".",
"model_id",
")",
"if",
"model",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'unknown model: '",
"+",
"model_run",
".",
"model_id",
")",
"# By now there is only one connector. Use the buffered connector to",
"# avoid closed connection exceptions",
"RabbitMQConnector",
"(",
"model",
".",
"connector",
")",
".",
"run_model",
"(",
"model_run",
",",
"run_url",
")"
] | 42.190476 | 19.809524 |
def one_hot(x, size, dtype=np.float32):
"""Make a n+1 dim one-hot array from n dim int-categorical array."""
return np.array(x[..., np.newaxis] == np.arange(size), dtype)
|
[
"def",
"one_hot",
"(",
"x",
",",
"size",
",",
"dtype",
"=",
"np",
".",
"float32",
")",
":",
"return",
"np",
".",
"array",
"(",
"x",
"[",
"...",
",",
"np",
".",
"newaxis",
"]",
"==",
"np",
".",
"arange",
"(",
"size",
")",
",",
"dtype",
")"
] | 57.333333 | 8 |
def validate_submission(self, filename):
"""Validates submission.
Args:
filename: submission filename
Returns:
submission metadata or None if submission is invalid
"""
self._prepare_temp_dir()
# Convert filename to be absolute path, relative path might cause problems
# with mounting directory in Docker
filename = os.path.abspath(filename)
# extract submission
if not self._extract_submission(filename):
return None
# verify submission size
if not self._verify_submission_size():
return None
# Load metadata
metadata = self._load_and_verify_metadata()
if not metadata:
return None
submission_type = metadata['type']
# verify docker container size
if not self._verify_docker_image_size(metadata['container_gpu']):
return None
# Try to run submission on sample data
self._prepare_sample_data(submission_type)
if not self._run_submission(metadata):
logging.error('Failure while running submission')
return None
if not self._verify_output(submission_type):
logging.warning('Some of the outputs of your submission are invalid or '
'missing. You submission still will be evaluation '
'but you might get lower score.')
return metadata
|
[
"def",
"validate_submission",
"(",
"self",
",",
"filename",
")",
":",
"self",
".",
"_prepare_temp_dir",
"(",
")",
"# Convert filename to be absolute path, relative path might cause problems",
"# with mounting directory in Docker",
"filename",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"filename",
")",
"# extract submission",
"if",
"not",
"self",
".",
"_extract_submission",
"(",
"filename",
")",
":",
"return",
"None",
"# verify submission size",
"if",
"not",
"self",
".",
"_verify_submission_size",
"(",
")",
":",
"return",
"None",
"# Load metadata",
"metadata",
"=",
"self",
".",
"_load_and_verify_metadata",
"(",
")",
"if",
"not",
"metadata",
":",
"return",
"None",
"submission_type",
"=",
"metadata",
"[",
"'type'",
"]",
"# verify docker container size",
"if",
"not",
"self",
".",
"_verify_docker_image_size",
"(",
"metadata",
"[",
"'container_gpu'",
"]",
")",
":",
"return",
"None",
"# Try to run submission on sample data",
"self",
".",
"_prepare_sample_data",
"(",
"submission_type",
")",
"if",
"not",
"self",
".",
"_run_submission",
"(",
"metadata",
")",
":",
"logging",
".",
"error",
"(",
"'Failure while running submission'",
")",
"return",
"None",
"if",
"not",
"self",
".",
"_verify_output",
"(",
"submission_type",
")",
":",
"logging",
".",
"warning",
"(",
"'Some of the outputs of your submission are invalid or '",
"'missing. You submission still will be evaluation '",
"'but you might get lower score.'",
")",
"return",
"metadata"
] | 34.702703 | 15.918919 |
def setCol(self, x, l):
"""set the x-th column, starting at 0"""
for i in xrange(0, self.__size):
self.setCell(x, i, l[i])
|
[
"def",
"setCol",
"(",
"self",
",",
"x",
",",
"l",
")",
":",
"for",
"i",
"in",
"xrange",
"(",
"0",
",",
"self",
".",
"__size",
")",
":",
"self",
".",
"setCell",
"(",
"x",
",",
"i",
",",
"l",
"[",
"i",
"]",
")"
] | 36.75 | 5.25 |
def fit(self, y, **kwargs):
"""
Sets up y for the histogram and checks to
ensure that ``y`` is of the correct data type.
Fit calls draw.
Parameters
----------
y : an array of one dimension or a pandas Series
kwargs : dict
keyword arguments passed to scikit-learn API.
"""
#throw an error if y has more than 1 column
if y.ndim > 1:
raise YellowbrickValueError("y needs to be an array or Series with one dimension")
# Handle the target name if it is None.
if self.target is None:
self.target = 'Frequency'
self.draw(y)
return self
|
[
"def",
"fit",
"(",
"self",
",",
"y",
",",
"*",
"*",
"kwargs",
")",
":",
"#throw an error if y has more than 1 column",
"if",
"y",
".",
"ndim",
">",
"1",
":",
"raise",
"YellowbrickValueError",
"(",
"\"y needs to be an array or Series with one dimension\"",
")",
"# Handle the target name if it is None.",
"if",
"self",
".",
"target",
"is",
"None",
":",
"self",
".",
"target",
"=",
"'Frequency'",
"self",
".",
"draw",
"(",
"y",
")",
"return",
"self"
] | 26.68 | 21.32 |
def identify(self, text, **kwargs):
"""
Identify language.
Identifies the language of the input text.
:param str text: Input text in UTF-8 format.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse
"""
if text is None:
raise ValueError('text must be provided')
headers = {}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
sdk_headers = get_sdk_headers('language_translator', 'V3', 'identify')
headers.update(sdk_headers)
params = {'version': self.version}
data = text
headers['content-type'] = 'text/plain'
url = '/v3/identify'
response = self.request(
method='POST',
url=url,
headers=headers,
params=params,
data=data,
accept_json=True)
return response
|
[
"def",
"identify",
"(",
"self",
",",
"text",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"text",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'text must be provided'",
")",
"headers",
"=",
"{",
"}",
"if",
"'headers'",
"in",
"kwargs",
":",
"headers",
".",
"update",
"(",
"kwargs",
".",
"get",
"(",
"'headers'",
")",
")",
"sdk_headers",
"=",
"get_sdk_headers",
"(",
"'language_translator'",
",",
"'V3'",
",",
"'identify'",
")",
"headers",
".",
"update",
"(",
"sdk_headers",
")",
"params",
"=",
"{",
"'version'",
":",
"self",
".",
"version",
"}",
"data",
"=",
"text",
"headers",
"[",
"'content-type'",
"]",
"=",
"'text/plain'",
"url",
"=",
"'/v3/identify'",
"response",
"=",
"self",
".",
"request",
"(",
"method",
"=",
"'POST'",
",",
"url",
"=",
"url",
",",
"headers",
"=",
"headers",
",",
"params",
"=",
"params",
",",
"data",
"=",
"data",
",",
"accept_json",
"=",
"True",
")",
"return",
"response"
] | 28.742857 | 19.2 |
def update_aliases(self):
"""Get aliases information from room state.
Returns:
boolean: True if the aliases changed, False if not
"""
try:
response = self.client.api.get_room_state(self.room_id)
for chunk in response:
if "content" in chunk and "aliases" in chunk["content"]:
if chunk["content"]["aliases"] != self.aliases:
self.aliases = chunk["content"]["aliases"]
return True
else:
return False
except MatrixRequestError:
return False
|
[
"def",
"update_aliases",
"(",
"self",
")",
":",
"try",
":",
"response",
"=",
"self",
".",
"client",
".",
"api",
".",
"get_room_state",
"(",
"self",
".",
"room_id",
")",
"for",
"chunk",
"in",
"response",
":",
"if",
"\"content\"",
"in",
"chunk",
"and",
"\"aliases\"",
"in",
"chunk",
"[",
"\"content\"",
"]",
":",
"if",
"chunk",
"[",
"\"content\"",
"]",
"[",
"\"aliases\"",
"]",
"!=",
"self",
".",
"aliases",
":",
"self",
".",
"aliases",
"=",
"chunk",
"[",
"\"content\"",
"]",
"[",
"\"aliases\"",
"]",
"return",
"True",
"else",
":",
"return",
"False",
"except",
"MatrixRequestError",
":",
"return",
"False"
] | 37.470588 | 17.235294 |
def relaxation_as_linear_operator(method, A, b):
"""Create a linear operator that applies a relaxation method for the given right-hand-side.
Parameters
----------
methods : {tuple or string}
Relaxation descriptor: Each tuple must be of the form ('method','opts')
where 'method' is the name of a supported smoother, e.g., gauss_seidel,
and 'opts' a dict of keyword arguments to the smoother, e.g., opts =
{'sweep':symmetric}. If string, must be that of a supported smoother,
e.g., gauss_seidel.
Returns
-------
linear operator that applies the relaxation method to a vector for a
fixed right-hand-side, b.
Notes
-----
This method is primarily used to improve B during the aggregation setup
phase. Here b = 0, and each relaxation call can improve the quality of B,
especially near the boundaries.
Examples
--------
>>> from pyamg.gallery import poisson
>>> from pyamg.util.utils import relaxation_as_linear_operator
>>> import numpy as np
>>> A = poisson((100,100), format='csr') # matrix
>>> B = np.ones((A.shape[0],1)) # Candidate vector
>>> b = np.zeros((A.shape[0])) # RHS
>>> relax = relaxation_as_linear_operator('gauss_seidel', A, b)
>>> B = relax*B
"""
from pyamg import relaxation
from scipy.sparse.linalg.interface import LinearOperator
import pyamg.multilevel
def unpack_arg(v):
if isinstance(v, tuple):
return v[0], v[1]
else:
return v, {}
# setup variables
accepted_methods = ['gauss_seidel', 'block_gauss_seidel', 'sor',
'gauss_seidel_ne', 'gauss_seidel_nr', 'jacobi',
'block_jacobi', 'richardson', 'schwarz',
'strength_based_schwarz', 'jacobi_ne']
b = np.array(b, dtype=A.dtype)
fn, kwargs = unpack_arg(method)
lvl = pyamg.multilevel_solver.level()
lvl.A = A
# Retrieve setup call from relaxation.smoothing for this relaxation method
if not accepted_methods.__contains__(fn):
raise NameError("invalid relaxation method: ", fn)
try:
setup_smoother = getattr(relaxation.smoothing, 'setup_' + fn)
except NameError:
raise NameError("invalid presmoother method: ", fn)
# Get relaxation routine that takes only (A, x, b) as parameters
relax = setup_smoother(lvl, **kwargs)
# Define matvec
def matvec(x):
xcopy = x.copy()
relax(A, xcopy, b)
return xcopy
return LinearOperator(A.shape, matvec, dtype=A.dtype)
|
[
"def",
"relaxation_as_linear_operator",
"(",
"method",
",",
"A",
",",
"b",
")",
":",
"from",
"pyamg",
"import",
"relaxation",
"from",
"scipy",
".",
"sparse",
".",
"linalg",
".",
"interface",
"import",
"LinearOperator",
"import",
"pyamg",
".",
"multilevel",
"def",
"unpack_arg",
"(",
"v",
")",
":",
"if",
"isinstance",
"(",
"v",
",",
"tuple",
")",
":",
"return",
"v",
"[",
"0",
"]",
",",
"v",
"[",
"1",
"]",
"else",
":",
"return",
"v",
",",
"{",
"}",
"# setup variables",
"accepted_methods",
"=",
"[",
"'gauss_seidel'",
",",
"'block_gauss_seidel'",
",",
"'sor'",
",",
"'gauss_seidel_ne'",
",",
"'gauss_seidel_nr'",
",",
"'jacobi'",
",",
"'block_jacobi'",
",",
"'richardson'",
",",
"'schwarz'",
",",
"'strength_based_schwarz'",
",",
"'jacobi_ne'",
"]",
"b",
"=",
"np",
".",
"array",
"(",
"b",
",",
"dtype",
"=",
"A",
".",
"dtype",
")",
"fn",
",",
"kwargs",
"=",
"unpack_arg",
"(",
"method",
")",
"lvl",
"=",
"pyamg",
".",
"multilevel_solver",
".",
"level",
"(",
")",
"lvl",
".",
"A",
"=",
"A",
"# Retrieve setup call from relaxation.smoothing for this relaxation method",
"if",
"not",
"accepted_methods",
".",
"__contains__",
"(",
"fn",
")",
":",
"raise",
"NameError",
"(",
"\"invalid relaxation method: \"",
",",
"fn",
")",
"try",
":",
"setup_smoother",
"=",
"getattr",
"(",
"relaxation",
".",
"smoothing",
",",
"'setup_'",
"+",
"fn",
")",
"except",
"NameError",
":",
"raise",
"NameError",
"(",
"\"invalid presmoother method: \"",
",",
"fn",
")",
"# Get relaxation routine that takes only (A, x, b) as parameters",
"relax",
"=",
"setup_smoother",
"(",
"lvl",
",",
"*",
"*",
"kwargs",
")",
"# Define matvec",
"def",
"matvec",
"(",
"x",
")",
":",
"xcopy",
"=",
"x",
".",
"copy",
"(",
")",
"relax",
"(",
"A",
",",
"xcopy",
",",
"b",
")",
"return",
"xcopy",
"return",
"LinearOperator",
"(",
"A",
".",
"shape",
",",
"matvec",
",",
"dtype",
"=",
"A",
".",
"dtype",
")"
] | 35.191781 | 23.465753 |
def create_projection(self, fov: float = 75.0, near: float = 1.0, far: float = 100.0, aspect_ratio: float = None):
"""
Create a projection matrix with the following parameters.
When ``aspect_ratio`` is not provided the configured aspect
ratio for the window will be used.
Args:
fov (float): Field of view (float)
near (float): Camera near value
far (float): Camrea far value
Keyword Args:
aspect_ratio (float): Aspect ratio of the viewport
Returns:
The projection matrix as a float32 :py:class:`numpy.array`
"""
return matrix44.create_perspective_projection_matrix(
fov,
aspect_ratio or self.window.aspect_ratio,
near,
far,
dtype='f4',
)
|
[
"def",
"create_projection",
"(",
"self",
",",
"fov",
":",
"float",
"=",
"75.0",
",",
"near",
":",
"float",
"=",
"1.0",
",",
"far",
":",
"float",
"=",
"100.0",
",",
"aspect_ratio",
":",
"float",
"=",
"None",
")",
":",
"return",
"matrix44",
".",
"create_perspective_projection_matrix",
"(",
"fov",
",",
"aspect_ratio",
"or",
"self",
".",
"window",
".",
"aspect_ratio",
",",
"near",
",",
"far",
",",
"dtype",
"=",
"'f4'",
",",
")"
] | 34.041667 | 22.208333 |
def add_to_submission(self, submission_id, submission_objects):
"""Adds submission_objects to clinvar collection and update the coresponding submission object with their id
Args:
submission_id(str) : id of the submission to be updated
submission_objects(tuple): a tuple of 2 elements coresponding to a list of variants and a list of case data objects to add to submission
Returns:
updated_submission(obj): an open clinvar submission object, updated
"""
LOG.info("Adding new variants and case data to clinvar submission '%s'", submission_id)
# Insert variant submission_objects into clinvar collection
# Loop over the objects
for var_obj in submission_objects[0]:
try:
result = self.clinvar_collection.insert_one(var_obj)
self.clinvar_submission_collection.update_one({'_id':submission_id}, {'$push': { 'variant_data' : str(result.inserted_id) }}, upsert=True)
except pymongo.errors.DuplicateKeyError:
LOG.error("Attepted to insert a clinvar variant which is already in DB!")
# Insert casedata submission_objects into clinvar collection
if submission_objects[1]:
# Loop over the objects
for case_obj in submission_objects[1]:
try:
result = self.clinvar_collection.insert_one(case_obj)
self.clinvar_submission_collection.update_one({'_id':submission_id}, {'$push': { 'case_data': str(result.inserted_id)}}, upsert=True)
except pymongo.errors.DuplicateKeyError:
LOG.error("One or more casedata object is already present in clinvar collection!")
updated_submission = self.clinvar_submission_collection.find_one_and_update( {'_id':submission_id}, { '$set' : {'updated_at': datetime.now()} }, return_document=pymongo.ReturnDocument.AFTER )
return updated_submission
|
[
"def",
"add_to_submission",
"(",
"self",
",",
"submission_id",
",",
"submission_objects",
")",
":",
"LOG",
".",
"info",
"(",
"\"Adding new variants and case data to clinvar submission '%s'\"",
",",
"submission_id",
")",
"# Insert variant submission_objects into clinvar collection",
"# Loop over the objects",
"for",
"var_obj",
"in",
"submission_objects",
"[",
"0",
"]",
":",
"try",
":",
"result",
"=",
"self",
".",
"clinvar_collection",
".",
"insert_one",
"(",
"var_obj",
")",
"self",
".",
"clinvar_submission_collection",
".",
"update_one",
"(",
"{",
"'_id'",
":",
"submission_id",
"}",
",",
"{",
"'$push'",
":",
"{",
"'variant_data'",
":",
"str",
"(",
"result",
".",
"inserted_id",
")",
"}",
"}",
",",
"upsert",
"=",
"True",
")",
"except",
"pymongo",
".",
"errors",
".",
"DuplicateKeyError",
":",
"LOG",
".",
"error",
"(",
"\"Attepted to insert a clinvar variant which is already in DB!\"",
")",
"# Insert casedata submission_objects into clinvar collection",
"if",
"submission_objects",
"[",
"1",
"]",
":",
"# Loop over the objects",
"for",
"case_obj",
"in",
"submission_objects",
"[",
"1",
"]",
":",
"try",
":",
"result",
"=",
"self",
".",
"clinvar_collection",
".",
"insert_one",
"(",
"case_obj",
")",
"self",
".",
"clinvar_submission_collection",
".",
"update_one",
"(",
"{",
"'_id'",
":",
"submission_id",
"}",
",",
"{",
"'$push'",
":",
"{",
"'case_data'",
":",
"str",
"(",
"result",
".",
"inserted_id",
")",
"}",
"}",
",",
"upsert",
"=",
"True",
")",
"except",
"pymongo",
".",
"errors",
".",
"DuplicateKeyError",
":",
"LOG",
".",
"error",
"(",
"\"One or more casedata object is already present in clinvar collection!\"",
")",
"updated_submission",
"=",
"self",
".",
"clinvar_submission_collection",
".",
"find_one_and_update",
"(",
"{",
"'_id'",
":",
"submission_id",
"}",
",",
"{",
"'$set'",
":",
"{",
"'updated_at'",
":",
"datetime",
".",
"now",
"(",
")",
"}",
"}",
",",
"return_document",
"=",
"pymongo",
".",
"ReturnDocument",
".",
"AFTER",
")",
"return",
"updated_submission"
] | 58 | 37.5 |
def scan_aggs(search, source_aggs, inner_aggs={}, size=10):
"""
Helper function used to iterate over all possible bucket combinations of
``source_aggs``, returning results of ``inner_aggs`` for each. Uses the
``composite`` aggregation under the hood to perform this.
"""
def run_search(**kwargs):
s = search[:0]
s.aggs.bucket('comp', 'composite', sources=source_aggs, size=size, **kwargs)
for agg_name, agg in inner_aggs.items():
s.aggs['comp'][agg_name] = agg
return s.execute()
response = run_search()
while response.aggregations.comp.buckets:
for b in response.aggregations.comp.buckets:
yield b
if 'after_key' in response.aggregations.comp:
after = response.aggregations.comp.after_key
else:
after= response.aggregations.comp.buckets[-1].key
response = run_search(after=after)
|
[
"def",
"scan_aggs",
"(",
"search",
",",
"source_aggs",
",",
"inner_aggs",
"=",
"{",
"}",
",",
"size",
"=",
"10",
")",
":",
"def",
"run_search",
"(",
"*",
"*",
"kwargs",
")",
":",
"s",
"=",
"search",
"[",
":",
"0",
"]",
"s",
".",
"aggs",
".",
"bucket",
"(",
"'comp'",
",",
"'composite'",
",",
"sources",
"=",
"source_aggs",
",",
"size",
"=",
"size",
",",
"*",
"*",
"kwargs",
")",
"for",
"agg_name",
",",
"agg",
"in",
"inner_aggs",
".",
"items",
"(",
")",
":",
"s",
".",
"aggs",
"[",
"'comp'",
"]",
"[",
"agg_name",
"]",
"=",
"agg",
"return",
"s",
".",
"execute",
"(",
")",
"response",
"=",
"run_search",
"(",
")",
"while",
"response",
".",
"aggregations",
".",
"comp",
".",
"buckets",
":",
"for",
"b",
"in",
"response",
".",
"aggregations",
".",
"comp",
".",
"buckets",
":",
"yield",
"b",
"if",
"'after_key'",
"in",
"response",
".",
"aggregations",
".",
"comp",
":",
"after",
"=",
"response",
".",
"aggregations",
".",
"comp",
".",
"after_key",
"else",
":",
"after",
"=",
"response",
".",
"aggregations",
".",
"comp",
".",
"buckets",
"[",
"-",
"1",
"]",
".",
"key",
"response",
"=",
"run_search",
"(",
"after",
"=",
"after",
")"
] | 41.090909 | 17.181818 |
def _randomize_single_subject(data, seed=None):
"""Randomly permute the voxels of the subject.
The subject is organized as Voxel x TR,
this method shuffles the voxel dimension in place.
Parameters
----------
data: 2D array in shape [nVoxels, nTRs]
Activity image data to be shuffled.
seed: Optional[int]
Seed for random state used implicitly for shuffling.
Returns
-------
None.
"""
if seed is not None:
np.random.seed(seed)
np.random.shuffle(data)
|
[
"def",
"_randomize_single_subject",
"(",
"data",
",",
"seed",
"=",
"None",
")",
":",
"if",
"seed",
"is",
"not",
"None",
":",
"np",
".",
"random",
".",
"seed",
"(",
"seed",
")",
"np",
".",
"random",
".",
"shuffle",
"(",
"data",
")"
] | 25.5 | 18.55 |
def gen_batches(data, batch_size):
"""Divide input data into batches.
:param data: input data
:param batch_size: size of each batch
:return: data divided into batches
"""
data = np.array(data)
for i in range(0, data.shape[0], batch_size):
yield data[i:i + batch_size]
|
[
"def",
"gen_batches",
"(",
"data",
",",
"batch_size",
")",
":",
"data",
"=",
"np",
".",
"array",
"(",
"data",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"data",
".",
"shape",
"[",
"0",
"]",
",",
"batch_size",
")",
":",
"yield",
"data",
"[",
"i",
":",
"i",
"+",
"batch_size",
"]"
] | 26.818182 | 11.818182 |
def get_historical_orders(self, symbol=None, side=None,
start=None, end=None, page=None, limit=None):
"""List of KuCoin V1 historical orders.
https://docs.kucoin.com/#get-v1-historical-orders-list
:param symbol: (optional) Name of symbol e.g. KCS-BTC
:type symbol: string
:param side: (optional) buy or sell
:type side: string
:param start: (optional) Start time as unix timestamp
:type start: string
:param end: (optional) End time as unix timestamp
:type end: string
:param page: (optional) Page to fetch
:type page: int
:param limit: (optional) Number of orders
:type limit: int
.. code:: python
orders = client.get_historical_orders(symbol='KCS-BTC')
:returns: ApiResponse
.. code:: python
{
"currentPage": 1,
"pageSize": 50,
"totalNum": 1,
"totalPage": 1,
"items": [
{
"symbol": "SNOV-ETH",
"dealPrice": "0.0000246",
"dealValue": "0.018942",
"amount": "770",
"fee": "0.00001137",
"side": "sell",
"createdAt": 1540080199
}
]
}
:raises: KucoinResponseException, KucoinAPIException
"""
data = {}
if symbol:
data['symbol'] = symbol
if side:
data['side'] = side
if start:
data['startAt'] = start
if end:
data['endAt'] = end
if page:
data['page'] = page
if limit:
data['pageSize'] = limit
return self._get('hist-orders', True, data=data)
|
[
"def",
"get_historical_orders",
"(",
"self",
",",
"symbol",
"=",
"None",
",",
"side",
"=",
"None",
",",
"start",
"=",
"None",
",",
"end",
"=",
"None",
",",
"page",
"=",
"None",
",",
"limit",
"=",
"None",
")",
":",
"data",
"=",
"{",
"}",
"if",
"symbol",
":",
"data",
"[",
"'symbol'",
"]",
"=",
"symbol",
"if",
"side",
":",
"data",
"[",
"'side'",
"]",
"=",
"side",
"if",
"start",
":",
"data",
"[",
"'startAt'",
"]",
"=",
"start",
"if",
"end",
":",
"data",
"[",
"'endAt'",
"]",
"=",
"end",
"if",
"page",
":",
"data",
"[",
"'page'",
"]",
"=",
"page",
"if",
"limit",
":",
"data",
"[",
"'pageSize'",
"]",
"=",
"limit",
"return",
"self",
".",
"_get",
"(",
"'hist-orders'",
",",
"True",
",",
"data",
"=",
"data",
")"
] | 28.338462 | 18.830769 |
def get_known_name(s: str) -> Optional[Tuple[str, ColorArg]]:
""" Reverse translate a terminal code to a known color name, if possible.
Returns a tuple of (codetype, knownname) on success.
Returns None on failure.
"""
if not s.endswith('m'):
# All codes end with 'm', so...
return None
if s.startswith('\033[38;5;'):
# Extended fore.
name = codes_reverse['fore'].get(s, None)
if name is None:
num = get_code_num(s)
return ('extended fore', num)
else:
return ('extended fore', name)
elif s.startswith('\033[48;5;'):
# Extended back.
name = codes_reverse['back'].get(s, None)
if name is None:
num = get_code_num(s)
return ('extended back', num)
else:
return ('extended back', name)
elif s.startswith('\033[38;2'):
# RGB fore.
vals = get_code_num_rgb(s)
if vals is not None:
return ('rgb fore', vals)
elif s.startswith('\033[48;2'):
# RGB back.
vals = get_code_num_rgb(s)
if vals is not None:
return ('rgb back', vals)
elif s.startswith('\033['):
# Fore, back, style.
number = get_code_num(s)
# Get code type based on number.
if (number <= 7) or (number == 22):
codetype = 'style'
elif (((number >= 30) and (number < 40)) or
((number >= 90) and (number < 100))):
codetype = 'fore'
elif (((number >= 40) and (number < 50)) or
((number >= 100) and (number < 110))):
codetype = 'back'
else:
raise InvalidEscapeCode(
number,
'Expecting 0-7, 22, 30-39, or 40-49 for escape code',
)
name = codes_reverse[codetype].get(s, None)
if name is not None:
return (codetype, name)
# Not a known escape code.
return None
|
[
"def",
"get_known_name",
"(",
"s",
":",
"str",
")",
"->",
"Optional",
"[",
"Tuple",
"[",
"str",
",",
"ColorArg",
"]",
"]",
":",
"if",
"not",
"s",
".",
"endswith",
"(",
"'m'",
")",
":",
"# All codes end with 'm', so...",
"return",
"None",
"if",
"s",
".",
"startswith",
"(",
"'\\033[38;5;'",
")",
":",
"# Extended fore.",
"name",
"=",
"codes_reverse",
"[",
"'fore'",
"]",
".",
"get",
"(",
"s",
",",
"None",
")",
"if",
"name",
"is",
"None",
":",
"num",
"=",
"get_code_num",
"(",
"s",
")",
"return",
"(",
"'extended fore'",
",",
"num",
")",
"else",
":",
"return",
"(",
"'extended fore'",
",",
"name",
")",
"elif",
"s",
".",
"startswith",
"(",
"'\\033[48;5;'",
")",
":",
"# Extended back.",
"name",
"=",
"codes_reverse",
"[",
"'back'",
"]",
".",
"get",
"(",
"s",
",",
"None",
")",
"if",
"name",
"is",
"None",
":",
"num",
"=",
"get_code_num",
"(",
"s",
")",
"return",
"(",
"'extended back'",
",",
"num",
")",
"else",
":",
"return",
"(",
"'extended back'",
",",
"name",
")",
"elif",
"s",
".",
"startswith",
"(",
"'\\033[38;2'",
")",
":",
"# RGB fore.",
"vals",
"=",
"get_code_num_rgb",
"(",
"s",
")",
"if",
"vals",
"is",
"not",
"None",
":",
"return",
"(",
"'rgb fore'",
",",
"vals",
")",
"elif",
"s",
".",
"startswith",
"(",
"'\\033[48;2'",
")",
":",
"# RGB back.",
"vals",
"=",
"get_code_num_rgb",
"(",
"s",
")",
"if",
"vals",
"is",
"not",
"None",
":",
"return",
"(",
"'rgb back'",
",",
"vals",
")",
"elif",
"s",
".",
"startswith",
"(",
"'\\033['",
")",
":",
"# Fore, back, style.",
"number",
"=",
"get_code_num",
"(",
"s",
")",
"# Get code type based on number.",
"if",
"(",
"number",
"<=",
"7",
")",
"or",
"(",
"number",
"==",
"22",
")",
":",
"codetype",
"=",
"'style'",
"elif",
"(",
"(",
"(",
"number",
">=",
"30",
")",
"and",
"(",
"number",
"<",
"40",
")",
")",
"or",
"(",
"(",
"number",
">=",
"90",
")",
"and",
"(",
"number",
"<",
"100",
")",
")",
")",
":",
"codetype",
"=",
"'fore'",
"elif",
"(",
"(",
"(",
"number",
">=",
"40",
")",
"and",
"(",
"number",
"<",
"50",
")",
")",
"or",
"(",
"(",
"number",
">=",
"100",
")",
"and",
"(",
"number",
"<",
"110",
")",
")",
")",
":",
"codetype",
"=",
"'back'",
"else",
":",
"raise",
"InvalidEscapeCode",
"(",
"number",
",",
"'Expecting 0-7, 22, 30-39, or 40-49 for escape code'",
",",
")",
"name",
"=",
"codes_reverse",
"[",
"codetype",
"]",
".",
"get",
"(",
"s",
",",
"None",
")",
"if",
"name",
"is",
"not",
"None",
":",
"return",
"(",
"codetype",
",",
"name",
")",
"# Not a known escape code.",
"return",
"None"
] | 32.779661 | 12.610169 |
def commit_to(self, db: BaseDB) -> None:
    """
    Write the pending trie-node changes into *db*.

    Raises a ValidationError when there are no pending changes, i.e.
    when nothing has been written since the last commit — callers should
    check ``storage_lookup.has_changed_root`` first.
    """
    self.logger.debug2('persist storage root to data store')
    if self._trie_nodes_batch is None:
        raise ValidationError(
            "It is invalid to commit an account's storage if it has no pending changes. "
            "Always check storage_lookup.has_changed_root before attempting to commit."
        )
    pending_batch = self._trie_nodes_batch
    # Deletes are intentionally not applied when flushing the batch.
    pending_batch.commit_to(db, apply_deletes=False)
    self._clear_changed_root()
|
[
"def",
"commit_to",
"(",
"self",
",",
"db",
":",
"BaseDB",
")",
"->",
"None",
":",
"self",
".",
"logger",
".",
"debug2",
"(",
"'persist storage root to data store'",
")",
"if",
"self",
".",
"_trie_nodes_batch",
"is",
"None",
":",
"raise",
"ValidationError",
"(",
"\"It is invalid to commit an account's storage if it has no pending changes. \"",
"\"Always check storage_lookup.has_changed_root before attempting to commit.\"",
")",
"self",
".",
"_trie_nodes_batch",
".",
"commit_to",
"(",
"db",
",",
"apply_deletes",
"=",
"False",
")",
"self",
".",
"_clear_changed_root",
"(",
")"
] | 45.846154 | 18.923077 |
def set_controller_value(self, index_or_name, value):
    """
    Set a controller to the given float value.

    :param index_or_name: integer controller index or its string name
    :param value: float value to assign
    """
    if isinstance(index_or_name, int):
        index = index_or_name
    else:
        # Resolve the controller name to its numeric index first.
        index = self.get_controller_index(index_or_name)
    self.dll.SetControllerValue(index, ctypes.c_float(value))
|
[
"def",
"set_controller_value",
"(",
"self",
",",
"index_or_name",
",",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"index_or_name",
",",
"int",
")",
":",
"index",
"=",
"self",
".",
"get_controller_index",
"(",
"index_or_name",
")",
"else",
":",
"index",
"=",
"index_or_name",
"self",
".",
"dll",
".",
"SetControllerValue",
"(",
"index",
",",
"ctypes",
".",
"c_float",
"(",
"value",
")",
")"
] | 33.666667 | 15 |
def connect_model(self, model):
    """Link the Database to the Model instance.

    In case a new database is created from scratch, ``connect_model``
    creates Trace objects for all tallyable pymc objects defined in
    `model`.

    If the database is being loaded from an existing file, ``connect_model``
    restore the objects trace to their stored value.

    :Parameters:
    model : pymc.Model instance
      An instance holding the pymc objects defining a statistical
      model (stochastics, deterministics, data, ...)
    """
    # Changed this to allow non-Model models. -AP
    # We could also remove it altogether. -DH
    if isinstance(model, pymc.Model):
        self.model = model
    else:
        raise AttributeError('Not a Model instance.')
    # Restore the state of the Model from an existing Database.
    # The `load` method will have already created the Trace objects.
    if hasattr(self, '_state_'):
        # Gather every variable name known to the stored traces.
        names = set()
        for morenames in self.trace_names:
            names.update(morenames)
        # Re-bind each stored trace to the live tally function with the
        # same name; matched names are removed from the pending set.
        for name, fun in six.iteritems(model._funs_to_tally):
            if name in self._traces:
                self._traces[name]._getfunc = fun
                names.discard(name)
        # if len(names) > 0:
        #     print_("Some objects from the database have not been assigned a
        #     getfunc", names)
        # Create a fresh new state.
        # We will be able to remove this when we deprecate traces on objects.
    else:
        # Fresh database: create a Trace object for every tallyable
        # function that does not already have one.
        for name, fun in six.iteritems(model._funs_to_tally):
            if name not in self._traces:
                self._traces[name] = self.__Trace__(
                    name=name,
                    getfunc=fun,
                    db=self)
|
[
"def",
"connect_model",
"(",
"self",
",",
"model",
")",
":",
"# Changed this to allow non-Model models. -AP",
"# We could also remove it altogether. -DH",
"if",
"isinstance",
"(",
"model",
",",
"pymc",
".",
"Model",
")",
":",
"self",
".",
"model",
"=",
"model",
"else",
":",
"raise",
"AttributeError",
"(",
"'Not a Model instance.'",
")",
"# Restore the state of the Model from an existing Database.",
"# The `load` method will have already created the Trace objects.",
"if",
"hasattr",
"(",
"self",
",",
"'_state_'",
")",
":",
"names",
"=",
"set",
"(",
")",
"for",
"morenames",
"in",
"self",
".",
"trace_names",
":",
"names",
".",
"update",
"(",
"morenames",
")",
"for",
"name",
",",
"fun",
"in",
"six",
".",
"iteritems",
"(",
"model",
".",
"_funs_to_tally",
")",
":",
"if",
"name",
"in",
"self",
".",
"_traces",
":",
"self",
".",
"_traces",
"[",
"name",
"]",
".",
"_getfunc",
"=",
"fun",
"names",
".",
"discard",
"(",
"name",
")",
"# if len(names) > 0:",
"# print_(\"Some objects from the database have not been assigned a",
"# getfunc\", names)",
"# Create a fresh new state.",
"# We will be able to remove this when we deprecate traces on objects.",
"else",
":",
"for",
"name",
",",
"fun",
"in",
"six",
".",
"iteritems",
"(",
"model",
".",
"_funs_to_tally",
")",
":",
"if",
"name",
"not",
"in",
"self",
".",
"_traces",
":",
"self",
".",
"_traces",
"[",
"name",
"]",
"=",
"self",
".",
"__Trace__",
"(",
"name",
"=",
"name",
",",
"getfunc",
"=",
"fun",
",",
"db",
"=",
"self",
")"
] | 40.456522 | 17.326087 |
def is_ip_addr(value):
    """
    Check that the supplied value is an Internet Protocol address, v.4,
    represented by a dotted-quad string, i.e. '1.2.3.4'.

    Leading/trailing whitespace is stripped and the stripped string is
    returned.  Partial quads such as '1.2' are accepted (whatever
    ``dottedQuadToNum`` parses).

    >>> vtor = Validator()
    >>> vtor.check('ip_addr', '1.2.3.4')
    '1.2.3.4'
    >>> vtor.check('ip_addr', ' 1.2.3 ')
    '1.2.3'

    Raises VdtTypeError when *value* is not a string, VdtValueError when
    it does not parse as a dotted quad.
    """
    if not isinstance(value, string_types):
        raise VdtTypeError(value)
    value = value.strip()
    try:
        dottedQuadToNum(value)
    except ValueError:
        raise VdtValueError(value)
    return value
|
[
"def",
"is_ip_addr",
"(",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"string_types",
")",
":",
"raise",
"VdtTypeError",
"(",
"value",
")",
"value",
"=",
"value",
".",
"strip",
"(",
")",
"try",
":",
"dottedQuadToNum",
"(",
"value",
")",
"except",
"ValueError",
":",
"raise",
"VdtValueError",
"(",
"value",
")",
"return",
"value"
] | 32.694444 | 14.75 |
def _b64encode_to_str(data):
"""
Wrapper around b64encode which takes and returns same-named types
on both Python 2 and Python 3.
:type data: bytes
:return: str
"""
ret = b64encode(data)
if not isinstance(ret, str): # Python3
return ret.decode('ascii')
else:
return ret
|
[
"def",
"_b64encode_to_str",
"(",
"data",
")",
":",
"ret",
"=",
"b64encode",
"(",
"data",
")",
"if",
"not",
"isinstance",
"(",
"ret",
",",
"str",
")",
":",
"# Python3",
"return",
"ret",
".",
"decode",
"(",
"'ascii'",
")",
"else",
":",
"return",
"ret"
] | 25.916667 | 13.916667 |
def get_atext(value):
    """atext = <matches _atext_matcher>

    Consume the run of non-ATOM_ENDS characters at the front of *value*
    and return ``(atext_terminal, remainder)``.  Non-atext characters in
    the run add an InvalidATextDefect to the token's defects list (via
    ``_validate_xtext``).  Raises HeaderParseError when *value* does not
    start with any atext characters.
    """
    match = _non_atom_end_matcher(value)
    if not match:
        raise errors.HeaderParseError(
            "expected atext but found '{}'".format(value))
    matched_text = match.group()
    remainder = value[len(matched_text):]
    terminal = ValueTerminal(matched_text, 'atext')
    _validate_xtext(terminal)
    return terminal, remainder
|
[
"def",
"get_atext",
"(",
"value",
")",
":",
"m",
"=",
"_non_atom_end_matcher",
"(",
"value",
")",
"if",
"not",
"m",
":",
"raise",
"errors",
".",
"HeaderParseError",
"(",
"\"expected atext but found '{}'\"",
".",
"format",
"(",
"value",
")",
")",
"atext",
"=",
"m",
".",
"group",
"(",
")",
"value",
"=",
"value",
"[",
"len",
"(",
"atext",
")",
":",
"]",
"atext",
"=",
"ValueTerminal",
"(",
"atext",
",",
"'atext'",
")",
"_validate_xtext",
"(",
"atext",
")",
"return",
"atext",
",",
"value"
] | 32.466667 | 15 |
def remover(self, id_tipo_acesso):
    """Remove an access type given its identifier.

    :param id_tipo_acesso: Access type identifier.
    :return: None
    :raise TipoAcessoError: Access type associated with equipment, cannot be removed.
    :raise InvalidParameterError: Identifier value is invalid or none.
    :raise TipoAcessoNaoExisteError: Access type doesn't exist.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    if not is_valid_int_param(id_tipo_acesso):
        raise InvalidParameterError(
            u'Access type id is invalid or was not informed.')
    url = 'tipoacesso/' + str(id_tipo_acesso) + '/'
    status, xml_response = self.submit(None, 'DELETE', url)
    return self.response(status, xml_response)
|
[
"def",
"remover",
"(",
"self",
",",
"id_tipo_acesso",
")",
":",
"if",
"not",
"is_valid_int_param",
"(",
"id_tipo_acesso",
")",
":",
"raise",
"InvalidParameterError",
"(",
"u'Access type id is invalid or was not informed.'",
")",
"url",
"=",
"'tipoacesso/'",
"+",
"str",
"(",
"id_tipo_acesso",
")",
"+",
"'/'",
"code",
",",
"xml",
"=",
"self",
".",
"submit",
"(",
"None",
",",
"'DELETE'",
",",
"url",
")",
"return",
"self",
".",
"response",
"(",
"code",
",",
"xml",
")"
] | 38.272727 | 23.363636 |
def getBranch(self, name, **context):
    """Return a branch of this tree where the 'name' OID may reside."""
    # Try each known key length as a prefix of the requested OID.
    for prefix_len in self._vars.getKeysLens():
        prefix = name[:prefix_len]
        if prefix in self._vars:
            return self._vars[prefix]
    raise error.NoSuchObjectError(name=name, idx=context.get('idx'))
|
[
"def",
"getBranch",
"(",
"self",
",",
"name",
",",
"*",
"*",
"context",
")",
":",
"for",
"keyLen",
"in",
"self",
".",
"_vars",
".",
"getKeysLens",
"(",
")",
":",
"subName",
"=",
"name",
"[",
":",
"keyLen",
"]",
"if",
"subName",
"in",
"self",
".",
"_vars",
":",
"return",
"self",
".",
"_vars",
"[",
"subName",
"]",
"raise",
"error",
".",
"NoSuchObjectError",
"(",
"name",
"=",
"name",
",",
"idx",
"=",
"context",
".",
"get",
"(",
"'idx'",
")",
")"
] | 43 | 11.5 |
def _modules_to_main(modList):
"""Force every module in modList to be placed into main"""
if not modList:
return
main = sys.modules['__main__']
for modname in modList:
if isinstance(modname, str):
try:
mod = __import__(modname)
except Exception:
sys.stderr.write(
'warning: could not import %s\n. '
'Your function may unexpectedly error due to this import failing;'
'A version mismatch is likely. Specific error was:\n' % modname)
print_exec(sys.stderr)
else:
setattr(main, mod.__name__, mod)
|
[
"def",
"_modules_to_main",
"(",
"modList",
")",
":",
"if",
"not",
"modList",
":",
"return",
"main",
"=",
"sys",
".",
"modules",
"[",
"'__main__'",
"]",
"for",
"modname",
"in",
"modList",
":",
"if",
"isinstance",
"(",
"modname",
",",
"str",
")",
":",
"try",
":",
"mod",
"=",
"__import__",
"(",
"modname",
")",
"except",
"Exception",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'warning: could not import %s\\n. '",
"'Your function may unexpectedly error due to this import failing;'",
"'A version mismatch is likely. Specific error was:\\n'",
"%",
"modname",
")",
"print_exec",
"(",
"sys",
".",
"stderr",
")",
"else",
":",
"setattr",
"(",
"main",
",",
"mod",
".",
"__name__",
",",
"mod",
")"
] | 32.222222 | 18 |
def iter_trees(self, *args, **kwargs):
    """Iterate over the Tree objects of the selected commits.

    :return: Iterator yielding Tree objects
    :note: Takes all arguments known to iter_commits method"""
    return (commit.tree for commit in self.iter_commits(*args, **kwargs))
|
[
"def",
"iter_trees",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"(",
"c",
".",
"tree",
"for",
"c",
"in",
"self",
".",
"iter_commits",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
")"
] | 55.25 | 7.25 |
def hook(reverse=False,
         align=False,
         strip_path=False,
         enable_on_envvar_only=False,
         on_tty=False,
         conservative=False,
         styles=None,
         tb=None,
         tpe=None,
         value=None):
    """Hook the current excepthook to the backtrace.

    If `align` is True, all parts (line numbers, file names, etc..) will be
    aligned to the left according to the longest entry.

    If `strip_path` is True, only the file name will be shown, not its full
    path.

    If `enable_on_envvar_only` is True, only if the environment variable
    `ENABLE_BACKTRACE` is set, backtrace will be activated.

    If `on_tty` is True, backtrace will be activated only if you're running
    in a real terminal (i.e. not piped, redirected, etc..).

    If `conservative` is True, the traceback will have a more seemingly
    original style (There will be no alignment by default, 'File', 'line'
    and 'in' prefixes and will ignore any styling provided by the user.)

    If `tb` is given, the supplied traceback is formatted and printed
    immediately instead of installing a process-wide ``sys.excepthook``.

    See https://github.com/nir0s/backtrace/blob/master/README.md for
    information on `styles`.
    """
    if enable_on_envvar_only and 'ENABLE_BACKTRACE' not in os.environ:
        return

    # Default to "not a tty" when stderr has no isatty (replaced streams).
    isatty = getattr(sys.stderr, 'isatty', lambda: False)
    if on_tty and not isatty():
        return

    if conservative:
        styles = CONVERVATIVE_STYLES
        align = align or False
    elif styles:
        # Fill in any style keys the caller omitted with the defaults.
        # NOTE(review): this mutates the caller-supplied dict in place.
        for k in STYLES.keys():
            styles[k] = styles.get(k, STYLES[k])
    else:
        styles = STYLES

    # For Windows
    colorama.init()

    def backtrace_excepthook(tpe, value, tb=None):
        # Don't know if we're getting traceback or traceback entries.
        # We'll try to parse a traceback object.
        try:
            traceback_entries = traceback.extract_tb(tb)
        except AttributeError:
            traceback_entries = tb
        parser = _Hook(traceback_entries, align, strip_path, conservative)

        # Accept either an exception class or its name as a string.
        tpe = tpe if isinstance(tpe, str) else tpe.__name__
        tb_message = styles['backtrace'].format('Traceback ({0}):'.format(
            'Most recent call ' + ('first' if reverse else 'last'))) + \
            Style.RESET_ALL
        err_message = styles['error'].format(tpe + ': ' + str(value)) + \
            Style.RESET_ALL

        if reverse:
            parser.reverse()
        _flush(tb_message)
        backtrace = parser.generate_backtrace(styles)
        # The error message goes first when reversed, last otherwise.
        backtrace.insert(0 if reverse else len(backtrace), err_message)
        for entry in backtrace:
            _flush(entry)

    if tb:
        # A traceback was supplied directly: format it once, right now.
        backtrace_excepthook(tpe=tpe, value=value, tb=tb)
    else:
        # Install as the process-wide exception hook.
        sys.excepthook = backtrace_excepthook
|
[
"def",
"hook",
"(",
"reverse",
"=",
"False",
",",
"align",
"=",
"False",
",",
"strip_path",
"=",
"False",
",",
"enable_on_envvar_only",
"=",
"False",
",",
"on_tty",
"=",
"False",
",",
"conservative",
"=",
"False",
",",
"styles",
"=",
"None",
",",
"tb",
"=",
"None",
",",
"tpe",
"=",
"None",
",",
"value",
"=",
"None",
")",
":",
"if",
"enable_on_envvar_only",
"and",
"'ENABLE_BACKTRACE'",
"not",
"in",
"os",
".",
"environ",
":",
"return",
"isatty",
"=",
"getattr",
"(",
"sys",
".",
"stderr",
",",
"'isatty'",
",",
"lambda",
":",
"False",
")",
"if",
"on_tty",
"and",
"not",
"isatty",
"(",
")",
":",
"return",
"if",
"conservative",
":",
"styles",
"=",
"CONVERVATIVE_STYLES",
"align",
"=",
"align",
"or",
"False",
"elif",
"styles",
":",
"for",
"k",
"in",
"STYLES",
".",
"keys",
"(",
")",
":",
"styles",
"[",
"k",
"]",
"=",
"styles",
".",
"get",
"(",
"k",
",",
"STYLES",
"[",
"k",
"]",
")",
"else",
":",
"styles",
"=",
"STYLES",
"# For Windows",
"colorama",
".",
"init",
"(",
")",
"def",
"backtrace_excepthook",
"(",
"tpe",
",",
"value",
",",
"tb",
"=",
"None",
")",
":",
"# Don't know if we're getting traceback or traceback entries.",
"# We'll try to parse a traceback object.",
"try",
":",
"traceback_entries",
"=",
"traceback",
".",
"extract_tb",
"(",
"tb",
")",
"except",
"AttributeError",
":",
"traceback_entries",
"=",
"tb",
"parser",
"=",
"_Hook",
"(",
"traceback_entries",
",",
"align",
",",
"strip_path",
",",
"conservative",
")",
"tpe",
"=",
"tpe",
"if",
"isinstance",
"(",
"tpe",
",",
"str",
")",
"else",
"tpe",
".",
"__name__",
"tb_message",
"=",
"styles",
"[",
"'backtrace'",
"]",
".",
"format",
"(",
"'Traceback ({0}):'",
".",
"format",
"(",
"'Most recent call '",
"+",
"(",
"'first'",
"if",
"reverse",
"else",
"'last'",
")",
")",
")",
"+",
"Style",
".",
"RESET_ALL",
"err_message",
"=",
"styles",
"[",
"'error'",
"]",
".",
"format",
"(",
"tpe",
"+",
"': '",
"+",
"str",
"(",
"value",
")",
")",
"+",
"Style",
".",
"RESET_ALL",
"if",
"reverse",
":",
"parser",
".",
"reverse",
"(",
")",
"_flush",
"(",
"tb_message",
")",
"backtrace",
"=",
"parser",
".",
"generate_backtrace",
"(",
"styles",
")",
"backtrace",
".",
"insert",
"(",
"0",
"if",
"reverse",
"else",
"len",
"(",
"backtrace",
")",
",",
"err_message",
")",
"for",
"entry",
"in",
"backtrace",
":",
"_flush",
"(",
"entry",
")",
"if",
"tb",
":",
"backtrace_excepthook",
"(",
"tpe",
"=",
"tpe",
",",
"value",
"=",
"value",
",",
"tb",
"=",
"tb",
")",
"else",
":",
"sys",
".",
"excepthook",
"=",
"backtrace_excepthook"
] | 32.873418 | 23.291139 |
def _commit(self):
    """Transactionally commit the changes accumulated.

    Returns:
        List[google.cloud.proto.firestore.v1beta1.\
            write_pb2.WriteResult, ...]: The write results corresponding
        to the changes committed, returned in the same order as the
        changes were applied to this transaction. A write result contains
        an ``update_time`` field.

    Raises:
        ValueError: If no transaction is in progress.
    """
    if not self.in_progress:
        raise ValueError(_CANT_COMMIT)
    response = _commit_with_retry(self._client, self._write_pbs, self._id)
    # Reset transaction state before handing the results back.
    self._clean_up()
    return list(response.write_results)
|
[
"def",
"_commit",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"in_progress",
":",
"raise",
"ValueError",
"(",
"_CANT_COMMIT",
")",
"commit_response",
"=",
"_commit_with_retry",
"(",
"self",
".",
"_client",
",",
"self",
".",
"_write_pbs",
",",
"self",
".",
"_id",
")",
"self",
".",
"_clean_up",
"(",
")",
"return",
"list",
"(",
"commit_response",
".",
"write_results",
")"
] | 36.2 | 22.55 |
def _align_header(header, alignment, width, visible_width, is_multiline=False,
width_fn=None):
"Pad string header to width chars given known visible_width of the header."
if is_multiline:
header_lines = re.split(_multiline_codes, header)
padded_lines = [_align_header(h, alignment, width, width_fn(h))
for h in header_lines]
return "\n".join(padded_lines)
# else: not multiline
ninvisible = len(header) - visible_width
width += ninvisible
if alignment == "left":
return _padright(width, header)
elif alignment == "center":
return _padboth(width, header)
elif not alignment:
return "{0}".format(header)
else:
return _padleft(width, header)
|
[
"def",
"_align_header",
"(",
"header",
",",
"alignment",
",",
"width",
",",
"visible_width",
",",
"is_multiline",
"=",
"False",
",",
"width_fn",
"=",
"None",
")",
":",
"if",
"is_multiline",
":",
"header_lines",
"=",
"re",
".",
"split",
"(",
"_multiline_codes",
",",
"header",
")",
"padded_lines",
"=",
"[",
"_align_header",
"(",
"h",
",",
"alignment",
",",
"width",
",",
"width_fn",
"(",
"h",
")",
")",
"for",
"h",
"in",
"header_lines",
"]",
"return",
"\"\\n\"",
".",
"join",
"(",
"padded_lines",
")",
"# else: not multiline",
"ninvisible",
"=",
"len",
"(",
"header",
")",
"-",
"visible_width",
"width",
"+=",
"ninvisible",
"if",
"alignment",
"==",
"\"left\"",
":",
"return",
"_padright",
"(",
"width",
",",
"header",
")",
"elif",
"alignment",
"==",
"\"center\"",
":",
"return",
"_padboth",
"(",
"width",
",",
"header",
")",
"elif",
"not",
"alignment",
":",
"return",
"\"{0}\"",
".",
"format",
"(",
"header",
")",
"else",
":",
"return",
"_padleft",
"(",
"width",
",",
"header",
")"
] | 39.684211 | 14.526316 |
def tour(self, action='start', channel=0, start=True, tour_path_number=1):
    """
    Start or stop a PTZ tour.

    Params:
        action - start or stop
        channel - channel number
        start - True (StartTour) or False (StopTour)
        tour_path_number - tour path number
    """
    mode = 'Start' if start else 'Stop'
    query = (
        'ptz.cgi?action={0}&channel={1}&code={2}Tour&arg1={3}'
        '&arg2=0&arg3=0&arg4=0'
    ).format(action, channel, mode, tour_path_number)
    response = self.command(query)
    return response.content.decode('utf-8')
|
[
"def",
"tour",
"(",
"self",
",",
"action",
"=",
"'start'",
",",
"channel",
"=",
"0",
",",
"start",
"=",
"True",
",",
"tour_path_number",
"=",
"1",
")",
":",
"ret",
"=",
"self",
".",
"command",
"(",
"'ptz.cgi?action={0}&channel={1}&code={2}Tour&arg1={3}'",
"'&arg2=0&arg3=0&arg4=0'",
".",
"format",
"(",
"action",
",",
"channel",
",",
"'Start'",
"if",
"start",
"else",
"'Stop'",
",",
"tour_path_number",
")",
")",
"return",
"ret",
".",
"content",
".",
"decode",
"(",
"'utf-8'",
")"
] | 40.533333 | 14.533333 |
async def get_blueprint_params(request, left: int, right: int) -> str:
    """
    API Description: Multiply, left * right. This will show in the swagger page (localhost:8000/api/v1/).
    """
    product = left * right
    return "{left}*{right}={res}".format(left=left, right=right, res=product)
|
[
"async",
"def",
"get_blueprint_params",
"(",
"request",
",",
"left",
":",
"int",
",",
"right",
":",
"int",
")",
"->",
"str",
":",
"res",
"=",
"left",
"*",
"right",
"return",
"\"{left}*{right}={res}\"",
".",
"format",
"(",
"left",
"=",
"left",
",",
"right",
"=",
"right",
",",
"res",
"=",
"res",
")"
] | 47.333333 | 24.333333 |
def replace(self, match, content):
    """Replace all occurrences of the regex inside *match* with the
    configured replacement value, then substitute the rewritten match
    back into *content*.
    """
    replacement = self.replace_expression.sub(self.replace_with, match)
    logger.info('Replacing: [ %s ] --> [ %s ]', match, replacement)
    return content.replace(match, replacement)
|
[
"def",
"replace",
"(",
"self",
",",
"match",
",",
"content",
")",
":",
"new_string",
"=",
"self",
".",
"replace_expression",
".",
"sub",
"(",
"self",
".",
"replace_with",
",",
"match",
")",
"logger",
".",
"info",
"(",
"'Replacing: [ %s ] --> [ %s ]'",
",",
"match",
",",
"new_string",
")",
"new_content",
"=",
"content",
".",
"replace",
"(",
"match",
",",
"new_string",
")",
"return",
"new_content"
] | 46.75 | 12.75 |
def my_glob(pattern):
    """
    Return a listing of names matching *pattern*.

    Patterns starting with 'vos:' are matched (via fnmatch) against the
    VOSpace directory listing; anything else is delegated to glob.

    @param pattern: glob-style pattern, optionally prefixed with 'vos:'
    @return: list of matching names
    """
    if pattern[0:4] != 'vos:':
        return glob(pattern)
    parent = os.path.dirname(pattern)
    matches = []
    for name in listdir(parent):
        full_name = '/'.join([parent, name])
        if fnmatch.fnmatch(full_name, pattern):
            matches.append(full_name)
    return matches
|
[
"def",
"my_glob",
"(",
"pattern",
")",
":",
"result",
"=",
"[",
"]",
"if",
"pattern",
"[",
"0",
":",
"4",
"]",
"==",
"'vos:'",
":",
"dirname",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"pattern",
")",
"flist",
"=",
"listdir",
"(",
"dirname",
")",
"for",
"fname",
"in",
"flist",
":",
"fname",
"=",
"'/'",
".",
"join",
"(",
"[",
"dirname",
",",
"fname",
"]",
")",
"if",
"fnmatch",
".",
"fnmatch",
"(",
"fname",
",",
"pattern",
")",
":",
"result",
".",
"append",
"(",
"fname",
")",
"else",
":",
"result",
"=",
"glob",
"(",
"pattern",
")",
"return",
"result"
] | 23.944444 | 14.055556 |
def wait_until_complete(self, timeout=None):
    """Block until the sequencer has finished its operation.

    Arguments:
    timeout -- Optional. Seconds to wait for sequencer to finish. If this
               time is exceeded, a RuntimeError is raised.

    Return:
    Sequencer testState value.
    """
    deadline = None
    if timeout:
        deadline = time.time() + int(timeout)
    sequencer = self.get('system1', 'children-sequencer')
    state = self.get(sequencer, 'state')
    # Poll every 2 seconds until the sequencer pauses or goes idle.
    while 'PAUSE' not in state and 'IDLE' not in state:
        time.sleep(2)
        if deadline and time.time() >= deadline:
            raise RuntimeError('wait_until_complete timed out after %s sec'
                               % timeout)
        state = self.get(sequencer, 'state')
    return self.get(sequencer, 'testState')
|
[
"def",
"wait_until_complete",
"(",
"self",
",",
"timeout",
"=",
"None",
")",
":",
"timeout_at",
"=",
"None",
"if",
"timeout",
":",
"timeout_at",
"=",
"time",
".",
"time",
"(",
")",
"+",
"int",
"(",
"timeout",
")",
"sequencer",
"=",
"self",
".",
"get",
"(",
"'system1'",
",",
"'children-sequencer'",
")",
"while",
"True",
":",
"cur_test_state",
"=",
"self",
".",
"get",
"(",
"sequencer",
",",
"'state'",
")",
"if",
"'PAUSE'",
"in",
"cur_test_state",
"or",
"'IDLE'",
"in",
"cur_test_state",
":",
"break",
"time",
".",
"sleep",
"(",
"2",
")",
"if",
"timeout_at",
"and",
"time",
".",
"time",
"(",
")",
">=",
"timeout_at",
":",
"raise",
"RuntimeError",
"(",
"'wait_until_complete timed out after %s sec'",
"%",
"timeout",
")",
"return",
"self",
".",
"get",
"(",
"sequencer",
",",
"'testState'",
")"
] | 35.448276 | 22.793103 |
def _on_channel_open(self, channel):
    """
    Callback used when a channel is opened.

    Registers the close and cancel callbacks on the new channel, then
    requests the configured QoS settings (prefetch etc. from
    ``config.conf["qos"]``), continuing in ``self._on_qosok``.

    Args:
        channel (pika.channel.Channel): The channel that successfully opened.
    """
    channel.add_on_close_callback(self._on_channel_close)
    channel.add_on_cancel_callback(self._on_cancel)
    channel.basic_qos(callback=self._on_qosok, **config.conf["qos"])
|
[
"def",
"_on_channel_open",
"(",
"self",
",",
"channel",
")",
":",
"channel",
".",
"add_on_close_callback",
"(",
"self",
".",
"_on_channel_close",
")",
"channel",
".",
"add_on_cancel_callback",
"(",
"self",
".",
"_on_cancel",
")",
"channel",
".",
"basic_qos",
"(",
"callback",
"=",
"self",
".",
"_on_qosok",
",",
"*",
"*",
"config",
".",
"conf",
"[",
"\"qos\"",
"]",
")"
] | 33.538462 | 21.230769 |
def shape(self):
    """Returns a tuple of row, column, (band count if multidimensional)."""
    rows = self.ds.RasterYSize
    cols = self.ds.RasterXSize
    bands = self.ds.RasterCount
    # Single-band rasters report only (rows, cols).
    if bands <= 1:
        return (rows, cols)
    return (rows, cols, bands)
|
[
"def",
"shape",
"(",
"self",
")",
":",
"shp",
"=",
"(",
"self",
".",
"ds",
".",
"RasterYSize",
",",
"self",
".",
"ds",
".",
"RasterXSize",
",",
"self",
".",
"ds",
".",
"RasterCount",
")",
"return",
"shp",
"[",
":",
"2",
"]",
"if",
"shp",
"[",
"2",
"]",
"<=",
"1",
"else",
"shp"
] | 54.5 | 16.75 |
def mkdir(self, remote_path):
    """Makes new directory on WebDAV server.
    More information you can find by link http://webdav.org/specs/rfc4918.html#METHOD_MKCOL

    :param remote_path: path to directory
    :return: True if request executed with code 200 or 201 and False otherwise.
             A 405 response (collection already exists) is treated as success.
    """
    urn = Urn(remote_path, directory=True)
    try:
        response = self.execute_request(action='mkdir', path=urn.quote())
    except ResponseErrorCode as err:
        # 405 Method Not Allowed: the directory already exists.
        if err.code == 405:
            return True
        raise
    return response.status_code in (200, 201)
|
[
"def",
"mkdir",
"(",
"self",
",",
"remote_path",
")",
":",
"directory_urn",
"=",
"Urn",
"(",
"remote_path",
",",
"directory",
"=",
"True",
")",
"try",
":",
"response",
"=",
"self",
".",
"execute_request",
"(",
"action",
"=",
"'mkdir'",
",",
"path",
"=",
"directory_urn",
".",
"quote",
"(",
")",
")",
"return",
"response",
".",
"status_code",
"in",
"(",
"200",
",",
"201",
")",
"except",
"ResponseErrorCode",
"as",
"e",
":",
"if",
"e",
".",
"code",
"==",
"405",
":",
"return",
"True",
"raise"
] | 33.157895 | 24.578947 |
def unquote_redirection_tokens(args: List[str]) -> None:
    """
    Unquote redirection tokens in a list of command-line arguments, in place.

    This is used when redirection tokens have to be passed to another command.

    :param args: the command line args
    """
    for position, token in enumerate(args):
        bare = strip_quotes(token)
        # Only rewrite entries that are recognized redirection tokens.
        if bare in constants.REDIRECTION_TOKENS:
            args[position] = bare
|
[
"def",
"unquote_redirection_tokens",
"(",
"args",
":",
"List",
"[",
"str",
"]",
")",
"->",
"None",
":",
"for",
"i",
",",
"arg",
"in",
"enumerate",
"(",
"args",
")",
":",
"unquoted_arg",
"=",
"strip_quotes",
"(",
"arg",
")",
"if",
"unquoted_arg",
"in",
"constants",
".",
"REDIRECTION_TOKENS",
":",
"args",
"[",
"i",
"]",
"=",
"unquoted_arg"
] | 41.5 | 10.9 |
def check_settings(required_settings):
    """
    Checks all settings required by a module have been set.

    If a setting is required and it could not be found a
    NotImplementedError will be raised naming the missing settings.

    :param required_settings: List of settings names (as strings) that
        are anticipated to be in the settings module.
    :return: None.
    """
    present = [
        name if hasattr(settings, name) else None
        for name in required_settings
    ]
    if not all(present):
        missing = set(required_settings) - set(present)
        raise NotImplementedError(
            'The following settings have not been set: %s' % ', '.join(missing)
        )
|
[
"def",
"check_settings",
"(",
"required_settings",
")",
":",
"defined_settings",
"=",
"[",
"setting",
"if",
"hasattr",
"(",
"settings",
",",
"setting",
")",
"else",
"None",
"for",
"setting",
"in",
"required_settings",
"]",
"if",
"not",
"all",
"(",
"defined_settings",
")",
":",
"raise",
"NotImplementedError",
"(",
"'The following settings have not been set: %s'",
"%",
"', '",
".",
"join",
"(",
"set",
"(",
"required_settings",
")",
"-",
"set",
"(",
"defined_settings",
")",
")",
")"
] | 32.818182 | 22.545455 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.