text
stringlengths 89
104k
| code_tokens
list | avg_line_len
float64 7.91
980
| score
float64 0
630
|
---|---|---|---|
def device_info(soapy_args=''):
    """Collect information about the selected SoapySDR device.

    Returns a tuple ``(device, report)`` where ``device`` is the opened
    ``simplesoapy.SoapyDevice`` (or ``None`` when no device could be
    opened) and ``report`` is a multi-line human-readable description.
    """
    out = []

    def fmt_mhz_ranges(pairs):
        # Render (low, high) pairs in MHz, collapsing degenerate ranges.
        rendered = []
        for pair in pairs:
            low, high = pair[0], pair[1]
            if low == high:
                rendered.append('{:.2f}'.format(low / 1e6))
            else:
                rendered.append('{:.2f} - {:.2f}'.format(low / 1e6, high / 1e6))
        return rendered

    try:
        device = simplesoapy.SoapyDevice(soapy_args)
        out.append('Selected device: {}'.format(device.hardware))
        out.append(' Available RX channels:')
        out.append(' {}'.format(', '.join(str(c) for c in device.list_channels())))
        out.append(' Available antennas:')
        out.append(' {}'.format(', '.join(device.list_antennas())))
        out.append(' Available tunable elements:')
        out.append(' {}'.format(', '.join(device.list_frequencies())))
        out.append(' Available amplification elements:')
        out.append(' {}'.format(', '.join(device.list_gains())))
        out.append(' Available device settings:')
        for key, info in device.list_settings().items():
            out.append(wrap('{} ... {} - {} (default: {})'.format(
                key, info['name'], info['description'], info['value'])))
        out.append(' Available stream arguments:')
        for key, info in device.list_stream_args().items():
            out.append(wrap('{} ... {} - {} (default: {})'.format(
                key, info['name'], info['description'], info['value'])))
        out.append(' Allowed gain range [dB]:')
        out.append(' {:.2f} - {:.2f}'.format(*device.get_gain_range()))
        out.append(' Allowed frequency range [MHz]:')
        out.append(' {:.2f} - {:.2f}'.format(*[f / 1e6 for f in device.get_frequency_range()]))
        out.append(' Allowed sample rates [MHz]:')
        out.append(wrap(', '.join(fmt_mhz_ranges(device.list_sample_rates()))))
        out.append(' Allowed bandwidths [MHz]:')
        bandwidths = fmt_mhz_ranges(device.list_bandwidths())
        if bandwidths:
            out.append(wrap(', '.join(bandwidths)))
        else:
            out.append(' N/A')
    except RuntimeError:
        # SoapySDR raises RuntimeError when no matching device exists.
        device = None
        out.append('No devices found!')
    return (device, '\n'.join(out))
|
[
"def",
"device_info",
"(",
"soapy_args",
"=",
"''",
")",
":",
"text",
"=",
"[",
"]",
"try",
":",
"device",
"=",
"simplesoapy",
".",
"SoapyDevice",
"(",
"soapy_args",
")",
"text",
".",
"append",
"(",
"'Selected device: {}'",
".",
"format",
"(",
"device",
".",
"hardware",
")",
")",
"text",
".",
"append",
"(",
"' Available RX channels:'",
")",
"text",
".",
"append",
"(",
"' {}'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"str",
"(",
"x",
")",
"for",
"x",
"in",
"device",
".",
"list_channels",
"(",
")",
")",
")",
")",
"text",
".",
"append",
"(",
"' Available antennas:'",
")",
"text",
".",
"append",
"(",
"' {}'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"device",
".",
"list_antennas",
"(",
")",
")",
")",
")",
"text",
".",
"append",
"(",
"' Available tunable elements:'",
")",
"text",
".",
"append",
"(",
"' {}'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"device",
".",
"list_frequencies",
"(",
")",
")",
")",
")",
"text",
".",
"append",
"(",
"' Available amplification elements:'",
")",
"text",
".",
"append",
"(",
"' {}'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"device",
".",
"list_gains",
"(",
")",
")",
")",
")",
"text",
".",
"append",
"(",
"' Available device settings:'",
")",
"for",
"key",
",",
"s",
"in",
"device",
".",
"list_settings",
"(",
")",
".",
"items",
"(",
")",
":",
"text",
".",
"append",
"(",
"wrap",
"(",
"'{} ... {} - {} (default: {})'",
".",
"format",
"(",
"key",
",",
"s",
"[",
"'name'",
"]",
",",
"s",
"[",
"'description'",
"]",
",",
"s",
"[",
"'value'",
"]",
")",
")",
")",
"text",
".",
"append",
"(",
"' Available stream arguments:'",
")",
"for",
"key",
",",
"s",
"in",
"device",
".",
"list_stream_args",
"(",
")",
".",
"items",
"(",
")",
":",
"text",
".",
"append",
"(",
"wrap",
"(",
"'{} ... {} - {} (default: {})'",
".",
"format",
"(",
"key",
",",
"s",
"[",
"'name'",
"]",
",",
"s",
"[",
"'description'",
"]",
",",
"s",
"[",
"'value'",
"]",
")",
")",
")",
"text",
".",
"append",
"(",
"' Allowed gain range [dB]:'",
")",
"text",
".",
"append",
"(",
"' {:.2f} - {:.2f}'",
".",
"format",
"(",
"*",
"device",
".",
"get_gain_range",
"(",
")",
")",
")",
"text",
".",
"append",
"(",
"' Allowed frequency range [MHz]:'",
")",
"text",
".",
"append",
"(",
"' {:.2f} - {:.2f}'",
".",
"format",
"(",
"*",
"[",
"x",
"/",
"1e6",
"for",
"x",
"in",
"device",
".",
"get_frequency_range",
"(",
")",
"]",
")",
")",
"text",
".",
"append",
"(",
"' Allowed sample rates [MHz]:'",
")",
"rates",
"=",
"[",
"]",
"for",
"r",
"in",
"device",
".",
"list_sample_rates",
"(",
")",
":",
"if",
"r",
"[",
"0",
"]",
"==",
"r",
"[",
"1",
"]",
":",
"rates",
".",
"append",
"(",
"'{:.2f}'",
".",
"format",
"(",
"r",
"[",
"0",
"]",
"/",
"1e6",
")",
")",
"else",
":",
"rates",
".",
"append",
"(",
"'{:.2f} - {:.2f}'",
".",
"format",
"(",
"r",
"[",
"0",
"]",
"/",
"1e6",
",",
"r",
"[",
"1",
"]",
"/",
"1e6",
")",
")",
"text",
".",
"append",
"(",
"wrap",
"(",
"', '",
".",
"join",
"(",
"rates",
")",
")",
")",
"text",
".",
"append",
"(",
"' Allowed bandwidths [MHz]:'",
")",
"bandwidths",
"=",
"[",
"]",
"for",
"b",
"in",
"device",
".",
"list_bandwidths",
"(",
")",
":",
"if",
"b",
"[",
"0",
"]",
"==",
"b",
"[",
"1",
"]",
":",
"bandwidths",
".",
"append",
"(",
"'{:.2f}'",
".",
"format",
"(",
"b",
"[",
"0",
"]",
"/",
"1e6",
")",
")",
"else",
":",
"bandwidths",
".",
"append",
"(",
"'{:.2f} - {:.2f}'",
".",
"format",
"(",
"b",
"[",
"0",
"]",
"/",
"1e6",
",",
"b",
"[",
"1",
"]",
"/",
"1e6",
")",
")",
"if",
"bandwidths",
":",
"text",
".",
"append",
"(",
"wrap",
"(",
"', '",
".",
"join",
"(",
"bandwidths",
")",
")",
")",
"else",
":",
"text",
".",
"append",
"(",
"' N/A'",
")",
"except",
"RuntimeError",
":",
"device",
"=",
"None",
"text",
".",
"append",
"(",
"'No devices found!'",
")",
"return",
"(",
"device",
",",
"'\\n'",
".",
"join",
"(",
"text",
")",
")"
] | 49.489362 | 20.574468 |
def output_sizes(self):
    """Return a tuple with the output size of every layer.

    Entries of ``self._output_sizes`` may be stored as callables for
    lazy evaluation; those are resolved here before being returned.
    """
    sizes = []
    for entry in self._output_sizes:
        sizes.append(entry() if callable(entry) else entry)
    return tuple(sizes)
|
[
"def",
"output_sizes",
"(",
"self",
")",
":",
"return",
"tuple",
"(",
"[",
"l",
"(",
")",
"if",
"callable",
"(",
"l",
")",
"else",
"l",
"for",
"l",
"in",
"self",
".",
"_output_sizes",
"]",
")"
] | 53.333333 | 16.666667 |
def prior(self, samples):
    """Evaluate the prior distribution over sampled weight vectors.

    Parameters
    ----------
    samples: list
        A (NUM_OF_INSTANCE * NUM_OF_FUNCTIONS) matrix of weight samples,
        i.e. {{w11, ..., w1k}, {w21, ..., w2k}, ..., {wk1, ..., wkk}}.

    Returns
    -------
    numpy.ndarray
        Array of length NUM_OF_INSTANCE: 1 for admissible samples,
        0 otherwise.
    """
    ret = np.ones(NUM_OF_INSTANCE)
    for idx in range(NUM_OF_INSTANCE):
        sample = samples[idx]
        # Reject samples that contain any non-positive weight.
        if any(not sample[pos] > 0 for pos in range(self.effective_model_num)):
            ret[idx] = 0
        # Reject samples whose combined curve value at position 1 is not
        # strictly below its value at the target position.
        if self.f_comb(1, sample) >= self.f_comb(self.target_pos, sample):
            ret[idx] = 0
    return ret
|
[
"def",
"prior",
"(",
"self",
",",
"samples",
")",
":",
"ret",
"=",
"np",
".",
"ones",
"(",
"NUM_OF_INSTANCE",
")",
"for",
"i",
"in",
"range",
"(",
"NUM_OF_INSTANCE",
")",
":",
"for",
"j",
"in",
"range",
"(",
"self",
".",
"effective_model_num",
")",
":",
"if",
"not",
"samples",
"[",
"i",
"]",
"[",
"j",
"]",
">",
"0",
":",
"ret",
"[",
"i",
"]",
"=",
"0",
"if",
"self",
".",
"f_comb",
"(",
"1",
",",
"samples",
"[",
"i",
"]",
")",
">=",
"self",
".",
"f_comb",
"(",
"self",
".",
"target_pos",
",",
"samples",
"[",
"i",
"]",
")",
":",
"ret",
"[",
"i",
"]",
"=",
"0",
"return",
"ret"
] | 32.545455 | 20.045455 |
def iterable(item):
    """Return *item* if it is a non-string iterable, else wrap it in a list.

    Strings (and bytes) are iterable but are treated as scalars here, so
    they come back wrapped in a single-element list.

    :param item: Any object.
    :return: *item* unchanged when it is an iterable non-string, otherwise
        ``[item]``.
    """
    # ``collections.Iterable`` was removed in Python 3.10 and ``basestring``
    # only ever existed on Python 2; use the modern equivalents.
    from collections.abc import Iterable
    if isinstance(item, Iterable) and not isinstance(item, (str, bytes)):
        return item
    return [item]
|
[
"def",
"iterable",
"(",
"item",
")",
":",
"if",
"isinstance",
"(",
"item",
",",
"collections",
".",
"Iterable",
")",
"and",
"not",
"isinstance",
"(",
"item",
",",
"basestring",
")",
":",
"return",
"item",
"else",
":",
"return",
"[",
"item",
"]"
] | 27.875 | 21.5 |
def remove_info_file():
    """Remove the current process's TensorBoardInfo file, if it exists.

    If the file does not exist, no action is taken and no error is raised;
    any other OS-level failure propagates to the caller.
    """
    try:
        os.unlink(_get_info_file_path())
    except FileNotFoundError:
        # FileNotFoundError is exactly OSError-with-ENOENT (PEP 3151), so
        # this replaces the manual errno comparison; other OSErrors still
        # propagate.  The user may have wiped their temporary directory or
        # something -- not a problem: we're already in the desired state.
        pass
|
[
"def",
"remove_info_file",
"(",
")",
":",
"try",
":",
"os",
".",
"unlink",
"(",
"_get_info_file_path",
"(",
")",
")",
"except",
"OSError",
"as",
"e",
":",
"if",
"e",
".",
"errno",
"==",
"errno",
".",
"ENOENT",
":",
"# The user may have wiped their temporary directory or something.",
"# Not a problem: we're already in the state that we want to be in.",
"pass",
"else",
":",
"raise"
] | 31.214286 | 21.928571 |
def display_start(self):
    """Prepare the iteration status display when the Verbose option is set.

    NB: assumes the first status entry is the iteration count and the
    last is the rho value; the rho column is included only when the
    AutoRho option is enabled.

    Returns the tuple ``(fmtstr, nsep)`` -- the row format string and
    separator width, or ``('', 0)`` when not verbose.
    """
    if not self.opt['Verbose']:
        return '', 0
    # Drop the trailing rho column unless AutoRho is active.
    hdrtxt = type(self).hdrtxt()
    if not self.opt['AutoRho', 'Enabled']:
        hdrtxt = hdrtxt[0:-1]
    # Delegate construction of the display format to the common helper.
    hdrstr, fmtstr, nsep = common.solve_status_str(
        hdrtxt, fwdth0=type(self).fwiter, fprec=type(self).fpothr)
    if self.opt['StatusHeader']:
        print(hdrstr)
        print("-" * nsep)
    return fmtstr, nsep
|
[
"def",
"display_start",
"(",
"self",
")",
":",
"if",
"self",
".",
"opt",
"[",
"'Verbose'",
"]",
":",
"# If AutoRho option enabled rho is included in iteration status",
"if",
"self",
".",
"opt",
"[",
"'AutoRho'",
",",
"'Enabled'",
"]",
":",
"hdrtxt",
"=",
"type",
"(",
"self",
")",
".",
"hdrtxt",
"(",
")",
"else",
":",
"hdrtxt",
"=",
"type",
"(",
"self",
")",
".",
"hdrtxt",
"(",
")",
"[",
"0",
":",
"-",
"1",
"]",
"# Call utility function to construct status display formatting",
"hdrstr",
",",
"fmtstr",
",",
"nsep",
"=",
"common",
".",
"solve_status_str",
"(",
"hdrtxt",
",",
"fwdth0",
"=",
"type",
"(",
"self",
")",
".",
"fwiter",
",",
"fprec",
"=",
"type",
"(",
"self",
")",
".",
"fpothr",
")",
"# Print header and separator strings",
"if",
"self",
".",
"opt",
"[",
"'StatusHeader'",
"]",
":",
"print",
"(",
"hdrstr",
")",
"print",
"(",
"\"-\"",
"*",
"nsep",
")",
"else",
":",
"fmtstr",
",",
"nsep",
"=",
"''",
",",
"0",
"return",
"fmtstr",
",",
"nsep"
] | 38.782609 | 17 |
def _onActivateItem(self, index):
    '''Handle activation of item in listing.

    :param index: Model index of the activated item.
    '''
    item = self._filesystemWidget.model().item(index)
    if not isinstance(item, riffle.model.File):
        # Non-file entries (presumably directories/mounts) cannot be
        # accepted directly: disable the accept button and navigate into
        # the activated location instead.  NOTE(review): nesting of the
        # setLocation call under this branch inferred -- confirm against
        # upstream riffle browser source.
        self._acceptButton.setDisabled(True)
        self.setLocation(item.path, interactive=True)
|
[
"def",
"_onActivateItem",
"(",
"self",
",",
"index",
")",
":",
"item",
"=",
"self",
".",
"_filesystemWidget",
".",
"model",
"(",
")",
".",
"item",
"(",
"index",
")",
"if",
"not",
"isinstance",
"(",
"item",
",",
"riffle",
".",
"model",
".",
"File",
")",
":",
"self",
".",
"_acceptButton",
".",
"setDisabled",
"(",
"True",
")",
"self",
".",
"setLocation",
"(",
"item",
".",
"path",
",",
"interactive",
"=",
"True",
")"
] | 49.5 | 11.833333 |
def get_search_regex(query, ignore_case=True):
    """Returns a compiled regex pattern to search for query letters in order.

    Parameters
    ----------
    query : str
        String to search in another string (in order of character
        occurrence).
    ignore_case : bool
        Optional value to perform a case insensitive search (True by
        default).

    Returns
    -------
    pattern : re.Pattern
        Compiled pattern matching the query characters in order, with
        anything in between, captured as group 1.

    Notes
    -----
    Spaces in the query are discarded, every remaining character is
    regex-escaped (so queries containing '(', '.', '*', ... cannot raise
    ``re.error`` or change the match semantics), and '.*' is inserted
    between the characters before compiling.
    """
    # re.escape fixes the original defect where special characters were
    # interpolated into the pattern verbatim.
    chars = [re.escape(char) for char in query if char != ' ']
    regex = r'({0})'.format('.*'.join(chars))
    flags = re.IGNORECASE if ignore_case else 0
    return re.compile(regex, flags)
|
[
"def",
"get_search_regex",
"(",
"query",
",",
"ignore_case",
"=",
"True",
")",
":",
"regex_text",
"=",
"[",
"char",
"for",
"char",
"in",
"query",
"if",
"char",
"!=",
"' '",
"]",
"regex_text",
"=",
"'.*'",
".",
"join",
"(",
"regex_text",
")",
"regex",
"=",
"r'({0})'",
".",
"format",
"(",
"regex_text",
")",
"if",
"ignore_case",
":",
"pattern",
"=",
"re",
".",
"compile",
"(",
"regex",
",",
"re",
".",
"IGNORECASE",
")",
"else",
":",
"pattern",
"=",
"re",
".",
"compile",
"(",
"regex",
")",
"return",
"pattern"
] | 26.1 | 23.233333 |
def query(cls, volume=None, state=None, offset=None,
          limit=None, api=None):
    """
    Query (list) exports.

    :param volume: Optional volume identifier.
    :param state: Optional import state.
    :param offset: Optional pagination offset.
    :param limit: Optional pagination limit.
    :param api: Api instance; falls back to the class-level default.
    :return: Collection object.
    """
    api = api or cls._API
    # Normalize the volume argument to its identifier form when given.
    volume = Transform.to_volume(volume) if volume else volume
    return super(Export, cls)._query(
        url=cls._URL['query'], volume=volume, state=state, offset=offset,
        limit=limit, fields='_all', api=api
    )
|
[
"def",
"query",
"(",
"cls",
",",
"volume",
"=",
"None",
",",
"state",
"=",
"None",
",",
"offset",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"api",
"=",
"None",
")",
":",
"api",
"=",
"api",
"or",
"cls",
".",
"_API",
"if",
"volume",
":",
"volume",
"=",
"Transform",
".",
"to_volume",
"(",
"volume",
")",
"return",
"super",
"(",
"Export",
",",
"cls",
")",
".",
"_query",
"(",
"url",
"=",
"cls",
".",
"_URL",
"[",
"'query'",
"]",
",",
"volume",
"=",
"volume",
",",
"state",
"=",
"state",
",",
"offset",
"=",
"offset",
",",
"limit",
"=",
"limit",
",",
"fields",
"=",
"'_all'",
",",
"api",
"=",
"api",
")"
] | 29.947368 | 15.210526 |
def _does_require_deprecation(self):
"""
Check if we have to put the previous version into the deprecated list.
"""
for index, version_number in enumerate(self.current_version[0][:2]):
# We loop through the 2 last elements of the version.
if version_number > self.version_yaml[index]:
# The currently read version number is greater than the one we have in
# the version.yaml.
# We return True.
return True
# We return False, we do not need to deprecate anything.
return False
|
[
"def",
"_does_require_deprecation",
"(",
"self",
")",
":",
"for",
"index",
",",
"version_number",
"in",
"enumerate",
"(",
"self",
".",
"current_version",
"[",
"0",
"]",
"[",
":",
"2",
"]",
")",
":",
"# We loop through the 2 last elements of the version.",
"if",
"version_number",
">",
"self",
".",
"version_yaml",
"[",
"index",
"]",
":",
"# The currently read version number is greater than the one we have in",
"# the version.yaml.",
"# We return True.",
"return",
"True",
"# We return False, we do not need to deprecate anything.",
"return",
"False"
] | 35.235294 | 23.235294 |
def create_default_policies(self):
    '''**Description**
        Create a set of default policies using the current system falco
        rules file as a reference. One policy is created per falco rule,
        taking its name and description from the corresponding rule. If a
        policy already exists with the same name, nothing is added or
        modified; existing policies are left unchanged.

    **Arguments**
        - None

    **Success Return Value**
        JSON containing details on any new policies that were added.

    **Example**
        `examples/create_default_policies.py <https://github.com/draios/python-sdc-client/blob/master/examples/create_default_policies.py>`_
    '''
    endpoint = self.url + '/api/policies/createDefault'
    res = requests.post(endpoint, headers=self.hdrs, verify=self.ssl_verify)
    return self._request_result(res)
|
[
"def",
"create_default_policies",
"(",
"self",
")",
":",
"res",
"=",
"requests",
".",
"post",
"(",
"self",
".",
"url",
"+",
"'/api/policies/createDefault'",
",",
"headers",
"=",
"self",
".",
"hdrs",
",",
"verify",
"=",
"self",
".",
"ssl_verify",
")",
"return",
"self",
".",
"_request_result",
"(",
"res",
")"
] | 51.052632 | 40.526316 |
def delete_event(self, calendar_id, event_id):
    """Delete an event from the specified calendar.

    :param string calendar_id: ID of calendar to delete from.
    :param string event_id: ID of event to delete.
    """
    endpoint = 'calendars/%s/events' % calendar_id
    payload = {'event_id': event_id}
    self.request_handler.delete(endpoint=endpoint, data=payload)
|
[
"def",
"delete_event",
"(",
"self",
",",
"calendar_id",
",",
"event_id",
")",
":",
"self",
".",
"request_handler",
".",
"delete",
"(",
"endpoint",
"=",
"'calendars/%s/events'",
"%",
"calendar_id",
",",
"data",
"=",
"{",
"'event_id'",
":",
"event_id",
"}",
")"
] | 48.714286 | 22.142857 |
def upload(ui, repo, name, **opts):
    """upload diffs to the code review server

    Uploads the current modifications for a given change to the server.
    """
    # NOTE: this module is Python 2 code (print statement below).
    if codereview_disabled:
        raise hg_util.Abort(codereview_disabled)
    # Suppress Mercurial's own chatter; only our URL line is printed.
    repo.ui.quiet = True
    # web=True also fetches the change list's server-side state.
    cl, err = LoadCL(ui, repo, name, web=True)
    if err != "":
        raise hg_util.Abort(err)
    if not cl.local:
        raise hg_util.Abort("cannot upload non-local change")
    cl.Upload(ui, repo)
    # Echo the review URL of the uploaded change for the user.
    print "%s%s\n" % (server_url_base, cl.name)
    return 0
|
[
"def",
"upload",
"(",
"ui",
",",
"repo",
",",
"name",
",",
"*",
"*",
"opts",
")",
":",
"if",
"codereview_disabled",
":",
"raise",
"hg_util",
".",
"Abort",
"(",
"codereview_disabled",
")",
"repo",
".",
"ui",
".",
"quiet",
"=",
"True",
"cl",
",",
"err",
"=",
"LoadCL",
"(",
"ui",
",",
"repo",
",",
"name",
",",
"web",
"=",
"True",
")",
"if",
"err",
"!=",
"\"\"",
":",
"raise",
"hg_util",
".",
"Abort",
"(",
"err",
")",
"if",
"not",
"cl",
".",
"local",
":",
"raise",
"hg_util",
".",
"Abort",
"(",
"\"cannot upload non-local change\"",
")",
"cl",
".",
"Upload",
"(",
"ui",
",",
"repo",
")",
"print",
"\"%s%s\\n\"",
"%",
"(",
"server_url_base",
",",
"cl",
".",
"name",
")",
"return",
"0"
] | 27.294118 | 16.823529 |
def smart_open(filename_or_file, *args, **kwargs):
    '''Context manager yielding a file object for *filename_or_file*.

    If *filename_or_file* is something ``open`` accepts (a ``str``,
    ``bytes``, or ``int`` file descriptor), it is opened with the given
    *args* and **kwargs**, sent to the context, and closed when the
    context exits -- exactly like ``with open(filename) as f:``.  Any
    other object (e.g. ``sys.stdout`` or an already-open file) is sent to
    the context unchanged and deliberately left unclosed, since the
    caller owns it.  Example:

        def work_with_file(name=sys.stdout):
            with smart_open(name) as f:
                # Works correctly if name is a str filename or sys.stdout
                print("Some stuff", file=f)
            # If it was a filename, f is closed at the end here.
    '''
    openable = isinstance(filename_or_file, (str, bytes, int))
    if not openable:
        # Existing file-like object: pass through, never close it here.
        yield filename_or_file
    else:
        with open(filename_or_file, *args, **kwargs) as handle:
            yield handle
|
[
"def",
"smart_open",
"(",
"filename_or_file",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"isinstance",
"(",
"filename_or_file",
",",
"(",
"str",
",",
"bytes",
",",
"int",
")",
")",
":",
"with",
"open",
"(",
"filename_or_file",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"as",
"file",
":",
"yield",
"file",
"else",
":",
"yield",
"filename_or_file"
] | 49.227273 | 24.409091 |
def OpenEnumerateInstancePaths(self, ClassName, namespace=None,
                               FilterQueryLanguage=None, FilterQuery=None,
                               OperationTimeout=None, ContinueOnError=None,
                               MaxObjectCount=None, **extra):
    # pylint: disable=invalid-name
    """
    Open an enumeration session to enumerate the instance paths of
    instances of a class (including instances of its subclasses) in
    a namespace.

    *New in pywbem 0.9.*

    This method performs the OpenEnumerateInstancePaths operation
    (see :term:`DSP0200`). See :ref:`WBEM operations` for a list of all
    methods performing such operations.

    If the operation succeeds, this method returns status on the
    enumeration session and optionally instance paths.
    Otherwise, this method raises an exception.

    Use the :meth:`~pywbem.WBEMConnection.PullInstancePaths` method to
    retrieve the next set of instance paths or the
    :meth:`~pywbem.WBEMConnection.CloseEnumeration` method to close the
    enumeration session before it is exhausted.

    Parameters:

      ClassName (:term:`string` or :class:`~pywbem.CIMClassName`):
        Name of the class to be enumerated (case independent).
        If specified as a :class:`~pywbem.CIMClassName` object, its
        `namespace` attribute will be used as a default namespace as
        described for the `namespace` parameter, and its `host` attribute
        will be ignored.

      namespace (:term:`string`):
        Name of the CIM namespace to be used (case independent).
        Leading and trailing slash characters will be stripped. The lexical
        case will be preserved.
        If `None`, the namespace of the `ClassName` parameter will be used,
        if specified as a :class:`~pywbem.CIMClassName` object. If that is
        also `None`, the default namespace of the connection will be used.

      FilterQueryLanguage (:term:`string`):
        The name of the filter query language used for the `FilterQuery`
        parameter. The DMTF-defined Filter Query Language (see
        :term:`DSP0212`) is specified as "DMTF:FQL".
        Not all WBEM servers support filtering for this operation because
        it returns instance paths and the act of the server filtering
        requires that it generate instances just for that purpose and then
        discard them.

      FilterQuery (:term:`string`):
        The filter query in the query language defined by the
        `FilterQueryLanguage` parameter.

      OperationTimeout (:class:`~pywbem.Uint32`):
        Minimum time in seconds the WBEM Server shall maintain an open
        enumeration session after a previous Open or Pull request is
        sent to the client. Once this timeout time has expired, the
        WBEM server may close the enumeration session.

        * If not `None`, this parameter is sent to the WBEM server as the
          proposed timeout for the enumeration session. A value of 0
          indicates that the server is expected to never time out. The
          server may reject the proposed value, causing a
          :class:`~pywbem.CIMError` to be raised with status code
          :attr:`~pywbem.CIM_ERR_INVALID_OPERATION_TIMEOUT`.
        * If `None`, this parameter is not passed to the WBEM server, and
          causes the server-implemented default timeout to be used.

      ContinueOnError (:class:`py:bool`):
        Indicates to the WBEM server to continue sending responses
        after an error response has been sent.

        * If `True`, the server is to continue sending responses after
          sending an error response. Not all servers support continuation
          on error; a server that does not support it must send an error
          response if `True` was specified, causing
          :class:`~pywbem.CIMError` to be raised with status code
          :attr:`~pywbem.CIM_ERR_CONTINUATION_ON_ERROR_NOT_SUPPORTED`.
        * If `False`, the server is requested to close the enumeration after
          sending an error response.
        * If `None`, this parameter is not passed to the WBEM server, and
          causes the server-implemented default behaviour to be used.
          :term:`DSP0200` defines that the server-implemented default is
          `False`.

      MaxObjectCount (:class:`~pywbem.Uint32`)
        Maximum number of instances the WBEM server may return
        for this request.

        * If positive, the WBEM server is to return no more than the
          specified number of instances.
        * If zero, the WBEM server is to return no instances. This may
          be used by a client to leave the handling of any returned
          instances to a loop of Pull operations.
        * If `None`, this parameter is not passed to the WBEM server, and
          causes the server-implemented default behaviour to be used.
          :term:`DSP0200` defines that the server-implemented default is
          to return zero instances.

      **extra :
        Additional keyword arguments are passed as additional operation
        parameters to the WBEM server.
        Note that :term:`DSP0200` does not define any additional parameters
        for this operation.

    Returns:

        A :func:`~py:collections.namedtuple` object containing the following
        named items:

        * **paths** (:class:`py:list` of :class:`~pywbem.CIMInstanceName`):
          Representations of the retrieved instance paths, with their
          attributes set as follows:

          * `classname`: Name of the creation class of the instance.
          * `keybindings`: Keybindings of the instance.
          * `namespace`: Name of the CIM namespace containing the instance.
          * `host`: Host and optionally port of the WBEM server containing
            the CIM namespace.

        * **eos** (:class:`py:bool`):
          Indicates whether the enumeration session is exhausted after
          this operation:

          - If `True`, the enumeration session is exhausted, and the
            server has closed the enumeration session.
          - If `False`, the enumeration session is not exhausted and the
            `context` item is the context object for the next operation on
            the enumeration session.

        * **context** (:func:`py:tuple` of server_context, namespace):
          A context object identifying the open enumeration session,
          including its current enumeration state, and the namespace. This
          object must be supplied with the next pull or close operation for
          this enumeration session.

          The tuple items are:

          * server_context (:term:`string`):
            Enumeration context string returned by the server if
            the session is not exhausted, or `None` otherwise. This string
            is opaque for the client.
          * namespace (:term:`string`):
            Name of the CIM namespace that was used for this operation.

          NOTE: This inner tuple hides the need for a CIM namespace
          on subsequent operations in the enumeration session. CIM
          operations always require target namespace, but it never
          makes sense to specify a different one in subsequent
          operations on the same enumeration session.

    Raises:

        Exceptions described in :class:`~pywbem.WBEMConnection`.

    Example::

        max_object_count = 100
        rslt_tuple = conn.OpenEnumerateInstancePaths(
            'CIM_Blah', MaxObjectCount=max_object_count)
        paths = rslt_tuple.paths
        while not rslt_tuple.eos:
            rslt_tuple = conn.PullInstancePaths(rslt_tupl.context,
                                                max_object_count)
            paths.extend(rslt_tupl.paths)
        for path in paths:
            print('path {0}'.format(path))
    """
    exc = None
    result_tuple = None
    method_name = 'OpenEnumerateInstancePaths'
    # Stage the call arguments with any attached operation recorders
    # before issuing the request, so failed calls are recorded too.
    if self._operation_recorders:
        self.operation_recorder_reset(pull_op=True)
        self.operation_recorder_stage_pywbem_args(
            method=method_name,
            ClassName=ClassName,
            namespace=namespace,
            FilterQueryLanguage=FilterQueryLanguage,
            FilterQuery=FilterQuery,
            OperationTimeout=OperationTimeout,
            ContinueOnError=ContinueOnError,
            MaxObjectCount=MaxObjectCount,
            **extra)
    try:
        stats = self.statistics.start_timer(method_name)
        # Default the namespace from a CIMClassName argument, then
        # normalize both namespace and class name into operation params.
        if namespace is None and isinstance(ClassName, CIMClassName):
            namespace = ClassName.namespace
        namespace = self._iparam_namespace_from_namespace(namespace)
        classname = self._iparam_classname(ClassName, 'ClassName')
        result = self._imethodcall(
            method_name,
            namespace,
            ClassName=classname,
            FilterQueryLanguage=FilterQueryLanguage,
            FilterQuery=FilterQuery,
            OperationTimeout=OperationTimeout,
            ContinueOnError=ContinueOnError,
            MaxObjectCount=MaxObjectCount,
            has_out_params=True,
            **extra)
        result_tuple = pull_path_result_tuple(
            *self._get_rslt_params(result, namespace))
        return result_tuple
    except (CIMXMLParseError, XMLParseError) as exce:
        # Attach the raw request/reply to XML parse errors for diagnosis.
        exce.request_data = self.last_raw_request
        exce.response_data = self.last_raw_reply
        exc = exce
        raise
    except Exception as exce:
        # Remember the exception for the statistics/recorder bookkeeping
        # in the finally block, then re-raise unchanged.
        exc = exce
        raise
    finally:
        # Always stop the timer and record the outcome (success or error).
        self._last_operation_time = stats.stop_timer(
            self.last_request_len, self.last_reply_len,
            self.last_server_response_time, exc)
        if self._operation_recorders:
            self.operation_recorder_stage_result(result_tuple, exc)
|
[
"def",
"OpenEnumerateInstancePaths",
"(",
"self",
",",
"ClassName",
",",
"namespace",
"=",
"None",
",",
"FilterQueryLanguage",
"=",
"None",
",",
"FilterQuery",
"=",
"None",
",",
"OperationTimeout",
"=",
"None",
",",
"ContinueOnError",
"=",
"None",
",",
"MaxObjectCount",
"=",
"None",
",",
"*",
"*",
"extra",
")",
":",
"# pylint: disable=invalid-name",
"exc",
"=",
"None",
"result_tuple",
"=",
"None",
"method_name",
"=",
"'OpenEnumerateInstancePaths'",
"if",
"self",
".",
"_operation_recorders",
":",
"self",
".",
"operation_recorder_reset",
"(",
"pull_op",
"=",
"True",
")",
"self",
".",
"operation_recorder_stage_pywbem_args",
"(",
"method",
"=",
"method_name",
",",
"ClassName",
"=",
"ClassName",
",",
"namespace",
"=",
"namespace",
",",
"FilterQueryLanguage",
"=",
"FilterQueryLanguage",
",",
"FilterQuery",
"=",
"FilterQuery",
",",
"OperationTimeout",
"=",
"OperationTimeout",
",",
"ContinueOnError",
"=",
"ContinueOnError",
",",
"MaxObjectCount",
"=",
"MaxObjectCount",
",",
"*",
"*",
"extra",
")",
"try",
":",
"stats",
"=",
"self",
".",
"statistics",
".",
"start_timer",
"(",
"method_name",
")",
"if",
"namespace",
"is",
"None",
"and",
"isinstance",
"(",
"ClassName",
",",
"CIMClassName",
")",
":",
"namespace",
"=",
"ClassName",
".",
"namespace",
"namespace",
"=",
"self",
".",
"_iparam_namespace_from_namespace",
"(",
"namespace",
")",
"classname",
"=",
"self",
".",
"_iparam_classname",
"(",
"ClassName",
",",
"'ClassName'",
")",
"result",
"=",
"self",
".",
"_imethodcall",
"(",
"method_name",
",",
"namespace",
",",
"ClassName",
"=",
"classname",
",",
"FilterQueryLanguage",
"=",
"FilterQueryLanguage",
",",
"FilterQuery",
"=",
"FilterQuery",
",",
"OperationTimeout",
"=",
"OperationTimeout",
",",
"ContinueOnError",
"=",
"ContinueOnError",
",",
"MaxObjectCount",
"=",
"MaxObjectCount",
",",
"has_out_params",
"=",
"True",
",",
"*",
"*",
"extra",
")",
"result_tuple",
"=",
"pull_path_result_tuple",
"(",
"*",
"self",
".",
"_get_rslt_params",
"(",
"result",
",",
"namespace",
")",
")",
"return",
"result_tuple",
"except",
"(",
"CIMXMLParseError",
",",
"XMLParseError",
")",
"as",
"exce",
":",
"exce",
".",
"request_data",
"=",
"self",
".",
"last_raw_request",
"exce",
".",
"response_data",
"=",
"self",
".",
"last_raw_reply",
"exc",
"=",
"exce",
"raise",
"except",
"Exception",
"as",
"exce",
":",
"exc",
"=",
"exce",
"raise",
"finally",
":",
"self",
".",
"_last_operation_time",
"=",
"stats",
".",
"stop_timer",
"(",
"self",
".",
"last_request_len",
",",
"self",
".",
"last_reply_len",
",",
"self",
".",
"last_server_response_time",
",",
"exc",
")",
"if",
"self",
".",
"_operation_recorders",
":",
"self",
".",
"operation_recorder_stage_result",
"(",
"result_tuple",
",",
"exc",
")"
] | 45.0131 | 24.078603 |
def _pad_bytes_new(name, length):
"""
Takes a bytes instance and pads it with null bytes until it's length chars.
"""
if isinstance(name, str):
name = bytes(name, 'utf-8')
return name + b'\x00' * (length - len(name))
|
[
"def",
"_pad_bytes_new",
"(",
"name",
",",
"length",
")",
":",
"if",
"isinstance",
"(",
"name",
",",
"str",
")",
":",
"name",
"=",
"bytes",
"(",
"name",
",",
"'utf-8'",
")",
"return",
"name",
"+",
"b'\\x00'",
"*",
"(",
"length",
"-",
"len",
"(",
"name",
")",
")"
] | 34 | 10 |
def _authenticate_https(org):
    """Try authenticating via HTTPS, if successful yields User, otherwise raises Error.

    Context manager / generator: credentials are read from git's credential
    cache when available, otherwise the user is prompted; validated credentials
    are written back to the cache. On any exception while the context is
    active, the cached credentials are forgotten via ``logout()``.

    :param org: GitHub organization name used to build the user's repo URL
    """
    _CREDENTIAL_SOCKET.parent.mkdir(mode=0o700, exist_ok=True)
    try:
        Git.cache = f"-c credential.helper= -c credential.helper='cache --socket {_CREDENTIAL_SOCKET}'"
        git = Git(Git.cache)
        # Get credentials from cache if possible
        with _spawn(git("credential fill"), quiet=True) as child:
            child.sendline("protocol=https")
            child.sendline("host=github.com")
            child.sendline("")
            # Three possible responses: a username prompt, a password prompt,
            # or a fully filled-in username/password pair (cache hit).
            i = child.expect(["Username for '.+'", "Password for '.+'",
                              "username=([^\r]+)\r\npassword=([^\r]+)\r\n"])
            if i == 2:
                username, password = child.match.groups()
            else:
                username = password = None
            child.close()
            child.exitstatus = 0
        # No credentials found, need to ask user
        if password is None:
            username = _prompt_username(_("GitHub username: "))
            password = _prompt_password(_("GitHub password: "))
        # Check if credentials are correct
        res = requests.get("https://api.github.com/user", auth=(username, password))
        # Check for 2-factor authentication https://developer.github.com/v3/auth/#working-with-two-factor-authentication
        if "X-GitHub-OTP" in res.headers:
            raise Error("Looks like you have two-factor authentication enabled!"
                        " Please generate a personal access token and use it as your password."
                        " See https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line for more info.")
        if res.status_code != 200:
            logger.info(res.headers)
            logger.info(res.text)
            raise Error(_("Invalid username and/or password.") if res.status_code ==
                        401 else _("Could not authenticate user."))
        # Canonicalize (capitalization of) username,
        # Especially if user logged in via email address
        username = res.json()["login"]
        # Credentials are correct, best cache them
        with _spawn(git("-c credentialcache.ignoresighup=true credential approve"), quiet=True) as child:
            child.sendline("protocol=https")
            child.sendline("host=github.com")
            child.sendline(f"path={org}/{username}")
            child.sendline(f"username={username}")
            child.sendline(f"password={password}")
            child.sendline("")
        yield User(name=username,
                   repo=f"https://{username}@github.com/{org}/{username}")
    except BaseException:
        # Some error occured while this context manager is active, best forget credentials.
        logout()
        raise
|
[
"def",
"_authenticate_https",
"(",
"org",
")",
":",
"_CREDENTIAL_SOCKET",
".",
"parent",
".",
"mkdir",
"(",
"mode",
"=",
"0o700",
",",
"exist_ok",
"=",
"True",
")",
"try",
":",
"Git",
".",
"cache",
"=",
"f\"-c credential.helper= -c credential.helper='cache --socket {_CREDENTIAL_SOCKET}'\"",
"git",
"=",
"Git",
"(",
"Git",
".",
"cache",
")",
"# Get credentials from cache if possible",
"with",
"_spawn",
"(",
"git",
"(",
"\"credential fill\"",
")",
",",
"quiet",
"=",
"True",
")",
"as",
"child",
":",
"child",
".",
"sendline",
"(",
"\"protocol=https\"",
")",
"child",
".",
"sendline",
"(",
"\"host=github.com\"",
")",
"child",
".",
"sendline",
"(",
"\"\"",
")",
"i",
"=",
"child",
".",
"expect",
"(",
"[",
"\"Username for '.+'\"",
",",
"\"Password for '.+'\"",
",",
"\"username=([^\\r]+)\\r\\npassword=([^\\r]+)\\r\\n\"",
"]",
")",
"if",
"i",
"==",
"2",
":",
"username",
",",
"password",
"=",
"child",
".",
"match",
".",
"groups",
"(",
")",
"else",
":",
"username",
"=",
"password",
"=",
"None",
"child",
".",
"close",
"(",
")",
"child",
".",
"exitstatus",
"=",
"0",
"# No credentials found, need to ask user",
"if",
"password",
"is",
"None",
":",
"username",
"=",
"_prompt_username",
"(",
"_",
"(",
"\"GitHub username: \"",
")",
")",
"password",
"=",
"_prompt_password",
"(",
"_",
"(",
"\"GitHub password: \"",
")",
")",
"# Check if credentials are correct",
"res",
"=",
"requests",
".",
"get",
"(",
"\"https://api.github.com/user\"",
",",
"auth",
"=",
"(",
"username",
",",
"password",
")",
")",
"# Check for 2-factor authentication https://developer.github.com/v3/auth/#working-with-two-factor-authentication",
"if",
"\"X-GitHub-OTP\"",
"in",
"res",
".",
"headers",
":",
"raise",
"Error",
"(",
"\"Looks like you have two-factor authentication enabled!\"",
"\" Please generate a personal access token and use it as your password.\"",
"\" See https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line for more info.\"",
")",
"if",
"res",
".",
"status_code",
"!=",
"200",
":",
"logger",
".",
"info",
"(",
"res",
".",
"headers",
")",
"logger",
".",
"info",
"(",
"res",
".",
"text",
")",
"raise",
"Error",
"(",
"_",
"(",
"\"Invalid username and/or password.\"",
")",
"if",
"res",
".",
"status_code",
"==",
"401",
"else",
"_",
"(",
"\"Could not authenticate user.\"",
")",
")",
"# Canonicalize (capitalization of) username,",
"# Especially if user logged in via email address",
"username",
"=",
"res",
".",
"json",
"(",
")",
"[",
"\"login\"",
"]",
"# Credentials are correct, best cache them",
"with",
"_spawn",
"(",
"git",
"(",
"\"-c credentialcache.ignoresighup=true credential approve\"",
")",
",",
"quiet",
"=",
"True",
")",
"as",
"child",
":",
"child",
".",
"sendline",
"(",
"\"protocol=https\"",
")",
"child",
".",
"sendline",
"(",
"\"host=github.com\"",
")",
"child",
".",
"sendline",
"(",
"f\"path={org}/{username}\"",
")",
"child",
".",
"sendline",
"(",
"f\"username={username}\"",
")",
"child",
".",
"sendline",
"(",
"f\"password={password}\"",
")",
"child",
".",
"sendline",
"(",
"\"\"",
")",
"yield",
"User",
"(",
"name",
"=",
"username",
",",
"repo",
"=",
"f\"https://{username}@github.com/{org}/{username}\"",
")",
"except",
"BaseException",
":",
"# Some error occured while this context manager is active, best forget credentials.",
"logout",
"(",
")",
"raise"
] | 46.266667 | 23.95 |
def auto_override_module(md, force_recursive = False):
    """Works like auto_override, but is only applicable to modules (by explicit call).

    md must be a module or a module name contained in sys.modules.

    :param md: a module object, or the name of a module in ``sys.modules``
    :param force_recursive: if True, also process members marked with
        no_type_check
    :return: the module ``md`` (or the name, if the module is still pending)
    """
    if not pytypes.checking_enabled:
        return md
    if isinstance(md, str):
        if md in sys.modules:
            md = sys.modules[md]
            if md is None:
                return md
        elif md in _pending_modules:
            # if import is pending, we just store this call for later
            _pending_modules[md].append(lambda t: auto_override_module(t, True))
            return md
    assert(ismodule(md))
    if md.__name__ in _pending_modules:
        # if import is pending, we just store this call for later
        _pending_modules[md.__name__].append(lambda t: auto_override_module(t, True))
        # we already process the module now as far as possible for its internal use
        # todo: Issue warning here that not the whole module might be covered yet
    if md.__name__ in _auto_override_modules and \
            _auto_override_modules[md.__name__] == len(md.__dict__):
        # Already processed and the module has not gained new members since.
        return md
    # To play it safe we avoid to modify the dict while iterating over it,
    # so we previously cache keys.
    # For this we don't use keys() because of Python 3.
    # Todo: Better use inspect.getmembers here
    keys = [key for key in md.__dict__]
    for key in keys:
        memb = md.__dict__[key]
        if force_recursive or not is_no_type_check(memb):
            # Only override classes that are actually defined in this module.
            if isclass(memb) and memb.__module__ == md.__name__:
                auto_override_class(memb, force_recursive, force_recursive)
    if not md.__name__ in _pending_modules:
        # Record the member count so unchanged modules are skipped next time.
        _auto_override_modules[md.__name__] = len(md.__dict__)
    return md
|
[
"def",
"auto_override_module",
"(",
"md",
",",
"force_recursive",
"=",
"False",
")",
":",
"if",
"not",
"pytypes",
".",
"checking_enabled",
":",
"return",
"md",
"if",
"isinstance",
"(",
"md",
",",
"str",
")",
":",
"if",
"md",
"in",
"sys",
".",
"modules",
":",
"md",
"=",
"sys",
".",
"modules",
"[",
"md",
"]",
"if",
"md",
"is",
"None",
":",
"return",
"md",
"elif",
"md",
"in",
"_pending_modules",
":",
"# if import is pending, we just store this call for later",
"_pending_modules",
"[",
"md",
"]",
".",
"append",
"(",
"lambda",
"t",
":",
"auto_override_module",
"(",
"t",
",",
"True",
")",
")",
"return",
"md",
"assert",
"(",
"ismodule",
"(",
"md",
")",
")",
"if",
"md",
".",
"__name__",
"in",
"_pending_modules",
":",
"# if import is pending, we just store this call for later",
"_pending_modules",
"[",
"md",
".",
"__name__",
"]",
".",
"append",
"(",
"lambda",
"t",
":",
"auto_override_module",
"(",
"t",
",",
"True",
")",
")",
"# we already process the module now as far as possible for its internal use",
"# todo: Issue warning here that not the whole module might be covered yet",
"if",
"md",
".",
"__name__",
"in",
"_auto_override_modules",
"and",
"_auto_override_modules",
"[",
"md",
".",
"__name__",
"]",
"==",
"len",
"(",
"md",
".",
"__dict__",
")",
":",
"return",
"md",
"# To play it safe we avoid to modify the dict while iterating over it,",
"# so we previously cache keys.",
"# For this we don't use keys() because of Python 3.",
"# Todo: Better use inspect.getmembers here",
"keys",
"=",
"[",
"key",
"for",
"key",
"in",
"md",
".",
"__dict__",
"]",
"for",
"key",
"in",
"keys",
":",
"memb",
"=",
"md",
".",
"__dict__",
"[",
"key",
"]",
"if",
"force_recursive",
"or",
"not",
"is_no_type_check",
"(",
"memb",
")",
":",
"if",
"isclass",
"(",
"memb",
")",
"and",
"memb",
".",
"__module__",
"==",
"md",
".",
"__name__",
":",
"auto_override_class",
"(",
"memb",
",",
"force_recursive",
",",
"force_recursive",
")",
"if",
"not",
"md",
".",
"__name__",
"in",
"_pending_modules",
":",
"_auto_override_modules",
"[",
"md",
".",
"__name__",
"]",
"=",
"len",
"(",
"md",
".",
"__dict__",
")",
"return",
"md"
] | 47.351351 | 18.567568 |
def contains_point(self, x, y):
    """Check whether (x, y) lies on the curve y^2 = x^3 + a*x + b (mod p).

    The point at infinity, represented as (None, None), is always on the
    curve.

    :param x: x coordinate of a point
    :param y: y coordinate of a point
    :returns: True if the point (x, y) is on the curve, False otherwise
    """
    if x is None and y is None:
        return True
    lhs = y * y
    rhs = x * x * x + self._a * x + self._b
    return (lhs - rhs) % self._p == 0
|
[
"def",
"contains_point",
"(",
"self",
",",
"x",
",",
"y",
")",
":",
"if",
"x",
"is",
"None",
"and",
"y",
"is",
"None",
":",
"return",
"True",
"return",
"(",
"y",
"*",
"y",
"-",
"(",
"x",
"*",
"x",
"*",
"x",
"+",
"self",
".",
"_a",
"*",
"x",
"+",
"self",
".",
"_b",
")",
")",
"%",
"self",
".",
"_p",
"==",
"0"
] | 38.111111 | 11.444444 |
def get(self, max_lines=None):
    """Collect and return every log line produced since the last run.

    :param max_lines: optional cap on how many lines are fetched
    :returns: list of log rows
    """
    collected = []
    self.get_fn(collected.append, max_lines=max_lines)
    return collected
|
[
"def",
"get",
"(",
"self",
",",
"max_lines",
"=",
"None",
")",
":",
"rows",
"=",
"[",
"]",
"self",
".",
"get_fn",
"(",
"lambda",
"row",
":",
"rows",
".",
"append",
"(",
"row",
")",
",",
"max_lines",
"=",
"max_lines",
")",
"return",
"rows"
] | 26.5 | 20.5 |
def set_inode(self, ino):
    # type: (inode.Inode) -> None
    '''
    Associate an Inode with this El Torito Entry.
    Parameters:
     ino - The Inode object corresponding to this entry.
    Returns:
     Nothing.
    '''
    if self._initialized:
        self.inode = ino
    else:
        raise pycdlibexception.PyCdlibInternalError('El Torito Entry not yet initialized')
|
[
"def",
"set_inode",
"(",
"self",
",",
"ino",
")",
":",
"# type: (inode.Inode) -> None",
"if",
"not",
"self",
".",
"_initialized",
":",
"raise",
"pycdlibexception",
".",
"PyCdlibInternalError",
"(",
"'El Torito Entry not yet initialized'",
")",
"self",
".",
"inode",
"=",
"ino"
] | 32.153846 | 24 |
def create_postgresql_psycopg2cffi(self, **kwargs):
    """Create an engine for PostgreSQL via the psycopg2cffi driver.

    :rtype: Engine
    """
    conn_str = self._ccs(self.DialectAndDriver.psql_psycopg2cffi)
    return self._ce(conn_str, **kwargs)
|
[
"def",
"create_postgresql_psycopg2cffi",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"_ce",
"(",
"self",
".",
"_ccs",
"(",
"self",
".",
"DialectAndDriver",
".",
"psql_psycopg2cffi",
")",
",",
"*",
"*",
"kwargs",
")"
] | 28.571429 | 15.428571 |
def require_valid_type(value, *classes):
    """
    Checks that the specified object reference is instance of classes and
    throws a :py:class:`TypeError` if it is not.
    A ``None`` value is always accepted.
    :param value: The object.
    :type value: object
    :param classes: The classes.
    :type classes: list(class)
    """
    # isinstance accepts a tuple of classes, matching the original
    # loop-and-break membership test exactly (including the empty-tuple case).
    if value is not None and not isinstance(value, classes):
        raise TypeError()
|
[
"def",
"require_valid_type",
"(",
"value",
",",
"*",
"classes",
")",
":",
"if",
"value",
"is",
"not",
"None",
":",
"valid",
"=",
"False",
"for",
"auxiliar_class",
"in",
"classes",
":",
"if",
"isinstance",
"(",
"value",
",",
"auxiliar_class",
")",
":",
"valid",
"=",
"True",
"break",
"if",
"not",
"valid",
":",
"raise",
"TypeError",
"(",
")"
] | 27.421053 | 14.368421 |
def multiply(multiplicand: list, multiplier: list) -> list:
    """Multiply two matrices given as nested lists.

    :param multiplicand: left matrix, shape (m, n)
    :param multiplier: right matrix, shape (n, p)
    :rtype: new matrix of shape (m, p) as a list of lists
    :raises ValueError: if the inner dimensions do not match
    """
    multiplicand_row, multiplicand_col = len(
        multiplicand), len(multiplicand[0])
    multiplier_row, multiplier_col = len(multiplier), len(multiplier[0])
    if multiplicand_col != multiplier_row:
        # ValueError (a subclass of the previously raised bare Exception)
        # is the idiomatic error for incompatible argument values.
        raise ValueError(
            "Multiplicand matrix not compatible with Multiplier matrix.")
    # Accumulate the dot product of each row of the multiplicand with each
    # column of the multiplier.
    result = [[0] * multiplier_col for _ in range(multiplicand_row)]
    for i in range(multiplicand_row):
        for j in range(multiplier_col):
            for k in range(multiplier_row):
                result[i][j] += multiplicand[i][k] * multiplier[k][j]
    return result
|
[
"def",
"multiply",
"(",
"multiplicand",
":",
"list",
",",
"multiplier",
":",
"list",
")",
"->",
"list",
":",
"multiplicand_row",
",",
"multiplicand_col",
"=",
"len",
"(",
"multiplicand",
")",
",",
"len",
"(",
"multiplicand",
"[",
"0",
"]",
")",
"multiplier_row",
",",
"multiplier_col",
"=",
"len",
"(",
"multiplier",
")",
",",
"len",
"(",
"multiplier",
"[",
"0",
"]",
")",
"if",
"(",
"multiplicand_col",
"!=",
"multiplier_row",
")",
":",
"raise",
"Exception",
"(",
"\"Multiplicand matrix not compatible with Multiplier matrix.\"",
")",
"# create a result matrix",
"result",
"=",
"[",
"[",
"0",
"]",
"*",
"multiplier_col",
"for",
"i",
"in",
"range",
"(",
"multiplicand_row",
")",
"]",
"for",
"i",
"in",
"range",
"(",
"multiplicand_row",
")",
":",
"for",
"j",
"in",
"range",
"(",
"multiplier_col",
")",
":",
"for",
"k",
"in",
"range",
"(",
"len",
"(",
"multiplier",
")",
")",
":",
"result",
"[",
"i",
"]",
"[",
"j",
"]",
"+=",
"multiplicand",
"[",
"i",
"]",
"[",
"k",
"]",
"*",
"multiplier",
"[",
"k",
"]",
"[",
"j",
"]",
"return",
"result"
] | 39.894737 | 13.052632 |
def _construct_output_to_match(output_block):
"""Transform a ConstructResult block into a MATCH query string."""
output_block.validate()
selections = (
u'%s AS `%s`' % (output_block.fields[key].to_match(), key)
for key in sorted(output_block.fields.keys()) # Sort keys for deterministic output order.
)
return u'SELECT %s FROM' % (u', '.join(selections),)
|
[
"def",
"_construct_output_to_match",
"(",
"output_block",
")",
":",
"output_block",
".",
"validate",
"(",
")",
"selections",
"=",
"(",
"u'%s AS `%s`'",
"%",
"(",
"output_block",
".",
"fields",
"[",
"key",
"]",
".",
"to_match",
"(",
")",
",",
"key",
")",
"for",
"key",
"in",
"sorted",
"(",
"output_block",
".",
"fields",
".",
"keys",
"(",
")",
")",
"# Sort keys for deterministic output order.",
")",
"return",
"u'SELECT %s FROM'",
"%",
"(",
"u', '",
".",
"join",
"(",
"selections",
")",
",",
")"
] | 38.5 | 25.5 |
def deployments(namespace='default', **kwargs):
    '''
    Return a list of kubernetes deployments defined in the namespace
    Returns None if the API reports 404 for the namespace.
    CLI Examples::
        salt '*' kubernetes.deployments
        salt '*' kubernetes.deployments namespace=default
    '''
    # kwargs carry connection settings; _setup_conn returns the config that
    # _cleanup needs to release afterwards.
    cfg = _setup_conn(**kwargs)
    try:
        api_instance = kubernetes.client.ExtensionsV1beta1Api()
        api_response = api_instance.list_namespaced_deployment(namespace)
        return [dep['metadata']['name'] for dep in api_response.to_dict().get('items')]
    except (ApiException, HTTPError) as exc:
        # A 404 means the namespace/resource does not exist; report that as
        # None instead of raising.
        if isinstance(exc, ApiException) and exc.status == 404:
            return None
        else:
            log.exception(
                'Exception when calling '
                'ExtensionsV1beta1Api->list_namespaced_deployment'
            )
            raise CommandExecutionError(exc)
    finally:
        # Always release connection-setup resources, even on error.
        _cleanup(**cfg)
|
[
"def",
"deployments",
"(",
"namespace",
"=",
"'default'",
",",
"*",
"*",
"kwargs",
")",
":",
"cfg",
"=",
"_setup_conn",
"(",
"*",
"*",
"kwargs",
")",
"try",
":",
"api_instance",
"=",
"kubernetes",
".",
"client",
".",
"ExtensionsV1beta1Api",
"(",
")",
"api_response",
"=",
"api_instance",
".",
"list_namespaced_deployment",
"(",
"namespace",
")",
"return",
"[",
"dep",
"[",
"'metadata'",
"]",
"[",
"'name'",
"]",
"for",
"dep",
"in",
"api_response",
".",
"to_dict",
"(",
")",
".",
"get",
"(",
"'items'",
")",
"]",
"except",
"(",
"ApiException",
",",
"HTTPError",
")",
"as",
"exc",
":",
"if",
"isinstance",
"(",
"exc",
",",
"ApiException",
")",
"and",
"exc",
".",
"status",
"==",
"404",
":",
"return",
"None",
"else",
":",
"log",
".",
"exception",
"(",
"'Exception when calling '",
"'ExtensionsV1beta1Api->list_namespaced_deployment'",
")",
"raise",
"CommandExecutionError",
"(",
"exc",
")",
"finally",
":",
"_cleanup",
"(",
"*",
"*",
"cfg",
")"
] | 33.576923 | 22.807692 |
def sanitize_line(txt: str) -> str:
    """
    Fixes common mistakes with 'new line' signifiers so that they can be recognized
    """
    # Replace the first occurrence of each known typo with its canonical form.
    for typo in LINE_FIXES:
        pos = txt.find(typo)
        if pos > -1:
            txt = txt[:pos] + LINE_FIXES[typo] + txt[pos + len(typo):]
    # Fix when space is missing following new line signifiers
    for signifier in ('BECMG', 'TEMPO'):
        if signifier in txt and signifier + ' ' not in txt:
            insert_at = txt.find(signifier) + len(signifier)
            txt = txt[:insert_at] + ' ' + txt[insert_at:]
    return txt
|
[
"def",
"sanitize_line",
"(",
"txt",
":",
"str",
")",
"->",
"str",
":",
"for",
"key",
"in",
"LINE_FIXES",
":",
"index",
"=",
"txt",
".",
"find",
"(",
"key",
")",
"if",
"index",
">",
"-",
"1",
":",
"txt",
"=",
"txt",
"[",
":",
"index",
"]",
"+",
"LINE_FIXES",
"[",
"key",
"]",
"+",
"txt",
"[",
"index",
"+",
"len",
"(",
"key",
")",
":",
"]",
"# Fix when space is missing following new line signifiers",
"for",
"item",
"in",
"[",
"'BECMG'",
",",
"'TEMPO'",
"]",
":",
"if",
"item",
"in",
"txt",
"and",
"item",
"+",
"' '",
"not",
"in",
"txt",
":",
"index",
"=",
"txt",
".",
"find",
"(",
"item",
")",
"+",
"len",
"(",
"item",
")",
"txt",
"=",
"txt",
"[",
":",
"index",
"]",
"+",
"' '",
"+",
"txt",
"[",
"index",
":",
"]",
"return",
"txt"
] | 38.214286 | 14.214286 |
def print_list(cls, l, output='table'):
    """Render a list in one of several output formats.

    :param l: the list to render
    :param output: one of 'table', 'csv', 'dict', 'json', 'yaml', 'txt'
    :return: the rendered representation (None for unknown formats)
    """
    def as_dict(items):
        """Map each index to its item, for the dict-based formats."""
        return {idx: item for idx, item in enumerate(items)}
    if output == 'table':
        table = PrettyTable(["Index", "Host"])
        for idx, item in enumerate(l):
            table.add_row([idx, item])
        table.align = "l"
        table.align["Index"] = "r"
        return table
    if output == 'csv':
        return ",".join(l)
    if output == 'dict':
        return as_dict(l)
    if output == 'json':
        return json.dumps(as_dict(l), indent=4)
    if output == 'yaml':
        return yaml.dump(as_dict(l), default_flow_style=False)
    if output == 'txt':
        return "\n".join(l)
|
[
"def",
"print_list",
"(",
"cls",
",",
"l",
",",
"output",
"=",
"'table'",
")",
":",
"def",
"dict_from_list",
"(",
"l",
")",
":",
"\"\"\"\n returns a dict from a list for printing\n :param l: the list\n :return: \n \"\"\"",
"d",
"=",
"dict",
"(",
"[",
"(",
"idx",
",",
"item",
")",
"for",
"idx",
",",
"item",
"in",
"enumerate",
"(",
"l",
")",
"]",
")",
"return",
"d",
"if",
"output",
"==",
"'table'",
":",
"x",
"=",
"PrettyTable",
"(",
"[",
"\"Index\"",
",",
"\"Host\"",
"]",
")",
"for",
"(",
"idx",
",",
"item",
")",
"in",
"enumerate",
"(",
"l",
")",
":",
"x",
".",
"add_row",
"(",
"[",
"idx",
",",
"item",
"]",
")",
"x",
".",
"align",
"=",
"\"l\"",
"x",
".",
"align",
"[",
"\"Index\"",
"]",
"=",
"\"r\"",
"return",
"x",
"elif",
"output",
"==",
"'csv'",
":",
"return",
"\",\"",
".",
"join",
"(",
"l",
")",
"elif",
"output",
"==",
"'dict'",
":",
"d",
"=",
"dict_from_list",
"(",
"l",
")",
"return",
"d",
"elif",
"output",
"==",
"'json'",
":",
"d",
"=",
"dict_from_list",
"(",
"l",
")",
"result",
"=",
"json",
".",
"dumps",
"(",
"d",
",",
"indent",
"=",
"4",
")",
"return",
"result",
"elif",
"output",
"==",
"'yaml'",
":",
"d",
"=",
"dict_from_list",
"(",
"l",
")",
"result",
"=",
"yaml",
".",
"dump",
"(",
"d",
",",
"default_flow_style",
"=",
"False",
")",
"return",
"result",
"elif",
"output",
"==",
"'txt'",
":",
"return",
"\"\\n\"",
".",
"join",
"(",
"l",
")"
] | 29.282051 | 12.153846 |
def group(text, size):
    """Split ``text`` into consecutive chunks of ``size`` characters.

    Example:
        >>> group("test", 2)
        ['te', 'st']

    Args:
        text (str): text to separate
        size (int): size of groups to split the text into

    Returns:
        List of n-sized groups of text (the last one may be shorter)

    Raises:
        ValueError: If n is non positive
    """
    if size <= 0:
        raise ValueError("n must be a positive integer")
    chunks = []
    for start in range(0, len(text), size):
        chunks.append(text[start:start + size])
    return chunks
|
[
"def",
"group",
"(",
"text",
",",
"size",
")",
":",
"if",
"size",
"<=",
"0",
":",
"raise",
"ValueError",
"(",
"\"n must be a positive integer\"",
")",
"return",
"[",
"text",
"[",
"i",
":",
"i",
"+",
"size",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"text",
")",
",",
"size",
")",
"]"
] | 22.619048 | 21.52381 |
def convert_to_sympy_matrix(expr, full_space=None):
    """Convert a QNET expression to an explicit ``n x n`` instance of
    `sympy.Matrix`, where ``n`` is the dimension of `full_space`. The entries
    of the matrix may contain symbols.
    Parameters:
        expr: a QNET expression
        full_space (qnet.algebra.hilbert_space_algebra.HilbertSpace): The
            Hilbert space in which `expr` is defined. If not given,
            ``expr.space`` is used. The Hilbert space must have a well-defined
            basis.
    Raises:
        qnet.algebra.hilbert_space_algebra.BasisNotSetError: if `full_space`
            does not have a defined basis
        ValueError: if `expr` is not in `full_space`, or if `expr` cannot be
            converted.
    """
    if full_space is None:
        full_space = expr.space
    if not expr.space.is_tensor_factor_of(full_space):
        raise ValueError("expr must be in full_space")
    if expr is IdentityOperator:
        return sympy.eye(full_space.dimension)
    elif expr is ZeroOperator:
        return 0
    elif isinstance(expr, LocalOperator):
        n = full_space.dimension
        if full_space != expr.space:
            # Embed the local operator into the full space by tensoring it
            # with identity matrices on every other local factor.
            all_spaces = full_space.local_factors
            own_space_index = all_spaces.index(expr.space)
            factors = [sympy.eye(s.dimension)
                       for s in all_spaces[:own_space_index]]
            factors.append(convert_to_sympy_matrix(expr, expr.space))
            factors.extend([sympy.eye(s.dimension)
                            for s in all_spaces[own_space_index + 1:]])
            return tensor(*factors)
        # Dispatch on the concrete local-operator type.
        if isinstance(expr, (Create, Jz, Jplus)):
            return SympyCreate(n)
        elif isinstance(expr, (Destroy, Jminus)):
            return SympyCreate(n).H
        elif isinstance(expr, Phase):
            # Diagonal matrix with entries exp(i * k * phi), k = 0..n-1.
            phi = expr.phase
            result = sympy.zeros(n)
            for i in range(n):
                result[i, i] = sympy.exp(sympy.I * i * phi)
            return result
        elif isinstance(expr, Displace):
            alpha = expr.operands[1]
            a = SympyCreate(n)
            return (alpha * a - alpha.conjugate() * a.H).exp()
        elif isinstance(expr, Squeeze):
            eta = expr.operands[1]
            a = SympyCreate(n)
            return ((eta/2) * a**2 - (eta.conjugate()/2) * (a.H)**2).exp()
        elif isinstance(expr, LocalSigma):
            # Outer product |j><k| of two basis states.
            ket = basis_state(expr.index_j, n)
            bra = basis_state(expr.index_k, n).H
            return ket * bra
        else:
            raise ValueError("Cannot convert '%s' of type %s"
                             % (str(expr), type(expr)))
    elif (isinstance(expr, Operator) and isinstance(expr, Operation)):
        if isinstance(expr, OperatorPlus):
            # Sum of the converted operands.
            s = convert_to_sympy_matrix(expr.operands[0], full_space)
            for op in expr.operands[1:]:
                s += convert_to_sympy_matrix(op, full_space)
            return s
        elif isinstance(expr, OperatorTimes):
            # if any factor acts non-locally, we need to expand distributively.
            if any(len(op.space) > 1 for op in expr.operands):
                se = expr.expand()
                if se == expr:
                    raise ValueError("Cannot represent as sympy matrix: %s"
                                     % expr)
                return convert_to_sympy_matrix(se, full_space)
            all_spaces = full_space.local_factors
            by_space = []
            ck = 0
            for ls in all_spaces:
                # group factors by associated local space
                ls_ops = [convert_to_sympy_matrix(o, o.space)
                          for o in expr.operands if o.space == ls]
                if len(ls_ops):
                    # compute factor associated with local space
                    by_space.append(ls_ops[0])
                    for ls_op in ls_ops[1:]:
                        by_space[-1] *= ls_op
                    ck += len(ls_ops)
                else:
                    # if trivial action, take identity matrix
                    by_space.append(sympy.eye(ls.dimension))
            # Sanity check: every operand was consumed by exactly one factor.
            assert ck == len(expr.operands)
            # combine local factors in tensor product
            if len(by_space) == 1:
                return by_space[0]
            else:
                return tensor(*by_space)
        elif isinstance(expr, Adjoint):
            return convert_to_sympy_matrix(expr.operand, full_space).H
        elif isinstance(expr, PseudoInverse):
            raise NotImplementedError(
                'Cannot convert PseudoInverse to sympy matrix')
        elif isinstance(expr, NullSpaceProjector):
            raise NotImplementedError(
                'Cannot convert NullSpaceProjector to sympy')
        elif isinstance(expr, ScalarTimesOperator):
            return expr.coeff * convert_to_sympy_matrix(expr.term, full_space)
        else:
            raise ValueError(
                "Cannot convert '%s' of type %s" % (str(expr), type(expr)))
    else:
        raise ValueError(
            "Cannot convert '%s' of type %s" % (str(expr), type(expr)))
|
[
"def",
"convert_to_sympy_matrix",
"(",
"expr",
",",
"full_space",
"=",
"None",
")",
":",
"if",
"full_space",
"is",
"None",
":",
"full_space",
"=",
"expr",
".",
"space",
"if",
"not",
"expr",
".",
"space",
".",
"is_tensor_factor_of",
"(",
"full_space",
")",
":",
"raise",
"ValueError",
"(",
"\"expr must be in full_space\"",
")",
"if",
"expr",
"is",
"IdentityOperator",
":",
"return",
"sympy",
".",
"eye",
"(",
"full_space",
".",
"dimension",
")",
"elif",
"expr",
"is",
"ZeroOperator",
":",
"return",
"0",
"elif",
"isinstance",
"(",
"expr",
",",
"LocalOperator",
")",
":",
"n",
"=",
"full_space",
".",
"dimension",
"if",
"full_space",
"!=",
"expr",
".",
"space",
":",
"all_spaces",
"=",
"full_space",
".",
"local_factors",
"own_space_index",
"=",
"all_spaces",
".",
"index",
"(",
"expr",
".",
"space",
")",
"factors",
"=",
"[",
"sympy",
".",
"eye",
"(",
"s",
".",
"dimension",
")",
"for",
"s",
"in",
"all_spaces",
"[",
":",
"own_space_index",
"]",
"]",
"factors",
".",
"append",
"(",
"convert_to_sympy_matrix",
"(",
"expr",
",",
"expr",
".",
"space",
")",
")",
"factors",
".",
"extend",
"(",
"[",
"sympy",
".",
"eye",
"(",
"s",
".",
"dimension",
")",
"for",
"s",
"in",
"all_spaces",
"[",
"own_space_index",
"+",
"1",
":",
"]",
"]",
")",
"return",
"tensor",
"(",
"*",
"factors",
")",
"if",
"isinstance",
"(",
"expr",
",",
"(",
"Create",
",",
"Jz",
",",
"Jplus",
")",
")",
":",
"return",
"SympyCreate",
"(",
"n",
")",
"elif",
"isinstance",
"(",
"expr",
",",
"(",
"Destroy",
",",
"Jminus",
")",
")",
":",
"return",
"SympyCreate",
"(",
"n",
")",
".",
"H",
"elif",
"isinstance",
"(",
"expr",
",",
"Phase",
")",
":",
"phi",
"=",
"expr",
".",
"phase",
"result",
"=",
"sympy",
".",
"zeros",
"(",
"n",
")",
"for",
"i",
"in",
"range",
"(",
"n",
")",
":",
"result",
"[",
"i",
",",
"i",
"]",
"=",
"sympy",
".",
"exp",
"(",
"sympy",
".",
"I",
"*",
"i",
"*",
"phi",
")",
"return",
"result",
"elif",
"isinstance",
"(",
"expr",
",",
"Displace",
")",
":",
"alpha",
"=",
"expr",
".",
"operands",
"[",
"1",
"]",
"a",
"=",
"SympyCreate",
"(",
"n",
")",
"return",
"(",
"alpha",
"*",
"a",
"-",
"alpha",
".",
"conjugate",
"(",
")",
"*",
"a",
".",
"H",
")",
".",
"exp",
"(",
")",
"elif",
"isinstance",
"(",
"expr",
",",
"Squeeze",
")",
":",
"eta",
"=",
"expr",
".",
"operands",
"[",
"1",
"]",
"a",
"=",
"SympyCreate",
"(",
"n",
")",
"return",
"(",
"(",
"eta",
"/",
"2",
")",
"*",
"a",
"**",
"2",
"-",
"(",
"eta",
".",
"conjugate",
"(",
")",
"/",
"2",
")",
"*",
"(",
"a",
".",
"H",
")",
"**",
"2",
")",
".",
"exp",
"(",
")",
"elif",
"isinstance",
"(",
"expr",
",",
"LocalSigma",
")",
":",
"ket",
"=",
"basis_state",
"(",
"expr",
".",
"index_j",
",",
"n",
")",
"bra",
"=",
"basis_state",
"(",
"expr",
".",
"index_k",
",",
"n",
")",
".",
"H",
"return",
"ket",
"*",
"bra",
"else",
":",
"raise",
"ValueError",
"(",
"\"Cannot convert '%s' of type %s\"",
"%",
"(",
"str",
"(",
"expr",
")",
",",
"type",
"(",
"expr",
")",
")",
")",
"elif",
"(",
"isinstance",
"(",
"expr",
",",
"Operator",
")",
"and",
"isinstance",
"(",
"expr",
",",
"Operation",
")",
")",
":",
"if",
"isinstance",
"(",
"expr",
",",
"OperatorPlus",
")",
":",
"s",
"=",
"convert_to_sympy_matrix",
"(",
"expr",
".",
"operands",
"[",
"0",
"]",
",",
"full_space",
")",
"for",
"op",
"in",
"expr",
".",
"operands",
"[",
"1",
":",
"]",
":",
"s",
"+=",
"convert_to_sympy_matrix",
"(",
"op",
",",
"full_space",
")",
"return",
"s",
"elif",
"isinstance",
"(",
"expr",
",",
"OperatorTimes",
")",
":",
"# if any factor acts non-locally, we need to expand distributively.",
"if",
"any",
"(",
"len",
"(",
"op",
".",
"space",
")",
">",
"1",
"for",
"op",
"in",
"expr",
".",
"operands",
")",
":",
"se",
"=",
"expr",
".",
"expand",
"(",
")",
"if",
"se",
"==",
"expr",
":",
"raise",
"ValueError",
"(",
"\"Cannot represent as sympy matrix: %s\"",
"%",
"expr",
")",
"return",
"convert_to_sympy_matrix",
"(",
"se",
",",
"full_space",
")",
"all_spaces",
"=",
"full_space",
".",
"local_factors",
"by_space",
"=",
"[",
"]",
"ck",
"=",
"0",
"for",
"ls",
"in",
"all_spaces",
":",
"# group factors by associated local space",
"ls_ops",
"=",
"[",
"convert_to_sympy_matrix",
"(",
"o",
",",
"o",
".",
"space",
")",
"for",
"o",
"in",
"expr",
".",
"operands",
"if",
"o",
".",
"space",
"==",
"ls",
"]",
"if",
"len",
"(",
"ls_ops",
")",
":",
"# compute factor associated with local space",
"by_space",
".",
"append",
"(",
"ls_ops",
"[",
"0",
"]",
")",
"for",
"ls_op",
"in",
"ls_ops",
"[",
"1",
":",
"]",
":",
"by_space",
"[",
"-",
"1",
"]",
"*=",
"ls_op",
"ck",
"+=",
"len",
"(",
"ls_ops",
")",
"else",
":",
"# if trivial action, take identity matrix",
"by_space",
".",
"append",
"(",
"sympy",
".",
"eye",
"(",
"ls",
".",
"dimension",
")",
")",
"assert",
"ck",
"==",
"len",
"(",
"expr",
".",
"operands",
")",
"# combine local factors in tensor product",
"if",
"len",
"(",
"by_space",
")",
"==",
"1",
":",
"return",
"by_space",
"[",
"0",
"]",
"else",
":",
"return",
"tensor",
"(",
"*",
"by_space",
")",
"elif",
"isinstance",
"(",
"expr",
",",
"Adjoint",
")",
":",
"return",
"convert_to_sympy_matrix",
"(",
"expr",
".",
"operand",
",",
"full_space",
")",
".",
"H",
"elif",
"isinstance",
"(",
"expr",
",",
"PseudoInverse",
")",
":",
"raise",
"NotImplementedError",
"(",
"'Cannot convert PseudoInverse to sympy matrix'",
")",
"elif",
"isinstance",
"(",
"expr",
",",
"NullSpaceProjector",
")",
":",
"raise",
"NotImplementedError",
"(",
"'Cannot convert NullSpaceProjector to sympy'",
")",
"elif",
"isinstance",
"(",
"expr",
",",
"ScalarTimesOperator",
")",
":",
"return",
"expr",
".",
"coeff",
"*",
"convert_to_sympy_matrix",
"(",
"expr",
".",
"term",
",",
"full_space",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Cannot convert '%s' of type %s\"",
"%",
"(",
"str",
"(",
"expr",
")",
",",
"type",
"(",
"expr",
")",
")",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Cannot convert '%s' of type %s\"",
"%",
"(",
"str",
"(",
"expr",
")",
",",
"type",
"(",
"expr",
")",
")",
")"
] | 44.122807 | 15.192982 |
def get_smart_data():
    """
    Get SMART attribute data
    :return: list of multi leveled dictionaries
    each dict has a key "DeviceName" with the identification of the device in smartctl
    also has keys of the SMART attribute id, with value of another dict of the attributes
    [
    {
     "DeviceName": "/dev/sda blahblah",
     "1":
     {
     "flags": "..",
     "raw": "..",
     etc,
     }
    }
    ]
    """
    stats = []
    # get all devices
    devlist = DeviceList()
    for dev in devlist.devices:
        stats.append({
            DEVKEY: str(dev)
        })
        for attribute in dev.attributes:
            if attribute is None:
                continue
            attribdict = convert_attribute_to_dict(attribute)
            # we will use the attribute number as the key
            num = attribdict.pop('num', None)
            if num is None:
                # Should never happen; skip the malformed attribute instead
                # of failing the whole scan. (The original used a try/assert
                # for this, which is stripped under `python -O`.)
                continue
            stats[-1][num] = attribdict
    return stats
|
[
"def",
"get_smart_data",
"(",
")",
":",
"stats",
"=",
"[",
"]",
"# get all devices",
"devlist",
"=",
"DeviceList",
"(",
")",
"for",
"dev",
"in",
"devlist",
".",
"devices",
":",
"stats",
".",
"append",
"(",
"{",
"DEVKEY",
":",
"str",
"(",
"dev",
")",
"}",
")",
"for",
"attribute",
"in",
"dev",
".",
"attributes",
":",
"if",
"attribute",
"is",
"None",
":",
"pass",
"else",
":",
"attribdict",
"=",
"convert_attribute_to_dict",
"(",
"attribute",
")",
"# we will use the attribute number as the key",
"num",
"=",
"attribdict",
".",
"pop",
"(",
"'num'",
",",
"None",
")",
"try",
":",
"assert",
"num",
"is",
"not",
"None",
"except",
"Exception",
"as",
"e",
":",
"# we should never get here, but if we do, continue to next iteration and skip this attribute",
"continue",
"stats",
"[",
"-",
"1",
"]",
"[",
"num",
"]",
"=",
"attribdict",
"return",
"stats"
] | 31.5 | 19.595238 |
def get_redshift(self, dist):
    """Returns the redshift for the given distance.

    Parameters
    ----------
    dist : float or array
        The luminosity distance(s) to convert.

    Returns
    -------
    float or numpy.ndarray
        The redshift(s); an array is returned iff the input was an array.

    Raises
    ------
    ValueError
        If a distance beyond the interpolants' range is not finite and > 0.
    """
    dist, input_is_array = ensurearray(dist)
    try:
        zs = self.nearby_d2z(dist)
    except TypeError:
        # interpolant hasn't been setup yet
        self.setup_interpolant()
        zs = self.nearby_d2z(dist)
    # if any points had redshifts beyond the nearby, will have nans;
    # replace using the faraway interpolation
    replacemask = numpy.isnan(zs)
    if replacemask.any():
        zs[replacemask] = self.faraway_d2z(dist[replacemask])
        replacemask = numpy.isnan(zs)
    # if we still have nans, means that some distances are beyond our
    # furthest default; fall back to using astropy
    if replacemask.any():
        # check that every distance is positive AND finite; the negation
        # must cover both conditions (previously only the first was
        # negated, which let invalid values slip through)
        if not ((dist > 0.).all() and numpy.isfinite(dist).all()):
            raise ValueError("distance must be finite and > 0")
        zs[replacemask] = _redshift(dist[replacemask],
                                    cosmology=self.cosmology)
    return formatreturn(zs, input_is_array)
|
[
"def",
"get_redshift",
"(",
"self",
",",
"dist",
")",
":",
"dist",
",",
"input_is_array",
"=",
"ensurearray",
"(",
"dist",
")",
"try",
":",
"zs",
"=",
"self",
".",
"nearby_d2z",
"(",
"dist",
")",
"except",
"TypeError",
":",
"# interpolant hasn't been setup yet",
"self",
".",
"setup_interpolant",
"(",
")",
"zs",
"=",
"self",
".",
"nearby_d2z",
"(",
"dist",
")",
"# if any points had red shifts beyond the nearby, will have nans;",
"# replace using the faraway interpolation",
"replacemask",
"=",
"numpy",
".",
"isnan",
"(",
"zs",
")",
"if",
"replacemask",
".",
"any",
"(",
")",
":",
"zs",
"[",
"replacemask",
"]",
"=",
"self",
".",
"faraway_d2z",
"(",
"dist",
"[",
"replacemask",
"]",
")",
"replacemask",
"=",
"numpy",
".",
"isnan",
"(",
"zs",
")",
"# if we still have nans, means that some distances are beyond our",
"# furthest default; fall back to using astropy",
"if",
"replacemask",
".",
"any",
"(",
")",
":",
"# well... check that the distance is positive and finite first",
"if",
"not",
"(",
"dist",
">",
"0.",
")",
".",
"all",
"(",
")",
"and",
"numpy",
".",
"isfinite",
"(",
"dist",
")",
".",
"all",
"(",
")",
":",
"raise",
"ValueError",
"(",
"\"distance must be finite and > 0\"",
")",
"zs",
"[",
"replacemask",
"]",
"=",
"_redshift",
"(",
"dist",
"[",
"replacemask",
"]",
",",
"cosmology",
"=",
"self",
".",
"cosmology",
")",
"return",
"formatreturn",
"(",
"zs",
",",
"input_is_array",
")"
] | 46.72 | 14.24 |
def get_supported_versions(self):
    """Query the device for the list of U2F versions it supports.

    The answer is cached on the instance, so the APDU is sent at most
    once per device object.
    """
    if hasattr(self, '_versions'):
        return self._versions
    try:
        version = self.send_apdu(INS_GET_VERSION).decode()
        self._versions = [version]
    except exc.APDUError as e:
        # v0 didn't support the instruction.
        if e.code == 0x6d00:
            self._versions = ['v0']
        else:
            self._versions = []
    return self._versions
|
[
"def",
"get_supported_versions",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_versions'",
")",
":",
"try",
":",
"self",
".",
"_versions",
"=",
"[",
"self",
".",
"send_apdu",
"(",
"INS_GET_VERSION",
")",
".",
"decode",
"(",
")",
"]",
"except",
"exc",
".",
"APDUError",
"as",
"e",
":",
"# v0 didn't support the instruction.",
"self",
".",
"_versions",
"=",
"[",
"'v0'",
"]",
"if",
"e",
".",
"code",
"==",
"0x6d00",
"else",
"[",
"]",
"return",
"self",
".",
"_versions"
] | 36.333333 | 15.166667 |
def _get_template_dict(template):
"""
Parse string template and or copy dictionary template.
:param template: Content of a packaged YAML or JSON SAM template
:type template: str_or_dict
:return: Template as a dictionary
:rtype: dict
:raises ValueError
"""
if isinstance(template, str):
return parse_template(template)
if isinstance(template, dict):
return copy.deepcopy(template)
raise ValueError('Input template should be a string or dictionary')
|
[
"def",
"_get_template_dict",
"(",
"template",
")",
":",
"if",
"isinstance",
"(",
"template",
",",
"str",
")",
":",
"return",
"parse_template",
"(",
"template",
")",
"if",
"isinstance",
"(",
"template",
",",
"dict",
")",
":",
"return",
"copy",
".",
"deepcopy",
"(",
"template",
")",
"raise",
"ValueError",
"(",
"'Input template should be a string or dictionary'",
")"
] | 29.058824 | 16.117647 |
def copy_selection_access_string(self):
    """Copy an access string for the current selection to the clipboard.

    An access string is Python code that references the selection. When
    nothing is selected, a reference to the current cell is copied
    instead. The operation is reported in the status bar.
    """
    grid = self.grid
    selection = self.get_selection()
    if not selection:
        # Fall back to a single-cell selection at the cursor position
        row, col = grid.actions.cursor[:2]
        selection = Selection([], [], [], [], [(row, col)])
    access_string = selection.get_access_string(grid.code_array.shape,
                                                grid.current_table)
    # Copy access string to clipboard
    grid.main_window.clipboard.set_clipboard(access_string)
    # Report the copy operation and access string in the status bar
    statustext = _("Cell reference copied to clipboard: {access_string}")
    post_command_event(self.main_window, self.StatusBarMsg,
                       text=statustext.format(access_string=access_string))
|
[
"def",
"copy_selection_access_string",
"(",
"self",
")",
":",
"selection",
"=",
"self",
".",
"get_selection",
"(",
")",
"if",
"not",
"selection",
":",
"cursor",
"=",
"self",
".",
"grid",
".",
"actions",
".",
"cursor",
"selection",
"=",
"Selection",
"(",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"[",
"tuple",
"(",
"cursor",
"[",
":",
"2",
"]",
")",
"]",
")",
"shape",
"=",
"self",
".",
"grid",
".",
"code_array",
".",
"shape",
"tab",
"=",
"self",
".",
"grid",
".",
"current_table",
"access_string",
"=",
"selection",
".",
"get_access_string",
"(",
"shape",
",",
"tab",
")",
"# Copy access string to clipboard",
"self",
".",
"grid",
".",
"main_window",
".",
"clipboard",
".",
"set_clipboard",
"(",
"access_string",
")",
"# Display copy operation and access string in status bar",
"statustext",
"=",
"_",
"(",
"\"Cell reference copied to clipboard: {access_string}\"",
")",
"statustext",
"=",
"statustext",
".",
"format",
"(",
"access_string",
"=",
"access_string",
")",
"post_command_event",
"(",
"self",
".",
"main_window",
",",
"self",
".",
"StatusBarMsg",
",",
"text",
"=",
"statustext",
")"
] | 38.461538 | 21.807692 |
def update_agent_state():
    '''Update the current agent state in opencast.
    '''
    configure_service('capture.admin')
    # Determine the reported agent state via a priority list: a stopped
    # scheduler wins, then an active capture, then an active ingest.
    if get_service_status(db.Service.SCHEDULE) == db.ServiceStatus.STOPPED:
        status = 'offline'
    elif get_service_status(db.Service.CAPTURE) == db.ServiceStatus.BUSY:
        status = 'capturing'
    elif get_service_status(db.Service.INGEST) == db.ServiceStatus.BUSY:
        status = 'uploading'
    else:
        status = 'idle'
    register_ca(status=status)
|
[
"def",
"update_agent_state",
"(",
")",
":",
"configure_service",
"(",
"'capture.admin'",
")",
"status",
"=",
"'idle'",
"# Determine reported agent state with priority list",
"if",
"get_service_status",
"(",
"db",
".",
"Service",
".",
"SCHEDULE",
")",
"==",
"db",
".",
"ServiceStatus",
".",
"STOPPED",
":",
"status",
"=",
"'offline'",
"elif",
"get_service_status",
"(",
"db",
".",
"Service",
".",
"CAPTURE",
")",
"==",
"db",
".",
"ServiceStatus",
".",
"BUSY",
":",
"status",
"=",
"'capturing'",
"elif",
"get_service_status",
"(",
"db",
".",
"Service",
".",
"INGEST",
")",
"==",
"db",
".",
"ServiceStatus",
".",
"BUSY",
":",
"status",
"=",
"'uploading'",
"register_ca",
"(",
"status",
"=",
"status",
")"
] | 35.066667 | 21.6 |
def write(cls, filename, samples, write_params=None, static_args=None,
          **metadata):
    """Writes the injection samples to the given hdf file.

    Parameters
    ----------
    filename : str
        The name of the file to write to.
    samples : io.FieldArray
        FieldArray of parameters.
    write_params : list, optional
        Only write the given parameter names. All given names must be keys
        in ``samples``. Default is to write all parameters in ``samples``.
    static_args : dict, optional
        Dictionary mapping static parameter names to values. These are
        written to the ``attrs``.
    \**metadata :
        All other keyword arguments will be written to the file's attrs.
    """
    with h5py.File(filename, 'w') as fp:
        # write metadata
        if static_args is None:
            static_args = {}
        # store the names as a list: on python 3, ``dict.keys()`` is a
        # view object that h5py cannot serialize as an attribute
        fp.attrs["static_args"] = list(static_args.keys())
        fp.attrs['injtype'] = cls.injtype
        for key, val in metadata.items():
            fp.attrs[key] = val
        if write_params is None:
            write_params = samples.fieldnames
        for arg, val in static_args.items():
            fp.attrs[arg] = val
        for field in write_params:
            fp[field] = samples[field]
|
[
"def",
"write",
"(",
"cls",
",",
"filename",
",",
"samples",
",",
"write_params",
"=",
"None",
",",
"static_args",
"=",
"None",
",",
"*",
"*",
"metadata",
")",
":",
"with",
"h5py",
".",
"File",
"(",
"filename",
",",
"'w'",
")",
"as",
"fp",
":",
"# write metadata",
"if",
"static_args",
"is",
"None",
":",
"static_args",
"=",
"{",
"}",
"fp",
".",
"attrs",
"[",
"\"static_args\"",
"]",
"=",
"static_args",
".",
"keys",
"(",
")",
"fp",
".",
"attrs",
"[",
"'injtype'",
"]",
"=",
"cls",
".",
"injtype",
"for",
"key",
",",
"val",
"in",
"metadata",
".",
"items",
"(",
")",
":",
"fp",
".",
"attrs",
"[",
"key",
"]",
"=",
"val",
"if",
"write_params",
"is",
"None",
":",
"write_params",
"=",
"samples",
".",
"fieldnames",
"for",
"arg",
",",
"val",
"in",
"static_args",
".",
"items",
"(",
")",
":",
"fp",
".",
"attrs",
"[",
"arg",
"]",
"=",
"val",
"for",
"field",
"in",
"write_params",
":",
"fp",
"[",
"field",
"]",
"=",
"samples",
"[",
"field",
"]"
] | 40.757576 | 12.969697 |
def ris(self):
    """Bibliographic entry in RIS (Research Information System Format)
    format.

    Returns
    -------
    ris : str
        The RIS string representing an item.

    Raises
    ------
    ValueError : If the item's aggregationType is not Journal.
    """
    if self.aggregationType != 'Journal':
        raise ValueError('Only Journal articles supported.')
    template = u'''TY - JOUR
TI - {title}
JO - {journal}
VL - {volume}
DA - {date}
SP - {pages}
PY - {year}
DO - {doi}
UR - https://doi.org/{doi}
'''
    fields = {
        'title': self.title,
        'journal': self.publicationName,
        'volume': self.volume,
        'date': self.coverDate,
        'pages': self.pageRange,
        'year': self.coverDate[0:4],
        'doi': self.doi,
    }
    ris = template.format(**fields)
    # One AU line per author, in document order
    for author in self.authors:
        ris += 'AU - {}\n'.format(author.indexed_name)
    if self.issueIdentifier is not None:
        ris += 'IS - {}\n'.format(self.issueIdentifier)
    ris += 'ER - \n\n'
    return ris
|
[
"def",
"ris",
"(",
"self",
")",
":",
"if",
"self",
".",
"aggregationType",
"!=",
"'Journal'",
":",
"raise",
"ValueError",
"(",
"'Only Journal articles supported.'",
")",
"template",
"=",
"u'''TY - JOUR\nTI - {title}\nJO - {journal}\nVL - {volume}\nDA - {date}\nSP - {pages}\nPY - {year}\nDO - {doi}\nUR - https://doi.org/{doi}\n'''",
"ris",
"=",
"template",
".",
"format",
"(",
"title",
"=",
"self",
".",
"title",
",",
"journal",
"=",
"self",
".",
"publicationName",
",",
"volume",
"=",
"self",
".",
"volume",
",",
"date",
"=",
"self",
".",
"coverDate",
",",
"pages",
"=",
"self",
".",
"pageRange",
",",
"year",
"=",
"self",
".",
"coverDate",
"[",
"0",
":",
"4",
"]",
",",
"doi",
"=",
"self",
".",
"doi",
")",
"for",
"au",
"in",
"self",
".",
"authors",
":",
"ris",
"+=",
"'AU - {}\\n'",
".",
"format",
"(",
"au",
".",
"indexed_name",
")",
"if",
"self",
".",
"issueIdentifier",
"is",
"not",
"None",
":",
"ris",
"+=",
"'IS - {}\\n'",
".",
"format",
"(",
"self",
".",
"issueIdentifier",
")",
"ris",
"+=",
"'ER - \\n\\n'",
"return",
"ris"
] | 28.971429 | 20.657143 |
def getActiveAxes(self):
    """Return the indices of the axes whose menu items are checked."""
    return [idx for idx in range(len(self._axisId))
            if self._menu.IsChecked(self._axisId[idx])]
|
[
"def",
"getActiveAxes",
"(",
"self",
")",
":",
"active",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"self",
".",
"_axisId",
")",
")",
":",
"if",
"self",
".",
"_menu",
".",
"IsChecked",
"(",
"self",
".",
"_axisId",
"[",
"i",
"]",
")",
":",
"active",
".",
"append",
"(",
"i",
")",
"return",
"active"
] | 34.285714 | 11.285714 |
def _prepare_arguments(self, argv):
    """Normalize ``argv`` before parsing.

    Two rewrites are applied:

    - the placeholder and everything after it are moved to the front,
      which lets us distinguish our own arguments from `command`'s;
    - ``--`` is inserted before `command`, so our parser ignores the
      arguments that belong to `command`.
    """
    if ARGUMENT_PLACEHOLDER in argv:
        cut = argv.index(ARGUMENT_PLACEHOLDER)
        return argv[cut + 1:] + ['--'] + argv[:cut]
    needs_separator = (argv
                       and argv[0] != '--'
                       and not argv[0].startswith('-'))
    if needs_separator:
        return ['--'] + argv
    return argv
|
[
"def",
"_prepare_arguments",
"(",
"self",
",",
"argv",
")",
":",
"if",
"ARGUMENT_PLACEHOLDER",
"in",
"argv",
":",
"index",
"=",
"argv",
".",
"index",
"(",
"ARGUMENT_PLACEHOLDER",
")",
"return",
"argv",
"[",
"index",
"+",
"1",
":",
"]",
"+",
"[",
"'--'",
"]",
"+",
"argv",
"[",
":",
"index",
"]",
"elif",
"argv",
"and",
"not",
"argv",
"[",
"0",
"]",
".",
"startswith",
"(",
"'-'",
")",
"and",
"argv",
"[",
"0",
"]",
"!=",
"'--'",
":",
"return",
"[",
"'--'",
"]",
"+",
"argv",
"else",
":",
"return",
"argv"
] | 36.058824 | 21 |
def initialize(self, *args):
    """Reset the recommender's stored users and items.

    Extra positional arguments are accepted for interface compatibility
    and ignored.
    """
    # observed-user counter and per-user data
    self.n_user = 0
    self.users = {}
    # observed-item counter and per-item data
    self.n_item = 0
    self.items = {}
|
[
"def",
"initialize",
"(",
"self",
",",
"*",
"args",
")",
":",
"# number of observed users",
"self",
".",
"n_user",
"=",
"0",
"# store user data",
"self",
".",
"users",
"=",
"{",
"}",
"# number of observed items",
"self",
".",
"n_item",
"=",
"0",
"# store item data",
"self",
".",
"items",
"=",
"{",
"}"
] | 22.928571 | 17.285714 |
def chi_eff(mass1, mass2, spin1z, spin2z):
    """Returns the effective spin from mass1, mass2, spin1z, and spin2z."""
    total_mass = mass1 + mass2
    weighted_spin = mass1 * spin1z + mass2 * spin2z
    return weighted_spin / total_mass
|
[
"def",
"chi_eff",
"(",
"mass1",
",",
"mass2",
",",
"spin1z",
",",
"spin2z",
")",
":",
"return",
"(",
"spin1z",
"*",
"mass1",
"+",
"spin2z",
"*",
"mass2",
")",
"/",
"(",
"mass1",
"+",
"mass2",
")"
] | 59.666667 | 8 |
def get_vnetwork_portgroups_input_name(self, **kwargs):
    """Auto Generated Code

    Builds a ``get_vnetwork_portgroups`` request element containing an
    ``input/name`` node and hands it to the callback.

    :param name: text for the input/name element (required).
    :param callback: optional callable applied to the built element;
        defaults to ``self._callback`` (which is looked up even when a
        callback is supplied, so ``self`` must provide it).
    :return: whatever the callback returns.
    """
    # The original built a throwaway ET.Element("config") here that was
    # immediately overwritten; it has been removed.
    get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups")
    config = get_vnetwork_portgroups
    input_el = ET.SubElement(get_vnetwork_portgroups, "input")
    name_el = ET.SubElement(input_el, "name")
    name_el.text = kwargs.pop('name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
[
"def",
"get_vnetwork_portgroups_input_name",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"config",
"=",
"ET",
".",
"Element",
"(",
"\"config\"",
")",
"get_vnetwork_portgroups",
"=",
"ET",
".",
"Element",
"(",
"\"get_vnetwork_portgroups\"",
")",
"config",
"=",
"get_vnetwork_portgroups",
"input",
"=",
"ET",
".",
"SubElement",
"(",
"get_vnetwork_portgroups",
",",
"\"input\"",
")",
"name",
"=",
"ET",
".",
"SubElement",
"(",
"input",
",",
"\"name\"",
")",
"name",
".",
"text",
"=",
"kwargs",
".",
"pop",
"(",
"'name'",
")",
"callback",
"=",
"kwargs",
".",
"pop",
"(",
"'callback'",
",",
"self",
".",
"_callback",
")",
"return",
"callback",
"(",
"config",
")"
] | 39.666667 | 11.916667 |
def buffer(self,
           geometries,
           inSR,
           distances,
           units,
           outSR=None,
           bufferSR=None,
           unionResults=True,
           geodesic=True
           ):
    """
    The buffer operation is performed on a geometry service resource.
    The result of this operation is buffered polygons at the
    specified distances for the input geometry array. Options are
    available to union buffers and to use geodesic distance.

    Inputs:
       geometries - array of geometries (structured as JSON geometry
         objects returned by the ArcGIS REST API).
       inSR - spatial reference of the input geometries WKID.
       outSR - spatial reference for the returned geometries.
       bufferSR - WKID or a spatial reference JSON object in
         which the geometries are buffered.
       distances - distances that each of the input geometries is
         buffered.
       units - units for calculating each buffer distance.
       unionResults - if true, all geometries buffered at a given
         distance are unioned into a single (possibly multipart)
         polygon, and the unioned geometry is placed in the output
         array.
       geodesic - set geodesic to true to buffer using geodesic
         distance.

    Returns the service response from ``self._get``, or None when the
    first geometry is not a Polygon, Point, or Polyline.
    """
    url = self._url + "/buffer"
    params = {
        "f" : "json",
        "inSR" : inSR,
        "geodesic" : geodesic,
        "unionResults" : unionResults
    }
    if isinstance(geometries, list) and len(geometries) > 0:
        # The geometry type is taken from the first element; map the
        # python class to its esri geometry-type name so the three
        # previously-duplicated branches collapse into one.
        first_geom = geometries[0]
        geometry_types = (
            (Polygon, "esriGeometryPolygon"),
            (Point, "esriGeometryPoint"),
            (Polyline, "esriGeometryPolyline"),
        )
        for geom_class, type_name in geometry_types:
            if isinstance(first_geom, geom_class):
                params['geometries'] = {
                    "geometryType": type_name,
                    "geometries": self.__geomToStringArray(geometries,
                                                           "list")
                }
                break
        else:
            # Unsupported geometry type: preserve the original contract
            return None
    if isinstance(distances, list):
        params['distances'] = ",".join(str(d) for d in distances)
    else:
        params['distances'] = str(distances)
    params['units'] = units
    if bufferSR is not None:
        params['bufferSR'] = bufferSR
    if outSR is not None:
        params['outSR'] = outSR
    return self._get(url, param_dict=params,
                     proxy_port=self._proxy_port,
                     securityHandler=self._securityHandler,
                     proxy_url=self._proxy_url)
|
[
"def",
"buffer",
"(",
"self",
",",
"geometries",
",",
"inSR",
",",
"distances",
",",
"units",
",",
"outSR",
"=",
"None",
",",
"bufferSR",
"=",
"None",
",",
"unionResults",
"=",
"True",
",",
"geodesic",
"=",
"True",
")",
":",
"url",
"=",
"self",
".",
"_url",
"+",
"\"/buffer\"",
"params",
"=",
"{",
"\"f\"",
":",
"\"json\"",
",",
"\"inSR\"",
":",
"inSR",
",",
"\"geodesic\"",
":",
"geodesic",
",",
"\"unionResults\"",
":",
"unionResults",
"}",
"if",
"isinstance",
"(",
"geometries",
",",
"list",
")",
"and",
"len",
"(",
"geometries",
")",
">",
"0",
":",
"g",
"=",
"geometries",
"[",
"0",
"]",
"if",
"isinstance",
"(",
"g",
",",
"Polygon",
")",
":",
"params",
"[",
"'geometries'",
"]",
"=",
"{",
"\"geometryType\"",
":",
"\"esriGeometryPolygon\"",
",",
"\"geometries\"",
":",
"self",
".",
"__geomToStringArray",
"(",
"geometries",
",",
"\"list\"",
")",
"}",
"elif",
"isinstance",
"(",
"g",
",",
"Point",
")",
":",
"params",
"[",
"'geometries'",
"]",
"=",
"{",
"\"geometryType\"",
":",
"\"esriGeometryPoint\"",
",",
"\"geometries\"",
":",
"self",
".",
"__geomToStringArray",
"(",
"geometries",
",",
"\"list\"",
")",
"}",
"elif",
"isinstance",
"(",
"g",
",",
"Polyline",
")",
":",
"params",
"[",
"'geometries'",
"]",
"=",
"{",
"\"geometryType\"",
":",
"\"esriGeometryPolyline\"",
",",
"\"geometries\"",
":",
"self",
".",
"__geomToStringArray",
"(",
"geometries",
",",
"\"list\"",
")",
"}",
"else",
":",
"return",
"None",
"if",
"isinstance",
"(",
"distances",
",",
"list",
")",
":",
"distances",
"=",
"[",
"str",
"(",
"d",
")",
"for",
"d",
"in",
"distances",
"]",
"params",
"[",
"'distances'",
"]",
"=",
"\",\"",
".",
"join",
"(",
"distances",
")",
"else",
":",
"params",
"[",
"'distances'",
"]",
"=",
"str",
"(",
"distances",
")",
"params",
"[",
"'units'",
"]",
"=",
"units",
"if",
"bufferSR",
"is",
"not",
"None",
":",
"params",
"[",
"'bufferSR'",
"]",
"=",
"bufferSR",
"if",
"outSR",
"is",
"not",
"None",
":",
"params",
"[",
"'outSR'",
"]",
"=",
"outSR",
"return",
"self",
".",
"_get",
"(",
"url",
",",
"param_dict",
"=",
"params",
",",
"proxy_port",
"=",
"self",
".",
"_proxy_port",
",",
"securityHandler",
"=",
"self",
".",
"_securityHandler",
",",
"proxy_url",
"=",
"self",
".",
"_proxy_url",
")"
] | 46.4 | 21.876923 |
def bucket_ops(bid, api=""):
    """Context manager for dealing with s3 errors in one place

    bid: bucket_id in form of account_name:bucket_name
    """
    try:
        yield 42
    except ClientError as e:
        error_code = e.response['Error']['Code']
        log.info(
            "bucket error bucket:%s error:%s",
            bid,
            e.response['Error']['Code'])
        # NoSuchBucket is expected churn and ignored; denials are
        # recorded, anything else is filed as an unknown error.
        if error_code == 'AccessDenied':
            connection.sadd('buckets-denied', bid)
        elif error_code != "NoSuchBucket":
            connection.hset(
                'buckets-unknown-errors',
                bid,
                "%s:%s" % (api, e.response['Error']['Code']))
    except Exception as e:
        connection.hset(
            'buckets-unknown-errors',
            bid,
            "%s:%s" % (api, str(e)))
        # Let the error queue catch it
        raise
|
[
"def",
"bucket_ops",
"(",
"bid",
",",
"api",
"=",
"\"\"",
")",
":",
"try",
":",
"yield",
"42",
"except",
"ClientError",
"as",
"e",
":",
"code",
"=",
"e",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Code'",
"]",
"log",
".",
"info",
"(",
"\"bucket error bucket:%s error:%s\"",
",",
"bid",
",",
"e",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Code'",
"]",
")",
"if",
"code",
"==",
"\"NoSuchBucket\"",
":",
"pass",
"elif",
"code",
"==",
"'AccessDenied'",
":",
"connection",
".",
"sadd",
"(",
"'buckets-denied'",
",",
"bid",
")",
"else",
":",
"connection",
".",
"hset",
"(",
"'buckets-unknown-errors'",
",",
"bid",
",",
"\"%s:%s\"",
"%",
"(",
"api",
",",
"e",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Code'",
"]",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"connection",
".",
"hset",
"(",
"'buckets-unknown-errors'",
",",
"bid",
",",
"\"%s:%s\"",
"%",
"(",
"api",
",",
"str",
"(",
"e",
")",
")",
")",
"# Let the error queue catch it",
"raise"
] | 29.413793 | 13.931034 |
def bind(self, __fun, *args, **kwargs):
    """
    Bind a worker function to the future. This worker function will be
    executed when the future is executed.

    Raises RuntimeError when the future has already started, finished,
    been cancelled, or already has a worker bound.
    """
    worker = functools.partial(__fun, *args, **kwargs)
    with self._lock:
        if self._running or self._completed or self._cancelled:
            raise RuntimeError('Future object can not be reused')
        if self._worker:
            raise RuntimeError('Future object is already bound')
        self._worker = worker
    return self
|
[
"def",
"bind",
"(",
"self",
",",
"__fun",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"with",
"self",
".",
"_lock",
":",
"if",
"self",
".",
"_running",
"or",
"self",
".",
"_completed",
"or",
"self",
".",
"_cancelled",
":",
"raise",
"RuntimeError",
"(",
"'Future object can not be reused'",
")",
"if",
"self",
".",
"_worker",
":",
"raise",
"RuntimeError",
"(",
"'Future object is already bound'",
")",
"self",
".",
"_worker",
"=",
"functools",
".",
"partial",
"(",
"__fun",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"self"
] | 35.769231 | 16.846154 |
def install_package(self, team, user, package, contents):
    """
    Create a new package in the default package store, allocating a
    per-user directory if needed.

    Any data already stored at the package path is removed first.
    """
    self.check_name(team, user, package)
    assert contents is not None
    self.create_dirs()
    target = self.package_path(team, user, package)
    # Delete any existing data; a missing file is not an error.
    try:
        os.remove(target)
    except OSError:
        pass
|
[
"def",
"install_package",
"(",
"self",
",",
"team",
",",
"user",
",",
"package",
",",
"contents",
")",
":",
"self",
".",
"check_name",
"(",
"team",
",",
"user",
",",
"package",
")",
"assert",
"contents",
"is",
"not",
"None",
"self",
".",
"create_dirs",
"(",
")",
"path",
"=",
"self",
".",
"package_path",
"(",
"team",
",",
"user",
",",
"package",
")",
"# Delete any existing data.",
"try",
":",
"os",
".",
"remove",
"(",
"path",
")",
"except",
"OSError",
":",
"pass"
] | 28.8125 | 15.6875 |
def parallel_execute(self, cell, block=None, groupby='type', save_name=None):
    """implementation used by %px and %%parallel

    Parameters
    ----------
    cell : str
        Code to execute on the engines.
    block : bool, optional
        Whether to wait for the result; defaults to the view's setting.
    groupby : str, optional
        How to group the displayed outputs when blocking.
    save_name : str, optional
        If given, store the AsyncResult in the user namespace under
        this name.

    Returns
    -------
    AsyncResult or None
        The AsyncResult on non-blocking submission, otherwise None.
    """
    # defaults:
    block = self.view.block if block is None else block
    base = "Parallel" if block else "Async parallel"
    targets = self.view.targets
    # Abbreviate long target lists for display
    if isinstance(targets, list) and len(targets) > 10:
        str_targets = str(targets[:4])[:-1] + ', ..., ' + str(targets[-4:])[1:]
    else:
        str_targets = str(targets)
    if self.verbose:
        # use the print() call form: the original py2-only print
        # statement is a syntax error on python 3
        print(base + " execution on engine(s): %s" % str_targets)
    result = self.view.execute(cell, silent=False, block=False)
    self.last_result = result
    if save_name:
        self.shell.user_ns[save_name] = result
    if block:
        result.get()
        result.display_outputs(groupby)
    else:
        # return AsyncResult only on non-blocking submission
        return result
|
[
"def",
"parallel_execute",
"(",
"self",
",",
"cell",
",",
"block",
"=",
"None",
",",
"groupby",
"=",
"'type'",
",",
"save_name",
"=",
"None",
")",
":",
"# defaults:",
"block",
"=",
"self",
".",
"view",
".",
"block",
"if",
"block",
"is",
"None",
"else",
"block",
"base",
"=",
"\"Parallel\"",
"if",
"block",
"else",
"\"Async parallel\"",
"targets",
"=",
"self",
".",
"view",
".",
"targets",
"if",
"isinstance",
"(",
"targets",
",",
"list",
")",
"and",
"len",
"(",
"targets",
")",
">",
"10",
":",
"str_targets",
"=",
"str",
"(",
"targets",
"[",
":",
"4",
"]",
")",
"[",
":",
"-",
"1",
"]",
"+",
"', ..., '",
"+",
"str",
"(",
"targets",
"[",
"-",
"4",
":",
"]",
")",
"[",
"1",
":",
"]",
"else",
":",
"str_targets",
"=",
"str",
"(",
"targets",
")",
"if",
"self",
".",
"verbose",
":",
"print",
"base",
"+",
"\" execution on engine(s): %s\"",
"%",
"str_targets",
"result",
"=",
"self",
".",
"view",
".",
"execute",
"(",
"cell",
",",
"silent",
"=",
"False",
",",
"block",
"=",
"False",
")",
"self",
".",
"last_result",
"=",
"result",
"if",
"save_name",
":",
"self",
".",
"shell",
".",
"user_ns",
"[",
"save_name",
"]",
"=",
"result",
"if",
"block",
":",
"result",
".",
"get",
"(",
")",
"result",
".",
"display_outputs",
"(",
"groupby",
")",
"else",
":",
"# return AsyncResult only on non-blocking submission",
"return",
"result"
] | 35.107143 | 21.571429 |
def _infer_mutants_handler(self, request):
    """Returns JSON for the `vz-line-chart`s for a feature.

    Args:
      request: A request that should contain 'feature_name', 'example_index',
        'inference_address', 'model_name', 'model_type', 'model_version', and
        'model_signature'.

    Returns:
      A list of JSON objects, one for each chart.
    """
    try:
      # Only GET is served; anything else is answered with HTTP 405.
      if request.method != 'GET':
        logger.error('%s requests are forbidden.', request.method)
        return http_util.Respond(request, {'error': 'invalid non-GET request'},
                                 'application/json', code=405)
      # An example_index of -1 selects every loaded example; otherwise a
      # single example is used.
      example_index = int(request.args.get('example_index', '0'))
      feature_name = request.args.get('feature_name')
      examples = (self.examples if example_index == -1
                  else [self.examples[example_index]])
      (inference_addresses, model_names, model_versions,
       model_signatures) = self._parse_request_arguments(request)
      # Build one ServingBundle per model listed in the request arguments.
      serving_bundles = []
      for model_num in xrange(len(inference_addresses)):
        serving_bundles.append(inference_utils.ServingBundle(
            inference_addresses[model_num],
            model_names[model_num],
            request.args.get('model_type'),
            model_versions[model_num],
            model_signatures[model_num],
            request.args.get('use_predict') == 'true',
            request.args.get('predict_input_tensor'),
            request.args.get('predict_output_tensor')))
      viz_params = inference_utils.VizParams(
          request.args.get('x_min'), request.args.get('x_max'),
          self.examples[0:NUM_EXAMPLES_TO_SCAN], NUM_MUTANTS,
          request.args.get('feature_index_pattern'))
      json_mapping = inference_utils.mutant_charts_for_feature(
          examples, feature_name, serving_bundles, viz_params)
      return http_util.Respond(request, json_mapping, 'application/json')
    except common_utils.InvalidUserInputError as e:
      # Invalid user input is a client error (400), not a server failure.
      return http_util.Respond(request, {'error': e.message},
                               'application/json', code=400)
|
[
"def",
"_infer_mutants_handler",
"(",
"self",
",",
"request",
")",
":",
"try",
":",
"if",
"request",
".",
"method",
"!=",
"'GET'",
":",
"logger",
".",
"error",
"(",
"'%s requests are forbidden.'",
",",
"request",
".",
"method",
")",
"return",
"http_util",
".",
"Respond",
"(",
"request",
",",
"{",
"'error'",
":",
"'invalid non-GET request'",
"}",
",",
"'application/json'",
",",
"code",
"=",
"405",
")",
"example_index",
"=",
"int",
"(",
"request",
".",
"args",
".",
"get",
"(",
"'example_index'",
",",
"'0'",
")",
")",
"feature_name",
"=",
"request",
".",
"args",
".",
"get",
"(",
"'feature_name'",
")",
"examples",
"=",
"(",
"self",
".",
"examples",
"if",
"example_index",
"==",
"-",
"1",
"else",
"[",
"self",
".",
"examples",
"[",
"example_index",
"]",
"]",
")",
"(",
"inference_addresses",
",",
"model_names",
",",
"model_versions",
",",
"model_signatures",
")",
"=",
"self",
".",
"_parse_request_arguments",
"(",
"request",
")",
"serving_bundles",
"=",
"[",
"]",
"for",
"model_num",
"in",
"xrange",
"(",
"len",
"(",
"inference_addresses",
")",
")",
":",
"serving_bundles",
".",
"append",
"(",
"inference_utils",
".",
"ServingBundle",
"(",
"inference_addresses",
"[",
"model_num",
"]",
",",
"model_names",
"[",
"model_num",
"]",
",",
"request",
".",
"args",
".",
"get",
"(",
"'model_type'",
")",
",",
"model_versions",
"[",
"model_num",
"]",
",",
"model_signatures",
"[",
"model_num",
"]",
",",
"request",
".",
"args",
".",
"get",
"(",
"'use_predict'",
")",
"==",
"'true'",
",",
"request",
".",
"args",
".",
"get",
"(",
"'predict_input_tensor'",
")",
",",
"request",
".",
"args",
".",
"get",
"(",
"'predict_output_tensor'",
")",
")",
")",
"viz_params",
"=",
"inference_utils",
".",
"VizParams",
"(",
"request",
".",
"args",
".",
"get",
"(",
"'x_min'",
")",
",",
"request",
".",
"args",
".",
"get",
"(",
"'x_max'",
")",
",",
"self",
".",
"examples",
"[",
"0",
":",
"NUM_EXAMPLES_TO_SCAN",
"]",
",",
"NUM_MUTANTS",
",",
"request",
".",
"args",
".",
"get",
"(",
"'feature_index_pattern'",
")",
")",
"json_mapping",
"=",
"inference_utils",
".",
"mutant_charts_for_feature",
"(",
"examples",
",",
"feature_name",
",",
"serving_bundles",
",",
"viz_params",
")",
"return",
"http_util",
".",
"Respond",
"(",
"request",
",",
"json_mapping",
",",
"'application/json'",
")",
"except",
"common_utils",
".",
"InvalidUserInputError",
"as",
"e",
":",
"return",
"http_util",
".",
"Respond",
"(",
"request",
",",
"{",
"'error'",
":",
"e",
".",
"message",
"}",
",",
"'application/json'",
",",
"code",
"=",
"400",
")"
] | 43.510638 | 19.638298 |
def _cutadapt_se_cmd(fastq_files, out_files, base_cmd, data):
    """
    Build the single-end cutadapt command line.

    This has to use the -o option, not redirect to stdout, in order for
    gzipping to be supported.
    """
    min_length = dd.get_min_read_length(data)
    fq1 = objectstore.cl_input(fastq_files[0])
    of1 = out_files[0]
    # {min_length} is the only key consumed here; {of1_tx} and {log_tx}
    # are left in the string for a later formatting pass.
    cmd = base_cmd + " --minimum-length={min_length} ".format(**locals())
    cmd += " -o {of1_tx} " + str(fq1)
    return "%s | tee > {log_tx}" % cmd
|
[
"def",
"_cutadapt_se_cmd",
"(",
"fastq_files",
",",
"out_files",
",",
"base_cmd",
",",
"data",
")",
":",
"min_length",
"=",
"dd",
".",
"get_min_read_length",
"(",
"data",
")",
"cmd",
"=",
"base_cmd",
"+",
"\" --minimum-length={min_length} \"",
".",
"format",
"(",
"*",
"*",
"locals",
"(",
")",
")",
"fq1",
"=",
"objectstore",
".",
"cl_input",
"(",
"fastq_files",
"[",
"0",
"]",
")",
"of1",
"=",
"out_files",
"[",
"0",
"]",
"cmd",
"+=",
"\" -o {of1_tx} \"",
"+",
"str",
"(",
"fq1",
")",
"cmd",
"=",
"\"%s | tee > {log_tx}\"",
"%",
"cmd",
"return",
"cmd"
] | 37.25 | 13.083333 |
def set(self, section, key, value):
    """
    Set ``key`` to ``value`` under ``section``, creating the section
    first when it does not exist yet.
    Use write_config to actually persist the value.
    """
    if section not in self.config:
        self.config.add_section(section)
    self.config.set(section, key, value)
|
[
"def",
"set",
"(",
"self",
",",
"section",
",",
"key",
",",
"value",
")",
":",
"if",
"not",
"section",
"in",
"self",
".",
"config",
":",
"self",
".",
"config",
".",
"add_section",
"(",
"section",
")",
"self",
".",
"config",
".",
"set",
"(",
"section",
",",
"key",
",",
"value",
")"
] | 39.625 | 8.625 |
def fetch(self):
    """
    Fetch & return a new `Action` object representing the action's
    current state

    :rtype: Action
    :raises DOAPIError: if the API endpoint replies with an error
    """
    manager = self.doapi_manager
    response = manager.request(self.url)
    return manager._action(response["action"])
|
[
"def",
"fetch",
"(",
"self",
")",
":",
"api",
"=",
"self",
".",
"doapi_manager",
"return",
"api",
".",
"_action",
"(",
"api",
".",
"request",
"(",
"self",
".",
"url",
")",
"[",
"\"action\"",
"]",
")"
] | 31.1 | 20.3 |
def _parse_layer_info(list_of_layers, resources):
    """
    Creates a list of Layer objects that are represented by the resources and the list of layers

    Parameters
    ----------
    list_of_layers List(str)
        List of layers that are defined within the Layers Property on a function;
        each entry is either an ARN string or a {"Ref": ...} dict pointing at a
        layer resource in the template
    resources dict
        The Resources dictionary defined in a template

    Returns
    -------
    List(samcli.commands.local.lib.provider.Layer)
        List of the Layer objects created from the template and layer list defined on
        the function, in their original order.
    """
    valid_layer_types = (SamFunctionProvider._SERVERLESS_LAYER,
                         SamFunctionProvider._LAMBDA_LAYER)
    layers = []
    for entry in list_of_layers:
        # A plain string is assumed to already be the layer ARN.
        if isinstance(entry, six.string_types):
            layers.append(LayerVersion(entry, None))
        elif isinstance(entry, dict) and entry.get("Ref"):
            # A {"Ref": ...} points at a LayerVersion resource in the template;
            # follow it so the local path to the layer code can be extracted.
            logical_id = entry.get("Ref")
            resource = resources.get(logical_id)
            if not resource or resource.get("Type", "") not in valid_layer_types:
                raise InvalidLayerReference()
            properties = resource.get("Properties", {})
            resource_type = resource.get("Type")
            codeuri = None
            if resource_type == SamFunctionProvider._LAMBDA_LAYER:
                codeuri = SamFunctionProvider._extract_lambda_function_code(properties, "Content")
            elif resource_type == SamFunctionProvider._SERVERLESS_LAYER:
                codeuri = SamFunctionProvider._extract_sam_function_codeuri(logical_id,
                                                                            properties,
                                                                            "ContentUri")
            layers.append(LayerVersion(logical_id, codeuri))
    return layers
|
[
"def",
"_parse_layer_info",
"(",
"list_of_layers",
",",
"resources",
")",
":",
"layers",
"=",
"[",
"]",
"for",
"layer",
"in",
"list_of_layers",
":",
"# If the layer is a string, assume it is the arn",
"if",
"isinstance",
"(",
"layer",
",",
"six",
".",
"string_types",
")",
":",
"layers",
".",
"append",
"(",
"LayerVersion",
"(",
"layer",
",",
"None",
")",
")",
"continue",
"# In the list of layers that is defined within a template, you can reference a LayerVersion resource.",
"# When running locally, we need to follow that Ref so we can extract the local path to the layer code.",
"if",
"isinstance",
"(",
"layer",
",",
"dict",
")",
"and",
"layer",
".",
"get",
"(",
"\"Ref\"",
")",
":",
"layer_logical_id",
"=",
"layer",
".",
"get",
"(",
"\"Ref\"",
")",
"layer_resource",
"=",
"resources",
".",
"get",
"(",
"layer_logical_id",
")",
"if",
"not",
"layer_resource",
"or",
"layer_resource",
".",
"get",
"(",
"\"Type\"",
",",
"\"\"",
")",
"not",
"in",
"(",
"SamFunctionProvider",
".",
"_SERVERLESS_LAYER",
",",
"SamFunctionProvider",
".",
"_LAMBDA_LAYER",
")",
":",
"raise",
"InvalidLayerReference",
"(",
")",
"layer_properties",
"=",
"layer_resource",
".",
"get",
"(",
"\"Properties\"",
",",
"{",
"}",
")",
"resource_type",
"=",
"layer_resource",
".",
"get",
"(",
"\"Type\"",
")",
"codeuri",
"=",
"None",
"if",
"resource_type",
"==",
"SamFunctionProvider",
".",
"_LAMBDA_LAYER",
":",
"codeuri",
"=",
"SamFunctionProvider",
".",
"_extract_lambda_function_code",
"(",
"layer_properties",
",",
"\"Content\"",
")",
"if",
"resource_type",
"==",
"SamFunctionProvider",
".",
"_SERVERLESS_LAYER",
":",
"codeuri",
"=",
"SamFunctionProvider",
".",
"_extract_sam_function_codeuri",
"(",
"layer_logical_id",
",",
"layer_properties",
",",
"\"ContentUri\"",
")",
"layers",
".",
"append",
"(",
"LayerVersion",
"(",
"layer_logical_id",
",",
"codeuri",
")",
")",
"return",
"layers"
] | 48.588235 | 31.294118 |
def rparents(self, level=-1, intermediate=True):
    """Create a recursive list of parents.

    Note that :param:`intermediate` can be used to include every
    parent in the returned list, not only the most nested ones.

    Parameters:
        level (int): The depth level to continue fetching parents from
            (default is -1, to get parents to the utter depths)
        intermediate (bool): Also include the intermediate parents
            (default is True)

    Returns:
        :obj:`pronto.TermList`:
            The recursive parents of the Term following the parameters
    """
    try:
        # Results are memoized per (level, intermediate) pair.
        return self._rparents[(level, intermediate)]
    except KeyError:
        # Cache miss: compute the parent closure, then memoize it.
        rparents = []
        if self.parents and level:
            # Direct parents are included when intermediates are wanted,
            # or when this is the last requested depth level.
            if intermediate or level==1:
                rparents.extend(self.parents)
            for parent in self.parents:
                rparents.extend(parent.rparents(level=level-1,
                                intermediate=intermediate))
            # Deduplicate while preserving first-seen order.
            rparents = TermList(unique_everseen(rparents))
        self._rparents[(level, intermediate)] = rparents
        return rparents
|
[
"def",
"rparents",
"(",
"self",
",",
"level",
"=",
"-",
"1",
",",
"intermediate",
"=",
"True",
")",
":",
"try",
":",
"return",
"self",
".",
"_rparents",
"[",
"(",
"level",
",",
"intermediate",
")",
"]",
"except",
"KeyError",
":",
"rparents",
"=",
"[",
"]",
"if",
"self",
".",
"parents",
"and",
"level",
":",
"if",
"intermediate",
"or",
"level",
"==",
"1",
":",
"rparents",
".",
"extend",
"(",
"self",
".",
"parents",
")",
"for",
"parent",
"in",
"self",
".",
"parents",
":",
"rparents",
".",
"extend",
"(",
"parent",
".",
"rparents",
"(",
"level",
"=",
"level",
"-",
"1",
",",
"intermediate",
"=",
"intermediate",
")",
")",
"rparents",
"=",
"TermList",
"(",
"unique_everseen",
"(",
"rparents",
")",
")",
"self",
".",
"_rparents",
"[",
"(",
"level",
",",
"intermediate",
")",
"]",
"=",
"rparents",
"return",
"rparents"
] | 33.694444 | 23.805556 |
def singleton_init_by(init_fn=None):
    """Class decorator factory producing a singleton instance.

    ``init_fn``, when given, runs after the class's own ``__init__`` on the
    single instance.  The decorated class name is rebound to the instance,
    and ``isinstance(S, S)`` holds via a custom ``__instancecheck__``.

    >>> from Redy.Magic.Classic import singleton
    >>> @singleton
    >>> class S:
    >>>     pass
    >>> assert isinstance(S, S)
    """
    if init_fn:
        def wrap_init(origin_init):
            def __init__(self):
                origin_init(self)
                init_fn(self)
            return __init__
    else:
        def wrap_init(origin_init):
            return origin_init

    def inner(cls_def: type):
        # Only install the identity-based __instancecheck__ when the class
        # has none of its own (builtin/slot-wrapper means the default one).
        needs_check = (not hasattr(cls_def, '__instancecheck__')
                       or isinstance(cls_def.__instancecheck__,
                                     (types.BuiltinMethodType, _slot_wrapper)))
        if needs_check:
            def __instancecheck__(self, instance):
                return instance is self
            cls_def.__instancecheck__ = __instancecheck__
        original_init = cls_def.__init__
        cls_def.__init__ = wrap_init(original_init)
        return cls_def()
    return inner
|
[
"def",
"singleton_init_by",
"(",
"init_fn",
"=",
"None",
")",
":",
"if",
"not",
"init_fn",
":",
"def",
"wrap_init",
"(",
"origin_init",
")",
":",
"return",
"origin_init",
"else",
":",
"def",
"wrap_init",
"(",
"origin_init",
")",
":",
"def",
"__init__",
"(",
"self",
")",
":",
"origin_init",
"(",
"self",
")",
"init_fn",
"(",
"self",
")",
"return",
"__init__",
"def",
"inner",
"(",
"cls_def",
":",
"type",
")",
":",
"if",
"not",
"hasattr",
"(",
"cls_def",
",",
"'__instancecheck__'",
")",
"or",
"isinstance",
"(",
"cls_def",
".",
"__instancecheck__",
",",
"(",
"types",
".",
"BuiltinMethodType",
",",
"_slot_wrapper",
")",
")",
":",
"def",
"__instancecheck__",
"(",
"self",
",",
"instance",
")",
":",
"return",
"instance",
"is",
"self",
"cls_def",
".",
"__instancecheck__",
"=",
"__instancecheck__",
"_origin_init",
"=",
"cls_def",
".",
"__init__",
"cls_def",
".",
"__init__",
"=",
"wrap_init",
"(",
"_origin_init",
")",
"return",
"cls_def",
"(",
")",
"return",
"inner"
] | 27.441176 | 20.441176 |
def deserialize_dataframe(reader, data_type_id):
    """
    Deserialize a dataframe.

    Parameters
    ----------
    reader : file
        File-like object to read from. Must be opened in binary mode.
    data_type_id : dict
        Serialization format of the raw data.
        See the azureml.DataTypeIds class for constants.

    Returns
    -------
    pandas.DataFrame
        Dataframe object.
    """
    _not_none('reader', reader)
    _not_none_or_empty('data_type_id', data_type_id)
    serializer = _SERIALIZERS.get(data_type_id)
    if serializer is None:
        raise UnsupportedDatasetTypeError(data_type_id)
    # Each registry entry is a (serialize, deserialize) pair.
    deserialize = serializer[1]
    return deserialize(reader=reader)
|
[
"def",
"deserialize_dataframe",
"(",
"reader",
",",
"data_type_id",
")",
":",
"_not_none",
"(",
"'reader'",
",",
"reader",
")",
"_not_none_or_empty",
"(",
"'data_type_id'",
",",
"data_type_id",
")",
"serializer",
"=",
"_SERIALIZERS",
".",
"get",
"(",
"data_type_id",
")",
"if",
"serializer",
"is",
"None",
":",
"raise",
"UnsupportedDatasetTypeError",
"(",
"data_type_id",
")",
"return",
"serializer",
"[",
"1",
"]",
"(",
"reader",
"=",
"reader",
")"
] | 26.875 | 18.041667 |
def close(self):
    """
    Close the connection.

    The underlying socket, if any, is closed (errors are ignored) and
    the receive buffers are purged.
    """
    # Close the underlying socket
    if self._sock:
        with utils.ignore_except():
            self._sock.close()
        self._sock = None
    # Purge the message buffers
    self._recvbuf = []
    self._recvbuf_partial = ''
|
[
"def",
"close",
"(",
"self",
")",
":",
"# Close the underlying socket",
"if",
"self",
".",
"_sock",
":",
"with",
"utils",
".",
"ignore_except",
"(",
")",
":",
"self",
".",
"_sock",
".",
"close",
"(",
")",
"self",
".",
"_sock",
"=",
"None",
"# Purge the message buffers",
"self",
".",
"_recvbuf",
"=",
"[",
"]",
"self",
".",
"_recvbuf_partial",
"=",
"''"
] | 24.882353 | 15 |
def delay_embedding(data, emb_dim, lag=1):
    """
    Perform a time-delay embedding of a time series

    Args:
        data (array-like):
            the data that should be embedded
        emb_dim (int):
            the embedding dimension

    Kwargs:
        lag (int):
            the lag between elements in the embedded vectors

    Returns:
        emb_dim x m array:
            matrix of embedded vectors of the form
            [data[i], data[i+lag], data[i+2*lag], ... data[i+(emb_dim-1)*lag]]
            for i in 0 to m-1 (m = len(data)-(emb_dim-1)*lag)
    """
    data = np.asarray(data)
    min_len = (emb_dim - 1) * lag + 1
    if len(data) < min_len:
        msg = "cannot embed data of length {} with embedding dimension {} " \
            + "and lag {}, minimum required length is {}"
        raise ValueError(msg.format(len(data), emb_dim, lag, min_len))
    m = len(data) - min_len + 1
    # Broadcasting builds the index matrix: row offsets (m x 1) plus the
    # per-dimension lag pattern (emb_dim,).
    row_offsets = np.arange(m).reshape((m, 1))
    indices = row_offsets + np.arange(emb_dim) * lag
    return data[indices]
|
[
"def",
"delay_embedding",
"(",
"data",
",",
"emb_dim",
",",
"lag",
"=",
"1",
")",
":",
"data",
"=",
"np",
".",
"asarray",
"(",
"data",
")",
"min_len",
"=",
"(",
"emb_dim",
"-",
"1",
")",
"*",
"lag",
"+",
"1",
"if",
"len",
"(",
"data",
")",
"<",
"min_len",
":",
"msg",
"=",
"\"cannot embed data of length {} with embedding dimension {} \"",
"+",
"\"and lag {}, minimum required length is {}\"",
"raise",
"ValueError",
"(",
"msg",
".",
"format",
"(",
"len",
"(",
"data",
")",
",",
"emb_dim",
",",
"lag",
",",
"min_len",
")",
")",
"m",
"=",
"len",
"(",
"data",
")",
"-",
"min_len",
"+",
"1",
"indices",
"=",
"np",
".",
"repeat",
"(",
"[",
"np",
".",
"arange",
"(",
"emb_dim",
")",
"*",
"lag",
"]",
",",
"m",
",",
"axis",
"=",
"0",
")",
"indices",
"+=",
"np",
".",
"arange",
"(",
"m",
")",
".",
"reshape",
"(",
"(",
"m",
",",
"1",
")",
")",
"return",
"data",
"[",
"indices",
"]"
] | 31.862069 | 17.37931 |
def get_pres_features(self, features=None):
    """Return a DataFrame of the selected features for presented items.

    ``features`` defaults to every key of ``self.dist_funcs``; a single
    non-list value is wrapped into a one-element list.  ``None`` cells are
    passed through unchanged; dict cells are filtered to the wanted keys.
    """
    if features is None:
        wanted = self.dist_funcs.keys()
    elif isinstance(features, list):
        wanted = features
    else:
        wanted = [features]

    def _filter_cell(cell):
        if cell is None:
            return None
        return {key: val for key, val in cell.items() if key in wanted}

    return self.pres.applymap(_filter_cell)
|
[
"def",
"get_pres_features",
"(",
"self",
",",
"features",
"=",
"None",
")",
":",
"if",
"features",
"is",
"None",
":",
"features",
"=",
"self",
".",
"dist_funcs",
".",
"keys",
"(",
")",
"elif",
"not",
"isinstance",
"(",
"features",
",",
"list",
")",
":",
"features",
"=",
"[",
"features",
"]",
"return",
"self",
".",
"pres",
".",
"applymap",
"(",
"lambda",
"x",
":",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"x",
".",
"items",
"(",
")",
"if",
"k",
"in",
"features",
"}",
"if",
"x",
"is",
"not",
"None",
"else",
"None",
")"
] | 42.444444 | 13.111111 |
def add_tags(self, tags):
    """Attach one or more tags to this comment via the REST API.

    A single tag may be passed as a bare string; it is wrapped in a list.
    """
    tag_list = tags if isinstance(tags, list) else [tags]
    endpoint = 'bug/comment/%s/tags' % self._comment['id']
    self._bugsy.request(endpoint, method='PUT', json={"add": tag_list})
|
[
"def",
"add_tags",
"(",
"self",
",",
"tags",
")",
":",
"if",
"not",
"isinstance",
"(",
"tags",
",",
"list",
")",
":",
"tags",
"=",
"[",
"tags",
"]",
"self",
".",
"_bugsy",
".",
"request",
"(",
"'bug/comment/%s/tags'",
"%",
"self",
".",
"_comment",
"[",
"'id'",
"]",
",",
"method",
"=",
"'PUT'",
",",
"json",
"=",
"{",
"\"add\"",
":",
"tags",
"}",
")"
] | 34.875 | 11.125 |
def p_declnamelist(self, p):
    'declnamelist : declnamelist COMMA declname'
    # NOTE: the string above is a PLY grammar rule, not free-form
    # documentation -- PLY reads it to build the parser, so it must not
    # be edited.  Append the newly parsed declname to the tuple so far.
    p[0] = p[1] + (p[3],)
    # Propagate the line number of the list's first element to the result.
    p.set_lineno(0, p.lineno(1))
|
[
"def",
"p_declnamelist",
"(",
"self",
",",
"p",
")",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"1",
"]",
"+",
"(",
"p",
"[",
"3",
"]",
",",
")",
"p",
".",
"set_lineno",
"(",
"0",
",",
"p",
".",
"lineno",
"(",
"1",
")",
")"
] | 36.25 | 9.75 |
def docker_client():
    """
    Returns a docker-py client configured using environment variables
    according to the same logic as the official Docker client.
    """
    env = os.environ
    cert_path = env.get('DOCKER_CERT_PATH', '')
    if cert_path == '':
        cert_path = os.path.join(env.get('HOME', ''), '.docker')
    base_url = env.get('DOCKER_HOST')
    tls_config = None
    if env.get('DOCKER_TLS_VERIFY', '') != '':
        # TLS is on: force an https scheme and point the TLS config at
        # the certificate directory.
        scheme_and_rest = base_url.split('://', 1)
        base_url = 'https://' + scheme_and_rest[1]
        tls_config = tls.TLSConfig(
            ssl_version=ssl.PROTOCOL_TLSv1,
            verify=True,
            assert_hostname=False,
            client_cert=(os.path.join(cert_path, 'cert.pem'),
                         os.path.join(cert_path, 'key.pem')),
            ca_cert=os.path.join(cert_path, 'ca.pem'),
        )
    timeout = int(env.get('DOCKER_CLIENT_TIMEOUT', 60))
    return Client(
        base_url=base_url, tls=tls_config, version='1.15', timeout=timeout
    )
|
[
"def",
"docker_client",
"(",
")",
":",
"cert_path",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'DOCKER_CERT_PATH'",
",",
"''",
")",
"if",
"cert_path",
"==",
"''",
":",
"cert_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"environ",
".",
"get",
"(",
"'HOME'",
",",
"''",
")",
",",
"'.docker'",
")",
"base_url",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'DOCKER_HOST'",
")",
"tls_config",
"=",
"None",
"if",
"os",
".",
"environ",
".",
"get",
"(",
"'DOCKER_TLS_VERIFY'",
",",
"''",
")",
"!=",
"''",
":",
"parts",
"=",
"base_url",
".",
"split",
"(",
"'://'",
",",
"1",
")",
"base_url",
"=",
"'%s://%s'",
"%",
"(",
"'https'",
",",
"parts",
"[",
"1",
"]",
")",
"client_cert",
"=",
"(",
"os",
".",
"path",
".",
"join",
"(",
"cert_path",
",",
"'cert.pem'",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"cert_path",
",",
"'key.pem'",
")",
")",
"ca_cert",
"=",
"os",
".",
"path",
".",
"join",
"(",
"cert_path",
",",
"'ca.pem'",
")",
"tls_config",
"=",
"tls",
".",
"TLSConfig",
"(",
"ssl_version",
"=",
"ssl",
".",
"PROTOCOL_TLSv1",
",",
"verify",
"=",
"True",
",",
"assert_hostname",
"=",
"False",
",",
"client_cert",
"=",
"client_cert",
",",
"ca_cert",
"=",
"ca_cert",
",",
")",
"timeout",
"=",
"int",
"(",
"os",
".",
"environ",
".",
"get",
"(",
"'DOCKER_CLIENT_TIMEOUT'",
",",
"60",
")",
")",
"return",
"Client",
"(",
"base_url",
"=",
"base_url",
",",
"tls",
"=",
"tls_config",
",",
"version",
"=",
"'1.15'",
",",
"timeout",
"=",
"timeout",
")"
] | 33.03125 | 19.28125 |
def get_history(self, start, end):
    """Return the historical trades falling inside a datetime window.

    Arguments:
        start {str} -- inclusive start of the range (parseable timestamp)
        end {str} -- inclusive end of the range (parseable timestamp)
    """
    indexed = self.history_table.set_index('datetime', drop=False)
    window = slice(pd.Timestamp(start), pd.Timestamp(end))
    return indexed.loc[window]
|
[
"def",
"get_history",
"(",
"self",
",",
"start",
",",
"end",
")",
":",
"return",
"self",
".",
"history_table",
".",
"set_index",
"(",
"'datetime'",
",",
"drop",
"=",
"False",
")",
".",
"loc",
"[",
"slice",
"(",
"pd",
".",
"Timestamp",
"(",
"start",
")",
",",
"pd",
".",
"Timestamp",
"(",
"end",
")",
")",
"]"
] | 27.25 | 9.083333 |
def apply_meta(self, arr, funcs):
    """
    Apply metadata to help formatting the output:
    - conv_funcs: convert data before column alignments
    - deco_funcs: decorate data after column alignments

    ``arr`` is a structured numpy array.  For every (row, column) cell the
    metadata keys from ``self._get_meta`` are looked up in ``funcs`` and,
    when present, the matching function is applied to the cell value with
    the metadata value as second argument.  A transformed copy is returned;
    ``arr`` itself is left untouched.  Returns ``None`` for empty input.
    """
    if arr is None:
        logging.error("unable to convert data: emtpy section")
        return None
    tmp = np.copy(arr)
    for col in arr.dtype.names:
        for row in self._get_row_hdrs():
            meta = self._get_meta(row, col)
            # dict.iteritems()/iterkeys() were Python-2-only; .items() and a
            # plain membership test work on both Python 2.7 and Python 3.
            # Sorting keeps the application order of meta keys deterministic.
            for mk, mv in sorted(meta.items()):
                if mk in funcs:
                    tmp[col][self.irt[row]] = \
                        funcs[mk](tmp[col][self.irt[row]], mv)
    return tmp
|
[
"def",
"apply_meta",
"(",
"self",
",",
"arr",
",",
"funcs",
")",
":",
"if",
"arr",
"is",
"None",
":",
"logging",
".",
"error",
"(",
"\"unable to convert data: emtpy section\"",
")",
"return",
"None",
"tmp",
"=",
"np",
".",
"copy",
"(",
"arr",
")",
"for",
"col",
"in",
"arr",
".",
"dtype",
".",
"names",
":",
"for",
"row",
"in",
"self",
".",
"_get_row_hdrs",
"(",
")",
":",
"meta",
"=",
"self",
".",
"_get_meta",
"(",
"row",
",",
"col",
")",
"for",
"mk",
",",
"mv",
"in",
"sorted",
"(",
"meta",
".",
"iteritems",
"(",
")",
")",
":",
"if",
"mk",
"in",
"funcs",
".",
"iterkeys",
"(",
")",
":",
"tmp",
"[",
"col",
"]",
"[",
"self",
".",
"irt",
"[",
"row",
"]",
"]",
"=",
"funcs",
"[",
"mk",
"]",
"(",
"tmp",
"[",
"col",
"]",
"[",
"self",
".",
"irt",
"[",
"row",
"]",
"]",
",",
"mv",
")",
"return",
"tmp"
] | 36.3 | 15.4 |
def render(self, **kwargs):
    """Plot the volume and the control points with Matplotlib.

    Iterates over the queued plots (``self._plots``) and draws control
    points, evaluated points, the bounding box and any extra polylines,
    honoring the display switches on ``self.vconf``.

    Keyword Args:
        fig_save_as (str): file name to save the figure to; falls back to
            the configured ``figure_image_filename`` when not displaying
        display_plot (bool): show the plot window when True (default);
            when False the figure is saved instead of shown

    Returns:
        the created Matplotlib figure object
    """
    # Calling parent function
    super(VisVolume, self).render(**kwargs)
    # Initialize variables
    legend_proxy = []
    legend_names = []
    # Start plotting of the surface and the control points grid
    fig = plt.figure(figsize=self.vconf.figure_size, dpi=self.vconf.figure_dpi)
    ax = Axes3D(fig)
    # Start plotting
    for plot in self._plots:
        pts = np.array(plot['ptsarr'], dtype=self.vconf.dtype)
        # Plot control points
        if plot['type'] == 'ctrlpts' and self.vconf.display_ctrlpts:
            ax.scatter(pts[:, 0], pts[:, 1], pts[:, 2], color=plot['color'], marker='^', s=20, depthshade=True)
            # Invisible proxy line so the legend can show this marker style.
            plot_proxy = mpl.lines.Line2D([0], [0], linestyle='none', color=plot['color'], marker='^')
            legend_proxy.append(plot_proxy)
            legend_names.append(plot['name'])
        # Plot evaluated points
        if plot['type'] == 'evalpts' and self.vconf.display_evalpts:
            ax.scatter(pts[:, 0], pts[:, 1], pts[:, 2],
                       color=plot['color'], marker='o', s=10, depthshade=True, alpha=self.vconf.alpha)
            plot_proxy = mpl.lines.Line2D([0], [0], linestyle='none', color=plot['color'], marker='o')
            legend_proxy.append(plot_proxy)
            legend_names.append(plot['name'])
        # Plot bounding box
        if plot['type'] == 'bbox' and self.vconf.display_bbox:
            ax.plot(pts[:, 0], pts[:, 1], pts[:, 2], color=plot['color'], linestyle='--')
            plot_proxy = mpl.lines.Line2D([0], [0], linestyle='--', color=plot['color'])
            legend_proxy.append(plot_proxy)
            legend_names.append(plot['name'])
        # Plot extras ('extras' entries carry (color, linewidth) in plot['color'])
        if plot['type'] == 'extras':
            ax.plot(pts[:, 0], pts[:, 1], pts[:, 2],
                    color=plot['color'][0], linestyle='-', linewidth=plot['color'][1])
            plot_proxy = mpl.lines.Line2D([0], [0], linestyle='-', color=plot['color'][0])
            legend_proxy.append(plot_proxy)
            legend_names.append(plot['name'])
    # Add legend to 3D plot, @ref: https://stackoverflow.com/a/20505720
    if self.vconf.display_legend:
        ax.legend(legend_proxy, legend_names, numpoints=1)
    # Remove axes
    if not self.vconf.display_axes:
        plt.axis('off')
    # Set axes equal
    if self.vconf.axes_equal:
        self.vconf.set_axes_equal(ax)
    # Axis labels
    if self.vconf.display_labels:
        ax.set_xlabel('x')
        ax.set_ylabel('y')
        ax.set_zlabel('z')
    # Process keyword arguments
    fig_filename = kwargs.get('fig_save_as', None)
    fig_display = kwargs.get('display_plot', True)
    # Display the plot
    if fig_display:
        plt.show()
    else:
        fig_filename = self.vconf.figure_image_filename if fig_filename is None else fig_filename
        # Save the figure
        self.vconf.save_figure_as(fig, fig_filename)
    # Return the figure object
    return fig
|
[
"def",
"render",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"# Calling parent function",
"super",
"(",
"VisVolume",
",",
"self",
")",
".",
"render",
"(",
"*",
"*",
"kwargs",
")",
"# Initialize variables",
"legend_proxy",
"=",
"[",
"]",
"legend_names",
"=",
"[",
"]",
"# Start plotting of the surface and the control points grid",
"fig",
"=",
"plt",
".",
"figure",
"(",
"figsize",
"=",
"self",
".",
"vconf",
".",
"figure_size",
",",
"dpi",
"=",
"self",
".",
"vconf",
".",
"figure_dpi",
")",
"ax",
"=",
"Axes3D",
"(",
"fig",
")",
"# Start plotting",
"for",
"plot",
"in",
"self",
".",
"_plots",
":",
"pts",
"=",
"np",
".",
"array",
"(",
"plot",
"[",
"'ptsarr'",
"]",
",",
"dtype",
"=",
"self",
".",
"vconf",
".",
"dtype",
")",
"# Plot control points",
"if",
"plot",
"[",
"'type'",
"]",
"==",
"'ctrlpts'",
"and",
"self",
".",
"vconf",
".",
"display_ctrlpts",
":",
"ax",
".",
"scatter",
"(",
"pts",
"[",
":",
",",
"0",
"]",
",",
"pts",
"[",
":",
",",
"1",
"]",
",",
"pts",
"[",
":",
",",
"2",
"]",
",",
"color",
"=",
"plot",
"[",
"'color'",
"]",
",",
"marker",
"=",
"'^'",
",",
"s",
"=",
"20",
",",
"depthshade",
"=",
"True",
")",
"plot_proxy",
"=",
"mpl",
".",
"lines",
".",
"Line2D",
"(",
"[",
"0",
"]",
",",
"[",
"0",
"]",
",",
"linestyle",
"=",
"'none'",
",",
"color",
"=",
"plot",
"[",
"'color'",
"]",
",",
"marker",
"=",
"'^'",
")",
"legend_proxy",
".",
"append",
"(",
"plot_proxy",
")",
"legend_names",
".",
"append",
"(",
"plot",
"[",
"'name'",
"]",
")",
"# Plot evaluated points",
"if",
"plot",
"[",
"'type'",
"]",
"==",
"'evalpts'",
"and",
"self",
".",
"vconf",
".",
"display_evalpts",
":",
"ax",
".",
"scatter",
"(",
"pts",
"[",
":",
",",
"0",
"]",
",",
"pts",
"[",
":",
",",
"1",
"]",
",",
"pts",
"[",
":",
",",
"2",
"]",
",",
"color",
"=",
"plot",
"[",
"'color'",
"]",
",",
"marker",
"=",
"'o'",
",",
"s",
"=",
"10",
",",
"depthshade",
"=",
"True",
",",
"alpha",
"=",
"self",
".",
"vconf",
".",
"alpha",
")",
"plot_proxy",
"=",
"mpl",
".",
"lines",
".",
"Line2D",
"(",
"[",
"0",
"]",
",",
"[",
"0",
"]",
",",
"linestyle",
"=",
"'none'",
",",
"color",
"=",
"plot",
"[",
"'color'",
"]",
",",
"marker",
"=",
"'o'",
")",
"legend_proxy",
".",
"append",
"(",
"plot_proxy",
")",
"legend_names",
".",
"append",
"(",
"plot",
"[",
"'name'",
"]",
")",
"# Plot bounding box",
"if",
"plot",
"[",
"'type'",
"]",
"==",
"'bbox'",
"and",
"self",
".",
"vconf",
".",
"display_bbox",
":",
"ax",
".",
"plot",
"(",
"pts",
"[",
":",
",",
"0",
"]",
",",
"pts",
"[",
":",
",",
"1",
"]",
",",
"pts",
"[",
":",
",",
"2",
"]",
",",
"color",
"=",
"plot",
"[",
"'color'",
"]",
",",
"linestyle",
"=",
"'--'",
")",
"plot_proxy",
"=",
"mpl",
".",
"lines",
".",
"Line2D",
"(",
"[",
"0",
"]",
",",
"[",
"0",
"]",
",",
"linestyle",
"=",
"'--'",
",",
"color",
"=",
"plot",
"[",
"'color'",
"]",
")",
"legend_proxy",
".",
"append",
"(",
"plot_proxy",
")",
"legend_names",
".",
"append",
"(",
"plot",
"[",
"'name'",
"]",
")",
"# Plot extras",
"if",
"plot",
"[",
"'type'",
"]",
"==",
"'extras'",
":",
"ax",
".",
"plot",
"(",
"pts",
"[",
":",
",",
"0",
"]",
",",
"pts",
"[",
":",
",",
"1",
"]",
",",
"pts",
"[",
":",
",",
"2",
"]",
",",
"color",
"=",
"plot",
"[",
"'color'",
"]",
"[",
"0",
"]",
",",
"linestyle",
"=",
"'-'",
",",
"linewidth",
"=",
"plot",
"[",
"'color'",
"]",
"[",
"1",
"]",
")",
"plot_proxy",
"=",
"mpl",
".",
"lines",
".",
"Line2D",
"(",
"[",
"0",
"]",
",",
"[",
"0",
"]",
",",
"linestyle",
"=",
"'-'",
",",
"color",
"=",
"plot",
"[",
"'color'",
"]",
"[",
"0",
"]",
")",
"legend_proxy",
".",
"append",
"(",
"plot_proxy",
")",
"legend_names",
".",
"append",
"(",
"plot",
"[",
"'name'",
"]",
")",
"# Add legend to 3D plot, @ref: https://stackoverflow.com/a/20505720",
"if",
"self",
".",
"vconf",
".",
"display_legend",
":",
"ax",
".",
"legend",
"(",
"legend_proxy",
",",
"legend_names",
",",
"numpoints",
"=",
"1",
")",
"# Remove axes",
"if",
"not",
"self",
".",
"vconf",
".",
"display_axes",
":",
"plt",
".",
"axis",
"(",
"'off'",
")",
"# Set axes equal",
"if",
"self",
".",
"vconf",
".",
"axes_equal",
":",
"self",
".",
"vconf",
".",
"set_axes_equal",
"(",
"ax",
")",
"# Axis labels",
"if",
"self",
".",
"vconf",
".",
"display_labels",
":",
"ax",
".",
"set_xlabel",
"(",
"'x'",
")",
"ax",
".",
"set_ylabel",
"(",
"'y'",
")",
"ax",
".",
"set_zlabel",
"(",
"'z'",
")",
"# Process keyword arguments",
"fig_filename",
"=",
"kwargs",
".",
"get",
"(",
"'fig_save_as'",
",",
"None",
")",
"fig_display",
"=",
"kwargs",
".",
"get",
"(",
"'display_plot'",
",",
"True",
")",
"# Display the plot",
"if",
"fig_display",
":",
"plt",
".",
"show",
"(",
")",
"else",
":",
"fig_filename",
"=",
"self",
".",
"vconf",
".",
"figure_image_filename",
"if",
"fig_filename",
"is",
"None",
"else",
"fig_filename",
"# Save the figure",
"self",
".",
"vconf",
".",
"save_figure_as",
"(",
"fig",
",",
"fig_filename",
")",
"# Return the figure object",
"return",
"fig"
] | 40.227848 | 23.594937 |
def salt_ssh_create_dirs(self):
    """Create the directory tree that `salt-ssh` requires under the
    settings directory."""
    logger.debug('Creating salt-ssh dirs into: %s', self.settings_dir)
    subdirs = (
        ('salt',),
        ('pillar',),
        ('etc', 'salt'),
        ('var', 'cache', 'salt'),
        ('var', 'log', 'salt'),
    )
    for parts in subdirs:
        utils.create_dir(os.path.join(self.settings_dir, *parts))
|
[
"def",
"salt_ssh_create_dirs",
"(",
"self",
")",
":",
"logger",
".",
"debug",
"(",
"'Creating salt-ssh dirs into: %s'",
",",
"self",
".",
"settings_dir",
")",
"utils",
".",
"create_dir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"settings_dir",
",",
"'salt'",
")",
")",
"utils",
".",
"create_dir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"settings_dir",
",",
"'pillar'",
")",
")",
"utils",
".",
"create_dir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"settings_dir",
",",
"'etc'",
",",
"'salt'",
")",
")",
"utils",
".",
"create_dir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"settings_dir",
",",
"'var'",
",",
"'cache'",
",",
"'salt'",
")",
")",
"utils",
".",
"create_dir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"settings_dir",
",",
"'var'",
",",
"'log'",
",",
"'salt'",
")",
")"
] | 55 | 22.6 |
def add_nodes(self, nodes):  # noqa: D302
    r"""
    Add nodes to tree.

    :param nodes: Node(s) to add with associated data. If there are
                  several list items in the argument with the same node
                  name the resulting node data is a list with items
                  corresponding to the data of each entry in the argument
                  with the same node name, in their order of appearance,
                  in addition to any existing node data if the node is
                  already present in the tree
    :type nodes: :ref:`NodesWithData`

    :raises:
     * RuntimeError (Argument \`nodes\` is not valid)

     * ValueError (Illegal node name: *[node_name]*)

    For example:

    .. =[=cog
    .. import docs.support.incfile
    .. docs.support.incfile.incfile('ptrie_example.py', cog.out)
    .. =]=
    .. code-block:: python

        # ptrie_example.py
        import ptrie

        def create_tree():
            tobj = ptrie.Trie()
            tobj.add_nodes([
                {'name':'root.branch1', 'data':5},
                {'name':'root.branch1', 'data':7},
                {'name':'root.branch2', 'data':[]},
                {'name':'root.branch1.leaf1', 'data':[]},
                {'name':'root.branch1.leaf1.subleaf1', 'data':333},
                {'name':'root.branch1.leaf2', 'data':'Hello world!'},
                {'name':'root.branch1.leaf2.subleaf2', 'data':[]},
            ])
            return tobj

    .. =[=end=]=

    .. code-block:: python

        >>> from __future__ import print_function
        >>> import docs.support.ptrie_example
        >>> tobj = docs.support.ptrie_example.create_tree()
        >>> print(tobj)
        root
        ├branch1 (*)
        │├leaf1
        ││└subleaf1 (*)
        │└leaf2 (*)
        │ └subleaf2
        └branch2
        >>> tobj.get_data('root.branch1')
        [5, 7]
    """
    self._validate_nodes_with_data(nodes)
    # A single dict argument is normalized to a one-element list.
    nodes = nodes if isinstance(nodes, list) else [nodes]
    # Create root node (if needed)
    if not self.root_name:
        # The root name is taken from the first component of the first
        # node's name.
        self._set_root_name(nodes[0]["name"].split(self._node_separator)[0].strip())
        self._root_hierarchy_length = len(
            self.root_name.split(self._node_separator)
        )
        self._create_node(name=self.root_name, parent="", children=[], data=[])
    # Process new data
    for node_dict in nodes:
        name, data = node_dict["name"], node_dict["data"]
        if name not in self._db:
            # Validate node name (root of new node same as tree root)
            if not name.startswith(self.root_name + self._node_separator):
                raise ValueError("Illegal node name: {0}".format(name))
            self._create_intermediate_nodes(name)
        # Non-empty list data extends as-is; an empty list adds nothing;
        # any non-list value is wrapped in a one-element list.
        self._db[name]["data"] += copy.deepcopy(
            data
            if isinstance(data, list) and data
            else ([] if isinstance(data, list) else [data])
        )
|
[
"def",
"add_nodes",
"(",
"self",
",",
"nodes",
")",
":",
"# noqa: D302",
"self",
".",
"_validate_nodes_with_data",
"(",
"nodes",
")",
"nodes",
"=",
"nodes",
"if",
"isinstance",
"(",
"nodes",
",",
"list",
")",
"else",
"[",
"nodes",
"]",
"# Create root node (if needed)",
"if",
"not",
"self",
".",
"root_name",
":",
"self",
".",
"_set_root_name",
"(",
"nodes",
"[",
"0",
"]",
"[",
"\"name\"",
"]",
".",
"split",
"(",
"self",
".",
"_node_separator",
")",
"[",
"0",
"]",
".",
"strip",
"(",
")",
")",
"self",
".",
"_root_hierarchy_length",
"=",
"len",
"(",
"self",
".",
"root_name",
".",
"split",
"(",
"self",
".",
"_node_separator",
")",
")",
"self",
".",
"_create_node",
"(",
"name",
"=",
"self",
".",
"root_name",
",",
"parent",
"=",
"\"\"",
",",
"children",
"=",
"[",
"]",
",",
"data",
"=",
"[",
"]",
")",
"# Process new data",
"for",
"node_dict",
"in",
"nodes",
":",
"name",
",",
"data",
"=",
"node_dict",
"[",
"\"name\"",
"]",
",",
"node_dict",
"[",
"\"data\"",
"]",
"if",
"name",
"not",
"in",
"self",
".",
"_db",
":",
"# Validate node name (root of new node same as tree root)",
"if",
"not",
"name",
".",
"startswith",
"(",
"self",
".",
"root_name",
"+",
"self",
".",
"_node_separator",
")",
":",
"raise",
"ValueError",
"(",
"\"Illegal node name: {0}\"",
".",
"format",
"(",
"name",
")",
")",
"self",
".",
"_create_intermediate_nodes",
"(",
"name",
")",
"self",
".",
"_db",
"[",
"name",
"]",
"[",
"\"data\"",
"]",
"+=",
"copy",
".",
"deepcopy",
"(",
"data",
"if",
"isinstance",
"(",
"data",
",",
"list",
")",
"and",
"data",
"else",
"(",
"[",
"]",
"if",
"isinstance",
"(",
"data",
",",
"list",
")",
"else",
"[",
"data",
"]",
")",
")"
] | 37.650602 | 21.204819 |
def read(self):
    """Read a Response, do some validation, and return it."""
    if FLAGS.sc2_verbose_protocol:
        self._log(" Reading response ".center(60, "-"))
        start = time.time()
    response = self._read()
    if FLAGS.sc2_verbose_protocol:
        elapsed_ms = 1000 * (time.time() - start)
        self._log(" %0.1f msec\n" % elapsed_ms)
        self._log_packet(response)
    if not response.HasField("status"):
        raise ProtocolError("Got an incomplete response without a status.")
    prev_status = self._status
    self._status = Status(response.status)  # pytype: disable=not-callable
    if not response.error:
        return response
    # The server reported RPC-level errors: log and surface them.
    err_str = ("Error in RPC response (likely a bug). "
               "Prev status: %s, new status: %s, error:\n%s" % (
                   prev_status, self._status, "\n".join(response.error)))
    logging.error(err_str)
    raise ProtocolError(err_str)
|
[
"def",
"read",
"(",
"self",
")",
":",
"if",
"FLAGS",
".",
"sc2_verbose_protocol",
":",
"self",
".",
"_log",
"(",
"\" Reading response \"",
".",
"center",
"(",
"60",
",",
"\"-\"",
")",
")",
"start",
"=",
"time",
".",
"time",
"(",
")",
"response",
"=",
"self",
".",
"_read",
"(",
")",
"if",
"FLAGS",
".",
"sc2_verbose_protocol",
":",
"self",
".",
"_log",
"(",
"\" %0.1f msec\\n\"",
"%",
"(",
"1000",
"*",
"(",
"time",
".",
"time",
"(",
")",
"-",
"start",
")",
")",
")",
"self",
".",
"_log_packet",
"(",
"response",
")",
"if",
"not",
"response",
".",
"HasField",
"(",
"\"status\"",
")",
":",
"raise",
"ProtocolError",
"(",
"\"Got an incomplete response without a status.\"",
")",
"prev_status",
"=",
"self",
".",
"_status",
"self",
".",
"_status",
"=",
"Status",
"(",
"response",
".",
"status",
")",
"# pytype: disable=not-callable",
"if",
"response",
".",
"error",
":",
"err_str",
"=",
"(",
"\"Error in RPC response (likely a bug). \"",
"\"Prev status: %s, new status: %s, error:\\n%s\"",
"%",
"(",
"prev_status",
",",
"self",
".",
"_status",
",",
"\"\\n\"",
".",
"join",
"(",
"response",
".",
"error",
")",
")",
")",
"logging",
".",
"error",
"(",
"err_str",
")",
"raise",
"ProtocolError",
"(",
"err_str",
")",
"return",
"response"
] | 43.15 | 16.2 |
def install(ctx, services, delete_after_install=False):
    """Install a honeypot service from the online library, local path or zipfile.

    :param ctx: click context; ``ctx.obj["HOME"]`` holds the home directory.
    :param services: iterable of service names / paths / zipfiles to install.
    :param delete_after_install: currently unused; kept for interface
        compatibility.
    """
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    services_path = os.path.join(home, SERVICES)
    installed_all_plugins = True
    for service in services:
        try:
            plugin_utils.install_plugin(service, SERVICE, services_path, register_service)
        except exceptions.PluginAlreadyInstalled as exc:
            click.echo(exc)
            installed_all_plugins = False
    if not installed_all_plugins:
        # Bugfix: ctx.exit() raises on its own and returns None, so the
        # previous ``raise ctx.exit(...)`` would never reach the ``raise``
        # (and would be a TypeError if it did).  Call it directly instead.
        ctx.exit(errno.EEXIST)
|
[
"def",
"install",
"(",
"ctx",
",",
"services",
",",
"delete_after_install",
"=",
"False",
")",
":",
"logger",
".",
"debug",
"(",
"\"running command %s (%s)\"",
",",
"ctx",
".",
"command",
".",
"name",
",",
"ctx",
".",
"params",
",",
"extra",
"=",
"{",
"\"command\"",
":",
"ctx",
".",
"command",
".",
"name",
",",
"\"params\"",
":",
"ctx",
".",
"params",
"}",
")",
"home",
"=",
"ctx",
".",
"obj",
"[",
"\"HOME\"",
"]",
"services_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"home",
",",
"SERVICES",
")",
"installed_all_plugins",
"=",
"True",
"for",
"service",
"in",
"services",
":",
"try",
":",
"plugin_utils",
".",
"install_plugin",
"(",
"service",
",",
"SERVICE",
",",
"services_path",
",",
"register_service",
")",
"except",
"exceptions",
".",
"PluginAlreadyInstalled",
"as",
"exc",
":",
"click",
".",
"echo",
"(",
"exc",
")",
"installed_all_plugins",
"=",
"False",
"if",
"not",
"installed_all_plugins",
":",
"raise",
"ctx",
".",
"exit",
"(",
"errno",
".",
"EEXIST",
")"
] | 39.777778 | 20.222222 |
def get_encodings_from_content(content):
    """Return a list of character encodings declared inside *content*.

    Scans for ``<meta charset=...>`` declarations, ``<meta ... content=...>``
    pragma declarations and XML ``encoding=...`` prologues, in that order.

    :param content: string to extract encoding declarations from.
    """
    declaration_patterns = (
        re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I),
        re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I),
        re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]'),
    )
    encodings = []
    for pattern in declaration_patterns:
        encodings.extend(pattern.findall(content))
    return encodings
|
[
"def",
"get_encodings_from_content",
"(",
"content",
")",
":",
"charset_re",
"=",
"re",
".",
"compile",
"(",
"r'<meta.*?charset=[\"\\']*(.+?)[\"\\'>]'",
",",
"flags",
"=",
"re",
".",
"I",
")",
"pragma_re",
"=",
"re",
".",
"compile",
"(",
"r'<meta.*?content=[\"\\']*;?charset=(.+?)[\"\\'>]'",
",",
"flags",
"=",
"re",
".",
"I",
")",
"xml_re",
"=",
"re",
".",
"compile",
"(",
"r'^<\\?xml.*?encoding=[\"\\']*(.+?)[\"\\'>]'",
")",
"return",
"(",
"charset_re",
".",
"findall",
"(",
"content",
")",
"+",
"pragma_re",
".",
"findall",
"(",
"content",
")",
"+",
"xml_re",
".",
"findall",
"(",
"content",
")",
")"
] | 38.384615 | 19.153846 |
def compute_key(cli, familly, discriminant=None):
    """Compute a unique cache key from all connection parameters.

    :param cli: client object exposing ``host``, ``user``, ``password`` and a
        ``log`` attribute with a ``debug`` method.
    :param familly: namespace string mixed into the key (name kept as-is for
        backward compatibility).
    :param discriminant: optional extra data mixed into the key; a string, or
        a list/tuple whose ``None``/``False`` items are skipped.
    :return: sha256 hex digest string identifying the connection.
    """
    def _update(hasher, value):
        # hashlib requires bytes on Python 3; encode text transparently.
        # This is a no-op change for the ASCII str inputs used on Python 2.
        if isinstance(value, bytes):
            hasher.update(value)
        else:
            hasher.update(str(value).encode('utf-8'))

    hash_key = hashlib.sha256()
    _update(hash_key, familly)
    _update(hash_key, cli.host)
    _update(hash_key, cli.user)
    _update(hash_key, cli.password)
    if discriminant:
        # The original had two byte-identical branches for list and tuple;
        # they are merged into a single isinstance check.
        if isinstance(discriminant, (list, tuple)):
            for i in discriminant:
                if i is not None and i is not False:
                    _update(hash_key, str(i))
        else:
            _update(hash_key, discriminant)
    hash_key = hash_key.hexdigest()
    cli.log.debug("hash_key: " + hash_key)
    return hash_key
|
[
"def",
"compute_key",
"(",
"cli",
",",
"familly",
",",
"discriminant",
"=",
"None",
")",
":",
"hash_key",
"=",
"hashlib",
".",
"sha256",
"(",
")",
"hash_key",
".",
"update",
"(",
"familly",
")",
"hash_key",
".",
"update",
"(",
"cli",
".",
"host",
")",
"hash_key",
".",
"update",
"(",
"cli",
".",
"user",
")",
"hash_key",
".",
"update",
"(",
"cli",
".",
"password",
")",
"if",
"discriminant",
":",
"if",
"isinstance",
"(",
"discriminant",
",",
"list",
")",
":",
"for",
"i",
"in",
"discriminant",
":",
"if",
"i",
"is",
"not",
"None",
"and",
"i",
"is",
"not",
"False",
":",
"hash_key",
".",
"update",
"(",
"str",
"(",
"i",
")",
")",
"elif",
"isinstance",
"(",
"discriminant",
",",
"tuple",
")",
":",
"for",
"i",
"in",
"discriminant",
":",
"if",
"i",
"is",
"not",
"None",
"and",
"i",
"is",
"not",
"False",
":",
"hash_key",
".",
"update",
"(",
"str",
"(",
"i",
")",
")",
"else",
":",
"hash_key",
".",
"update",
"(",
"discriminant",
")",
"hash_key",
"=",
"hash_key",
".",
"hexdigest",
"(",
")",
"cli",
".",
"log",
".",
"debug",
"(",
"\"hash_key: \"",
"+",
"hash_key",
")",
"return",
"hash_key"
] | 38.190476 | 8.761905 |
def swo_supported_speeds(self, cpu_speed, num_speeds=3):
    """Retrieves a list of SWO speeds supported by both the target and the
    connected J-Link.

    The supported speeds are returned in order from highest to lowest.

    Args:
      self (JLink): the ``JLink`` instance
      cpu_speed (int): the target's CPU speed in Hz
      num_speeds (int): the number of compatible speeds to return

    Returns:
      A list of compatible SWO speeds in Hz in order from highest to lowest.

    Raises:
      JLinkException: if the DLL reports an error (negative return code).
    """
    # The DLL fills the buffer in place and returns how many entries it wrote.
    speeds = (ctypes.c_uint32 * num_speeds)()
    num_found = self._dll.JLINKARM_SWO_GetCompatibleSpeeds(cpu_speed, 0, speeds, num_speeds)
    if num_found < 0:
        raise errors.JLinkException(num_found)
    return list(speeds)[:num_found]
|
[
"def",
"swo_supported_speeds",
"(",
"self",
",",
"cpu_speed",
",",
"num_speeds",
"=",
"3",
")",
":",
"buf_size",
"=",
"num_speeds",
"buf",
"=",
"(",
"ctypes",
".",
"c_uint32",
"*",
"buf_size",
")",
"(",
")",
"res",
"=",
"self",
".",
"_dll",
".",
"JLINKARM_SWO_GetCompatibleSpeeds",
"(",
"cpu_speed",
",",
"0",
",",
"buf",
",",
"buf_size",
")",
"if",
"res",
"<",
"0",
":",
"raise",
"errors",
".",
"JLinkException",
"(",
"res",
")",
"return",
"list",
"(",
"buf",
")",
"[",
":",
"res",
"]"
] | 36.809524 | 21.952381 |
def display_name(self):
    """Readable name for the variant."""
    # SNVs are labeled with up to two gene symbols, structural variants
    # with their starting cytoband and length.
    if not self.is_snv:
        return "{this.cytoband_start} ({this.sv_len})".format(this=self)
    return ', '.join(self.gene_symbols[:2])
|
[
"def",
"display_name",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_snv",
":",
"gene_ids",
"=",
"self",
".",
"gene_symbols",
"[",
":",
"2",
"]",
"return",
"', '",
".",
"join",
"(",
"gene_ids",
")",
"else",
":",
"return",
"\"{this.cytoband_start} ({this.sv_len})\"",
".",
"format",
"(",
"this",
"=",
"self",
")"
] | 37.285714 | 14.714286 |
def add(self, constraint, check=False):
    """
    Add a constraint to the set.

    :param constraint: The constraint to add to the set; a plain bool is
        wrapped in a BoolConstant first.
    :param check: if True, run the solver after adding and raise
        ``ValueError`` when the resulting set is unsatisfiable.
    :return: None
    :raises Exception: if this set has been forked (frozen by a live child).
    :raises ValueError: if ``check`` is True and the set became infeasible.
    """
    if isinstance(constraint, bool):
        constraint = BoolConstant(constraint)
    assert isinstance(constraint, Bool)
    constraint = simplify(constraint)
    # If self._child is not None this constraint set has been forked and a
    # a derived constraintset may be using this. So we can't add any more
    # constraints to this one. After the child constraintSet is deleted
    # we regain the ability to add constraints.
    if self._child is not None:
        raise Exception('ConstraintSet is frozen')
    if isinstance(constraint, BoolConstant):
        if not constraint.value:
            logger.info("Adding an impossible constant constraint")
            # A constant-False constraint subsumes everything else: the set
            # is unsatisfiable, so collapse it to just this constraint
            # (it is then appended below, by the shared append path).
            self._constraints = [constraint]
        else:
            # A constant-True constraint carries no information; skip it.
            return
    self._constraints.append(constraint)
    if check:
        # Imported lazily to avoid a circular import at module load time.
        from ...core.smtlib import solver
        if not solver.check(self):
            raise ValueError("Added an impossible constraint")
|
[
"def",
"add",
"(",
"self",
",",
"constraint",
",",
"check",
"=",
"False",
")",
":",
"if",
"isinstance",
"(",
"constraint",
",",
"bool",
")",
":",
"constraint",
"=",
"BoolConstant",
"(",
"constraint",
")",
"assert",
"isinstance",
"(",
"constraint",
",",
"Bool",
")",
"constraint",
"=",
"simplify",
"(",
"constraint",
")",
"# If self._child is not None this constraint set has been forked and a",
"# a derived constraintset may be using this. So we can't add any more",
"# constraints to this one. After the child constraintSet is deleted",
"# we regain the ability to add constraints.",
"if",
"self",
".",
"_child",
"is",
"not",
"None",
":",
"raise",
"Exception",
"(",
"'ConstraintSet is frozen'",
")",
"if",
"isinstance",
"(",
"constraint",
",",
"BoolConstant",
")",
":",
"if",
"not",
"constraint",
".",
"value",
":",
"logger",
".",
"info",
"(",
"\"Adding an impossible constant constraint\"",
")",
"self",
".",
"_constraints",
"=",
"[",
"constraint",
"]",
"else",
":",
"return",
"self",
".",
"_constraints",
".",
"append",
"(",
"constraint",
")",
"if",
"check",
":",
"from",
".",
".",
".",
"core",
".",
"smtlib",
"import",
"solver",
"if",
"not",
"solver",
".",
"check",
"(",
"self",
")",
":",
"raise",
"ValueError",
"(",
"\"Added an impossible constraint\"",
")"
] | 37.6875 | 16.125 |
def __headers(self):
    """
    Build the HTTP headers for a HTTP/HTTPS SOAP request.

    Merges the mandatory SOAP headers with any user-supplied headers from
    ``self.options.headers`` (user-supplied values win).

    @return: A dictionary of header/values.
    @rtype: dict
    """
    soap_action = self.method.soap.action
    if isinstance(soap_action, unicode):
        # SOAPAction must be a byte string on the wire (Python 2 codebase).
        soap_action = soap_action.encode("utf-8")
    headers = {
        "Content-Type": "text/xml; charset=utf-8",
        "SOAPAction": soap_action,
    }
    headers.update(**self.options.headers)
    log.debug("headers = %s", headers)
    return headers
|
[
"def",
"__headers",
"(",
"self",
")",
":",
"action",
"=",
"self",
".",
"method",
".",
"soap",
".",
"action",
"if",
"isinstance",
"(",
"action",
",",
"unicode",
")",
":",
"action",
"=",
"action",
".",
"encode",
"(",
"\"utf-8\"",
")",
"result",
"=",
"{",
"\"Content-Type\"",
":",
"\"text/xml; charset=utf-8\"",
",",
"\"SOAPAction\"",
":",
"action",
"}",
"result",
".",
"update",
"(",
"*",
"*",
"self",
".",
"options",
".",
"headers",
")",
"log",
".",
"debug",
"(",
"\"headers = %s\"",
",",
"result",
")",
"return",
"result"
] | 29.294118 | 12.588235 |
def check_levels(imls, imt, min_iml=1E-10):
    """
    Raise a ValueError if the given levels are invalid.

    :param imls: a list of intensity measure and levels
    :param imt: the intensity measure type
    :param min_iml: minimum intensity measure level (default 1E-10)

    >>> check_levels([0.1, 0.2], 'PGA')  # ok
    >>> check_levels([], 'PGA')
    Traceback (most recent call last):
       ...
    ValueError: No imls for PGA: []
    >>> check_levels([0.2, 0.1], 'PGA')
    Traceback (most recent call last):
       ...
    ValueError: The imls for PGA are not sorted: [0.2, 0.1]
    >>> check_levels([0.2, 0.2], 'PGA')
    Traceback (most recent call last):
       ...
    ValueError: Found duplicated levels for PGA: [0.2, 0.2]
    """
    if len(imls) < 1:
        raise ValueError('No imls for %s: %s' % (imt, imls))
    elif imls != sorted(imls):
        raise ValueError('The imls for %s are not sorted: %s' % (imt, imls))
    # stdlib set() replaces the hand-rolled distinct() helper: only the
    # count of unique levels matters here.
    elif len(set(imls)) < len(imls):
        raise ValueError("Found duplicated levels for %s: %s" % (imt, imls))
    # len(imls) > 1 guards the IndexError the original raised on imls == [0]
    elif len(imls) > 1 and imls[0] == 0 and imls[1] <= min_iml:
        raise ValueError("The min_iml %s=%s is larger than the second level "
                         "for %s" % (imt, min_iml, imls))
    elif len(imls) > 1 and imls[0] == 0 and imls[1] > min_iml:
        # apply the cutoff: replace the leading zero with min_iml in place
        imls[0] = min_iml
|
[
"def",
"check_levels",
"(",
"imls",
",",
"imt",
",",
"min_iml",
"=",
"1E-10",
")",
":",
"if",
"len",
"(",
"imls",
")",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"'No imls for %s: %s'",
"%",
"(",
"imt",
",",
"imls",
")",
")",
"elif",
"imls",
"!=",
"sorted",
"(",
"imls",
")",
":",
"raise",
"ValueError",
"(",
"'The imls for %s are not sorted: %s'",
"%",
"(",
"imt",
",",
"imls",
")",
")",
"elif",
"len",
"(",
"distinct",
"(",
"imls",
")",
")",
"<",
"len",
"(",
"imls",
")",
":",
"raise",
"ValueError",
"(",
"\"Found duplicated levels for %s: %s\"",
"%",
"(",
"imt",
",",
"imls",
")",
")",
"elif",
"imls",
"[",
"0",
"]",
"==",
"0",
"and",
"imls",
"[",
"1",
"]",
"<=",
"min_iml",
":",
"# apply the cutoff",
"raise",
"ValueError",
"(",
"\"The min_iml %s=%s is larger than the second level \"",
"\"for %s\"",
"%",
"(",
"imt",
",",
"min_iml",
",",
"imls",
")",
")",
"elif",
"imls",
"[",
"0",
"]",
"==",
"0",
"and",
"imls",
"[",
"1",
"]",
">",
"min_iml",
":",
"# apply the cutoff",
"imls",
"[",
"0",
"]",
"=",
"min_iml"
] | 39.969697 | 16.272727 |
def _do_connect(self):
    """Connect to the remote over SSH.

    Loads the system host keys, fills in any missing username/port via
    ``_configure()``, opens the connection, then tries to parse the remote
    Gerrit version out of the transport's remote version string.

    Raises:
        GerritError: if the TCP/SSH connection fails.
    """
    self.load_system_host_keys()
    if self.username is None or self.port is None:
        self._configure()
    try:
        self.connect(hostname=self.hostname,
                     port=self.port,
                     username=self.username,
                     key_filename=self.key_filename,
                     sock=self.proxy)
    except socket.error as e:
        raise GerritError("Failed to connect to server: %s" % e)
    try:
        # The remote version comes from the transport's version string,
        # e.g. "...GerritCodeReview_2.16 ...".  _transport is presumably
        # unset when the handshake did not complete, hence the
        # AttributeError fallback below.
        version_string = self._transport.remote_version
        pattern = re.compile(r'^.*GerritCodeReview_([a-z0-9-\.]*) .*$')
        self.remote_version = _extract_version(version_string, pattern)
    except AttributeError:
        self.remote_version = None
|
[
"def",
"_do_connect",
"(",
"self",
")",
":",
"self",
".",
"load_system_host_keys",
"(",
")",
"if",
"self",
".",
"username",
"is",
"None",
"or",
"self",
".",
"port",
"is",
"None",
":",
"self",
".",
"_configure",
"(",
")",
"try",
":",
"self",
".",
"connect",
"(",
"hostname",
"=",
"self",
".",
"hostname",
",",
"port",
"=",
"self",
".",
"port",
",",
"username",
"=",
"self",
".",
"username",
",",
"key_filename",
"=",
"self",
".",
"key_filename",
",",
"sock",
"=",
"self",
".",
"proxy",
")",
"except",
"socket",
".",
"error",
"as",
"e",
":",
"raise",
"GerritError",
"(",
"\"Failed to connect to server: %s\"",
"%",
"e",
")",
"try",
":",
"version_string",
"=",
"self",
".",
"_transport",
".",
"remote_version",
"pattern",
"=",
"re",
".",
"compile",
"(",
"r'^.*GerritCodeReview_([a-z0-9-\\.]*) .*$'",
")",
"self",
".",
"remote_version",
"=",
"_extract_version",
"(",
"version_string",
",",
"pattern",
")",
"except",
"AttributeError",
":",
"self",
".",
"remote_version",
"=",
"None"
] | 40.7 | 15.6 |
def randomize_colors(im, keep_vals=(0,)):
    r'''
    Takes a greyscale image and randomly shuffles the greyscale values, so that
    all voxels labeled X will be labelled Y, and all voxels labeled Y will be
    labeled Z, where X, Y, Z and so on are randomly selected from the values
    in the input image.

    This function is useful for improving the visibility of images with
    neighboring regions that are only incrementally different from each other,
    such as that returned by `scipy.ndimage.label`.

    Parameters
    ----------
    im : array_like
        An ND image of greyscale values.
    keep_vals : array_like
        Indicate which voxel values should NOT be altered.  The default is
        ``(0,)`` which is useful for leaving the background of the image
        untouched.

    Returns
    -------
    image : ND-array
        An image the same size and type as ``im`` but with the greyscale values
        reassigned.  The unique values in both the input and output images will
        be identical.

    Notes
    -----
    If the greyscale values in the input image are not contiguous then the
    neither will they be in the output.

    Examples
    --------
    >>> import porespy as ps
    >>> import numpy as np
    >>> np.random.seed(0)
    >>> im = np.random.randint(low=0, high=5, size=[4, 4])
    >>> print(im)
    [[4 0 3 3]
     [3 1 3 2]
     [4 0 0 4]
     [2 1 0 1]]
    >>> im_rand = ps.tools.randomize_colors(im)
    >>> print(im_rand)
    [[2 0 4 4]
     [4 1 4 3]
     [2 0 0 2]
     [3 1 0 1]]

    As can be seen, the 2's have become 3, 3's have become 4, and 4's have
    become 2.  1's remained 1 by random accident.  0's remain zeros by default,
    but this can be controlled using the `keep_vals` argument.
    '''
    # Use numpy directly: the scipy top-level aliases of numpy functions
    # originally used here (sp.random, sp.in1d, sp.unique, ...) were
    # deprecated and have been removed from modern scipy releases.
    # (Also: the mutable default ``[0]`` was replaced by the tuple ``(0,)``;
    # it is converted to an array below either way.)
    import numpy as np
    im_flat = im.flatten()
    keep_vals = np.array(keep_vals)
    # Mask of voxels whose value is allowed to change.
    swap_vals = ~np.in1d(im_flat, keep_vals)
    im_vals = np.unique(im_flat[swap_vals])
    new_vals = np.random.permutation(im_vals)
    # Build a lookup table old value -> new value; kept values map to 0 slots
    # that are never hit for swapped voxels, and kept voxels map to themselves
    # only through positions not in im_vals (i.e. they are left at their
    # original value via the identity of the kept entries below).
    im_map = np.zeros(shape=[np.amax(im_vals) + 1, ], dtype=int)
    im_map[im_vals] = new_vals
    im_new = im_map[im_flat]
    im_new = np.reshape(im_new, newshape=np.shape(im))
    return im_new
|
[
"def",
"randomize_colors",
"(",
"im",
",",
"keep_vals",
"=",
"[",
"0",
"]",
")",
":",
"im_flat",
"=",
"im",
".",
"flatten",
"(",
")",
"keep_vals",
"=",
"sp",
".",
"array",
"(",
"keep_vals",
")",
"swap_vals",
"=",
"~",
"sp",
".",
"in1d",
"(",
"im_flat",
",",
"keep_vals",
")",
"im_vals",
"=",
"sp",
".",
"unique",
"(",
"im_flat",
"[",
"swap_vals",
"]",
")",
"new_vals",
"=",
"sp",
".",
"random",
".",
"permutation",
"(",
"im_vals",
")",
"im_map",
"=",
"sp",
".",
"zeros",
"(",
"shape",
"=",
"[",
"sp",
".",
"amax",
"(",
"im_vals",
")",
"+",
"1",
",",
"]",
",",
"dtype",
"=",
"int",
")",
"im_map",
"[",
"im_vals",
"]",
"=",
"new_vals",
"im_new",
"=",
"im_map",
"[",
"im_flat",
"]",
"im_new",
"=",
"sp",
".",
"reshape",
"(",
"im_new",
",",
"newshape",
"=",
"sp",
".",
"shape",
"(",
"im",
")",
")",
"return",
"im_new"
] | 31.787879 | 24.484848 |
def _emplace_transcript(transcripts, parent):
    """Retrieve the primary transcript and discard all others.

    The primary transcript is the longest one; ties are broken by the
    lexically greatest ID (the list is sorted ascending and the last
    element is taken).
    """
    transcripts.sort(key=lambda t: (len(t), t.get_attribute('ID')))
    primary = transcripts.pop()
    parent.children = [primary]
|
[
"def",
"_emplace_transcript",
"(",
"transcripts",
",",
"parent",
")",
":",
"transcripts",
".",
"sort",
"(",
"key",
"=",
"lambda",
"t",
":",
"(",
"len",
"(",
"t",
")",
",",
"t",
".",
"get_attribute",
"(",
"'ID'",
")",
")",
")",
"pt",
"=",
"transcripts",
".",
"pop",
"(",
")",
"parent",
".",
"children",
"=",
"[",
"pt",
"]"
] | 45.8 | 12 |
def create(recipients, data, cipher, flags=0):
    """
    Create and encrypt a CMS EnvelopedData message.

    @param recipients - list of X509 objects
    @param data - contents of the message
    @param cipher - CipherType object
    @param flags - flag
    """
    recipient_stack = StackOfX509(recipients)
    data_bio = Membio(data)
    cms_ptr = libcrypto.CMS_encrypt(recipient_stack.ptr, data_bio.bio,
                                    cipher.cipher, flags)
    if cms_ptr is None:
        raise CMSError("encrypt EnvelopedData")
    return EnvelopedData(cms_ptr)
|
[
"def",
"create",
"(",
"recipients",
",",
"data",
",",
"cipher",
",",
"flags",
"=",
"0",
")",
":",
"recp",
"=",
"StackOfX509",
"(",
"recipients",
")",
"bio",
"=",
"Membio",
"(",
"data",
")",
"cms_ptr",
"=",
"libcrypto",
".",
"CMS_encrypt",
"(",
"recp",
".",
"ptr",
",",
"bio",
".",
"bio",
",",
"cipher",
".",
"cipher",
",",
"flags",
")",
"if",
"cms_ptr",
"is",
"None",
":",
"raise",
"CMSError",
"(",
"\"encrypt EnvelopedData\"",
")",
"return",
"EnvelopedData",
"(",
"cms_ptr",
")"
] | 37.533333 | 7.933333 |
def delete_location(self, location_name):
    """
    Remove location with name location_name from self.locations.
    If the location had any sites, change site.location to "".
    Return the affected sites, or False when no such location exists.
    """
    target = self.find_by_name(location_name, self.locations)
    if not target:
        return False
    orphaned_sites = target.sites
    self.locations.remove(target)
    # Detach every (truthy) site from the removed location.
    for site in orphaned_sites:
        if site:
            site.location = ''
    return orphaned_sites
|
[
"def",
"delete_location",
"(",
"self",
",",
"location_name",
")",
":",
"location",
"=",
"self",
".",
"find_by_name",
"(",
"location_name",
",",
"self",
".",
"locations",
")",
"if",
"not",
"location",
":",
"return",
"False",
"sites",
"=",
"location",
".",
"sites",
"self",
".",
"locations",
".",
"remove",
"(",
"location",
")",
"for",
"site",
"in",
"sites",
":",
"if",
"site",
":",
"site",
".",
"location",
"=",
"''",
"del",
"location",
"return",
"sites"
] | 33.4 | 13.666667 |
def QueryUserDefinedFunctions(self, collection_link, query, options=None):
    """Queries user defined functions in a collection.

    :param str collection_link:
        The link to the collection.
    :param (str or dict) query:
        The SQL query text, or a dict with query text and parameters.
    :param dict options:
        The request options for the request.
    :return:
        Query Iterable of UDFs.
    :rtype:
        query_iterable.QueryIterable
    """
    if options is None:
        options = {}
    path = base.GetPathFromLink(collection_link, 'udfs')
    collection_id = base.GetResourceIdOrFullNameFromLink(collection_link)
    # fetch_fn is invoked lazily by QueryIterable once per page; it closes
    # over path/collection_id/query computed above and returns the page's
    # UDFs together with the response headers of that fetch.
    def fetch_fn(options):
        return self.__QueryFeed(path,
                                'udfs',
                                collection_id,
                                lambda r: r['UserDefinedFunctions'],
                                lambda _, b: b,
                                query,
                                options), self.last_response_headers
    return query_iterable.QueryIterable(self, query, options, fetch_fn)
|
[
"def",
"QueryUserDefinedFunctions",
"(",
"self",
",",
"collection_link",
",",
"query",
",",
"options",
"=",
"None",
")",
":",
"if",
"options",
"is",
"None",
":",
"options",
"=",
"{",
"}",
"path",
"=",
"base",
".",
"GetPathFromLink",
"(",
"collection_link",
",",
"'udfs'",
")",
"collection_id",
"=",
"base",
".",
"GetResourceIdOrFullNameFromLink",
"(",
"collection_link",
")",
"def",
"fetch_fn",
"(",
"options",
")",
":",
"return",
"self",
".",
"__QueryFeed",
"(",
"path",
",",
"'udfs'",
",",
"collection_id",
",",
"lambda",
"r",
":",
"r",
"[",
"'UserDefinedFunctions'",
"]",
",",
"lambda",
"_",
",",
"b",
":",
"b",
",",
"query",
",",
"options",
")",
",",
"self",
".",
"last_response_headers",
"return",
"query_iterable",
".",
"QueryIterable",
"(",
"self",
",",
"query",
",",
"options",
",",
"fetch_fn",
")"
] | 37.862069 | 17.275862 |
def statement(self):
    """
    statement: function_declaration
             | variable_declaration
             | expression_statement
             | if_statement
             | while_statement
             | jump_statement
    """
    # Dispatch on the current token's nature; unknown tokens fall back to
    # the parser's error handler (same outcome as the original elif chain).
    dispatch = {
        Nature.DEF: self.function_declaration,
        Nature.LET: self.variable_declaration,
        Nature.MUT: self.expression_statement,
        Nature.ID: self.expression_statement,
        Nature.IF: self.if_statement,
        Nature.WHILE: self.while_statement,
        Nature.RETURN: self.jump_statement,
    }
    handler = dispatch.get(self.token.nature, self._error)
    return handler()
|
[
"def",
"statement",
"(",
"self",
")",
":",
"if",
"self",
".",
"token",
".",
"nature",
"==",
"Nature",
".",
"DEF",
":",
"node",
"=",
"self",
".",
"function_declaration",
"(",
")",
"elif",
"self",
".",
"token",
".",
"nature",
"==",
"Nature",
".",
"LET",
":",
"node",
"=",
"self",
".",
"variable_declaration",
"(",
")",
"elif",
"self",
".",
"token",
".",
"nature",
"in",
"(",
"Nature",
".",
"MUT",
",",
"Nature",
".",
"ID",
")",
":",
"node",
"=",
"self",
".",
"expression_statement",
"(",
")",
"elif",
"self",
".",
"token",
".",
"nature",
"==",
"Nature",
".",
"IF",
":",
"node",
"=",
"self",
".",
"if_statement",
"(",
")",
"elif",
"self",
".",
"token",
".",
"nature",
"==",
"Nature",
".",
"WHILE",
":",
"node",
"=",
"self",
".",
"while_statement",
"(",
")",
"elif",
"self",
".",
"token",
".",
"nature",
"==",
"Nature",
".",
"RETURN",
":",
"node",
"=",
"self",
".",
"jump_statement",
"(",
")",
"else",
":",
"node",
"=",
"self",
".",
"_error",
"(",
")",
"return",
"node"
] | 34.52 | 8.28 |
def index():
    """Show a list of available libraries, and resource files"""
    kwdb = current_app.kwdb
    context = {
        "libraries": get_collections(kwdb, libtype="library"),
        "resource_files": get_collections(kwdb, libtype="resource"),
        "version": __version__,
    }
    return flask.render_template("libraryNames.html", data=context)
|
[
"def",
"index",
"(",
")",
":",
"kwdb",
"=",
"current_app",
".",
"kwdb",
"libraries",
"=",
"get_collections",
"(",
"kwdb",
",",
"libtype",
"=",
"\"library\"",
")",
"resource_files",
"=",
"get_collections",
"(",
"kwdb",
",",
"libtype",
"=",
"\"resource\"",
")",
"return",
"flask",
".",
"render_template",
"(",
"\"libraryNames.html\"",
",",
"data",
"=",
"{",
"\"libraries\"",
":",
"libraries",
",",
"\"version\"",
":",
"__version__",
",",
"\"resource_files\"",
":",
"resource_files",
"}",
")"
] | 42.166667 | 20.833333 |
def upload(self, **kwargs):
    """ https://api.slack.com/methods/files.upload
    """
    params = self.params
    if kwargs:
        # Extra keyword arguments override/extend the stored parameters.
        params.update(kwargs)
    endpoint = FromUrl('https://slack.com/api/files.upload', self._requests)
    return endpoint(data=params).post()
|
[
"def",
"upload",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"kwargs",
":",
"self",
".",
"params",
".",
"update",
"(",
"kwargs",
")",
"return",
"FromUrl",
"(",
"'https://slack.com/api/files.upload'",
",",
"self",
".",
"_requests",
")",
"(",
"data",
"=",
"self",
".",
"params",
")",
".",
"post",
"(",
")"
] | 41.5 | 16.333333 |
def _transform(self, X, y):
    '''
    Transforms single series
    '''
    # Run-length encode y, then keep only runs of at least min_length.
    _, starts, y_rle = self._rle(y)
    starts = np.append(starts, len(y))
    run_lengths = starts[1:] - starts[:-1]
    keep = run_lengths >= self.min_length
    Xt = [X[starts[i]:starts[i + 1]] for i in range(len(y_rle)) if keep[i]]
    yt = y_rle[keep].tolist()
    return Xt, yt
|
[
"def",
"_transform",
"(",
"self",
",",
"X",
",",
"y",
")",
":",
"z",
",",
"p",
",",
"y_rle",
"=",
"self",
".",
"_rle",
"(",
"y",
")",
"p",
"=",
"np",
".",
"append",
"(",
"p",
",",
"len",
"(",
"y",
")",
")",
"big_enough",
"=",
"p",
"[",
"1",
":",
"]",
"-",
"p",
"[",
":",
"-",
"1",
"]",
">=",
"self",
".",
"min_length",
"Xt",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"y_rle",
")",
")",
":",
"if",
"(",
"big_enough",
"[",
"i",
"]",
")",
":",
"Xt",
".",
"append",
"(",
"X",
"[",
"p",
"[",
"i",
"]",
":",
"p",
"[",
"i",
"+",
"1",
"]",
"]",
")",
"yt",
"=",
"y_rle",
"[",
"big_enough",
"]",
".",
"tolist",
"(",
")",
"return",
"Xt",
",",
"yt"
] | 25.533333 | 16.466667 |
def get_readonly_fields(self, request, obj=None):
    """
    Return readonly fields by user's permissions.
    """
    fields = list(super(EntryAdmin, self).get_readonly_fields(request, obj))
    # Lock down fields the user lacks the explicit permission to change.
    permission_to_field = (
        ('zinnia.can_change_status', 'status'),
        ('zinnia.can_change_author', 'authors'),
    )
    for permission, field in permission_to_field:
        if not request.user.has_perm(permission):
            fields.append(field)
    return fields
|
[
"def",
"get_readonly_fields",
"(",
"self",
",",
"request",
",",
"obj",
"=",
"None",
")",
":",
"readonly_fields",
"=",
"list",
"(",
"super",
"(",
"EntryAdmin",
",",
"self",
")",
".",
"get_readonly_fields",
"(",
"request",
",",
"obj",
")",
")",
"if",
"not",
"request",
".",
"user",
".",
"has_perm",
"(",
"'zinnia.can_change_status'",
")",
":",
"readonly_fields",
".",
"append",
"(",
"'status'",
")",
"if",
"not",
"request",
".",
"user",
".",
"has_perm",
"(",
"'zinnia.can_change_author'",
")",
":",
"readonly_fields",
".",
"append",
"(",
"'authors'",
")",
"return",
"readonly_fields"
] | 33.857143 | 18.571429 |
def parse_username_password_hostname(remote_url):
    """
    Parse a command line string and return username, password, remote hostname and remote path.

    Accepted shapes: ``host:path``, ``user@host:path``, ``user:pw@host:path``.

    :param remote_url: A command line string.
    :return: A tuple, containing username, password, remote hostname and remote path.
    """
    assert remote_url
    assert ':' in remote_url
    username = password = None
    if '@' in remote_url:
        # Split on the LAST '@' so usernames containing '@' still work.
        username, hostname = remote_url.rsplit('@', 1)
    else:
        hostname = remote_url
    hostname, remote_path = hostname.split(':', 1)
    if username and ':' in username:
        username, password = username.split(':', 1)
    assert hostname
    assert remote_path
    return username, password, hostname, remote_path
|
[
"def",
"parse_username_password_hostname",
"(",
"remote_url",
")",
":",
"assert",
"remote_url",
"assert",
"':'",
"in",
"remote_url",
"if",
"'@'",
"in",
"remote_url",
":",
"username",
",",
"hostname",
"=",
"remote_url",
".",
"rsplit",
"(",
"'@'",
",",
"1",
")",
"else",
":",
"username",
",",
"hostname",
"=",
"None",
",",
"remote_url",
"hostname",
",",
"remote_path",
"=",
"hostname",
".",
"split",
"(",
"':'",
",",
"1",
")",
"password",
"=",
"None",
"if",
"username",
"and",
"':'",
"in",
"username",
":",
"username",
",",
"password",
"=",
"username",
".",
"split",
"(",
"':'",
",",
"1",
")",
"assert",
"hostname",
"assert",
"remote_path",
"return",
"username",
",",
"password",
",",
"hostname",
",",
"remote_path"
] | 29.958333 | 21.125 |
def _wrap_callback_parse_parameter_data(subscription, on_data, message):
    """
    Wraps an (optional) user callback to parse ParameterData
    from a WebSocket data message.

    :param subscription: subscription object updated with the server-assigned
        id (on REPLY) or fed incoming parameter data (on DATA).
    :param on_data: optional user callback invoked with each ParameterData.
    :param message: the decoded WebSocket message protobuf.
    """
    if message.type == message.REPLY:
        # The initial reply carries the server-assigned subscription id.
        data = web_pb2.ParameterSubscriptionResponse()
        data.ParseFromString(message.reply.data)
        subscription.subscription_id = data.subscriptionId
    elif (message.type == message.DATA and
            message.data.type == yamcs_pb2.PARAMETER):
        parameter_data = ParameterData(getattr(message.data, 'parameterData'))
        #pylint: disable=protected-access
        # Let the subscription process the update first, then hand the
        # parsed data to the user callback (if any).
        subscription._process(parameter_data)
        if on_data:
            on_data(parameter_data)
|
[
"def",
"_wrap_callback_parse_parameter_data",
"(",
"subscription",
",",
"on_data",
",",
"message",
")",
":",
"if",
"message",
".",
"type",
"==",
"message",
".",
"REPLY",
":",
"data",
"=",
"web_pb2",
".",
"ParameterSubscriptionResponse",
"(",
")",
"data",
".",
"ParseFromString",
"(",
"message",
".",
"reply",
".",
"data",
")",
"subscription",
".",
"subscription_id",
"=",
"data",
".",
"subscriptionId",
"elif",
"(",
"message",
".",
"type",
"==",
"message",
".",
"DATA",
"and",
"message",
".",
"data",
".",
"type",
"==",
"yamcs_pb2",
".",
"PARAMETER",
")",
":",
"parameter_data",
"=",
"ParameterData",
"(",
"getattr",
"(",
"message",
".",
"data",
",",
"'parameterData'",
")",
")",
"#pylint: disable=protected-access",
"subscription",
".",
"_process",
"(",
"parameter_data",
")",
"if",
"on_data",
":",
"on_data",
"(",
"parameter_data",
")"
] | 43 | 11.625 |
def get_updates(self, id, project=None, top=None, skip=None):
    """GetUpdates.

    [Preview API] Return the deltas between work item revisions.

    :param int id: work item id
    :param str project: Project ID or project name
    :param int top: maximum number of updates to return
    :param int skip: number of updates to skip
    :rtype: [WorkItemUpdate]
    """
    route_values = {}
    for key, value, kind in (('project', project, 'str'), ('id', id, 'int')):
        if value is not None:
            route_values[key] = self._serialize.url(key, value, kind)
    query_parameters = {}
    for key, value in (('top', top), ('skip', skip)):
        if value is not None:
            query_parameters['$' + key] = self._serialize.query(key, value, 'int')
    response = self._send(http_method='GET',
                          location_id='6570bf97-d02c-4a91-8d93-3abe9895b1a9',
                          version='5.1-preview.3',
                          route_values=route_values,
                          query_parameters=query_parameters)
    return self._deserialize('[WorkItemUpdate]', self._unwrap_collection(response))
|
[
"def",
"get_updates",
"(",
"self",
",",
"id",
",",
"project",
"=",
"None",
",",
"top",
"=",
"None",
",",
"skip",
"=",
"None",
")",
":",
"route_values",
"=",
"{",
"}",
"if",
"project",
"is",
"not",
"None",
":",
"route_values",
"[",
"'project'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'project'",
",",
"project",
",",
"'str'",
")",
"if",
"id",
"is",
"not",
"None",
":",
"route_values",
"[",
"'id'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'id'",
",",
"id",
",",
"'int'",
")",
"query_parameters",
"=",
"{",
"}",
"if",
"top",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'$top'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'top'",
",",
"top",
",",
"'int'",
")",
"if",
"skip",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'$skip'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'skip'",
",",
"skip",
",",
"'int'",
")",
"response",
"=",
"self",
".",
"_send",
"(",
"http_method",
"=",
"'GET'",
",",
"location_id",
"=",
"'6570bf97-d02c-4a91-8d93-3abe9895b1a9'",
",",
"version",
"=",
"'5.1-preview.3'",
",",
"route_values",
"=",
"route_values",
",",
"query_parameters",
"=",
"query_parameters",
")",
"return",
"self",
".",
"_deserialize",
"(",
"'[WorkItemUpdate]'",
",",
"self",
".",
"_unwrap_collection",
"(",
"response",
")",
")"
] | 47.56 | 20.08 |
def register_model(cls: Type[Model]):
    """
    Include the given model class into the registry.

    :param cls: The class of the registered model.
    :return: The class itself, so the function can be used as a decorator.
    :raises TypeError: if cls is not a Model subclass, or is a GenericModel
        subclass.
    """
    if not issubclass(cls, Model):
        # Bugfix: error message typo "bust be" -> "must be".
        raise TypeError("model must be a subclass of Model")
    if issubclass(cls, GenericModel):
        raise TypeError("model must not be a subclass of GenericModel")
    __models__.add(cls)
    return cls
|
[
"def",
"register_model",
"(",
"cls",
":",
"Type",
"[",
"Model",
"]",
")",
":",
"if",
"not",
"issubclass",
"(",
"cls",
",",
"Model",
")",
":",
"raise",
"TypeError",
"(",
"\"model bust be a subclass of Model\"",
")",
"if",
"issubclass",
"(",
"cls",
",",
"GenericModel",
")",
":",
"raise",
"TypeError",
"(",
"\"model must not be a subclass of GenericModel\"",
")",
"__models__",
".",
"add",
"(",
"cls",
")",
"return",
"cls"
] | 31.461538 | 14.692308 |
def list_queue(self, embed_last_unused_offers=False):
    """List all the tasks queued up or waiting to be scheduled.

    :returns: list of queue items
    :rtype: list[:class:`marathon.models.queue.MarathonQueueItem`]
    """
    params = {'embed': 'lastUnusedOffers'} if embed_last_unused_offers else {}
    response = self._do_request('GET', '/v2/queue', params=params)
    return self._parse_response(response, MarathonQueueItem, is_list=True,
                                resource_name='queue')
|
[
"def",
"list_queue",
"(",
"self",
",",
"embed_last_unused_offers",
"=",
"False",
")",
":",
"if",
"embed_last_unused_offers",
":",
"params",
"=",
"{",
"'embed'",
":",
"'lastUnusedOffers'",
"}",
"else",
":",
"params",
"=",
"{",
"}",
"response",
"=",
"self",
".",
"_do_request",
"(",
"'GET'",
",",
"'/v2/queue'",
",",
"params",
"=",
"params",
")",
"return",
"self",
".",
"_parse_response",
"(",
"response",
",",
"MarathonQueueItem",
",",
"is_list",
"=",
"True",
",",
"resource_name",
"=",
"'queue'",
")"
] | 44.25 | 19.583333 |
async def run_checks(self):
    """
    Run checks on itself and on the FSM.
    """
    # Yield the FSM's checks first, then our own, then the (synchronous)
    # middleware manager checks — same order as before.
    async for item in self.fsm.health_check():
        yield item
    async for item in self.self_check():
        yield item
    for item in MiddlewareManager.health_check():
        yield item
|
[
"async",
"def",
"run_checks",
"(",
"self",
")",
":",
"async",
"for",
"check",
"in",
"self",
".",
"fsm",
".",
"health_check",
"(",
")",
":",
"yield",
"check",
"async",
"for",
"check",
"in",
"self",
".",
"self_check",
"(",
")",
":",
"yield",
"check",
"for",
"check",
"in",
"MiddlewareManager",
".",
"health_check",
"(",
")",
":",
"yield",
"check"
] | 23.923077 | 16.692308 |
def __rename_directory(self, source, target):
    """
    Renames a directory using given source and target names.

    Every project node and directory node registered for ``source`` is
    unregistered, the path is renamed, and the node is re-registered
    under ``target`` with its ``name``/``path`` attributes updated.

    :param source: Source file.
    :type source: unicode
    :param target: Target file.
    :type target: unicode
    """
    # Handle both kinds of nodes that can reference the directory path.
    for node in itertools.chain(self.__script_editor.model.get_project_nodes(source),
                                self.__script_editor.model.get_directory_nodes(source)):
        # Drop all stale registrations before mutating the node.
        self.__script_editor.model.unregister_project_nodes(node)
        self.__script_editor.unregister_node_path(node)
        # NOTE(review): the rename happens *inside* the loop, so it is
        # attempted once per matching node — confirm __rename_path is a
        # no-op / tolerant when "source" has already been renamed.
        self.__rename_path(source, target)
        node.name = os.path.basename(target)
        node.path = target
        # Re-register the node under its new path and notify the model.
        self.__script_editor.model.node_changed(node)
        self.__script_editor.register_node_path(node)
        self.__script_editor.model.set_project_nodes(node)
|
[
"def",
"__rename_directory",
"(",
"self",
",",
"source",
",",
"target",
")",
":",
"for",
"node",
"in",
"itertools",
".",
"chain",
"(",
"self",
".",
"__script_editor",
".",
"model",
".",
"get_project_nodes",
"(",
"source",
")",
",",
"self",
".",
"__script_editor",
".",
"model",
".",
"get_directory_nodes",
"(",
"source",
")",
")",
":",
"self",
".",
"__script_editor",
".",
"model",
".",
"unregister_project_nodes",
"(",
"node",
")",
"self",
".",
"__script_editor",
".",
"unregister_node_path",
"(",
"node",
")",
"self",
".",
"__rename_path",
"(",
"source",
",",
"target",
")",
"node",
".",
"name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"target",
")",
"node",
".",
"path",
"=",
"target",
"self",
".",
"__script_editor",
".",
"model",
".",
"node_changed",
"(",
"node",
")",
"self",
".",
"__script_editor",
".",
"register_node_path",
"(",
"node",
")",
"self",
".",
"__script_editor",
".",
"model",
".",
"set_project_nodes",
"(",
"node",
")"
] | 43.4 | 18.5 |
def mousePressEvent(self, event):
    """
    Updates the slider value from the horizontal position of the press.

    :param event | <QMouseEvent>
    """
    x_position = event.pos().x()
    self.setValue(self.valueAt(x_position))
|
[
"def",
"mousePressEvent",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"setValue",
"(",
"self",
".",
"valueAt",
"(",
"event",
".",
"pos",
"(",
")",
".",
"x",
"(",
")",
")",
")"
] | 32 | 10 |
def encode(
    self, word, max_length=4, var='American', reverse=False, zero_pad=True
):
    """Return the Soundex code for a word.

    Parameters
    ----------
    word : str
        The word to transform
    max_length : int
        The length of the code returned (defaults to 4)
    var : str
        The variant of the algorithm to employ (defaults to ``American``):

            - ``American`` follows the American Soundex algorithm, as
              described at :cite:`US:2007` and in :cite:`Knuth:1998`; this
              is also called Miracode
            - ``special`` follows the rules from the 1880-1910 US Census
              retrospective re-analysis, in which h & w are not treated as
              blocking consonants but as vowels. Cf. :cite:`Repici:2013`.
            - ``Census`` follows the rules laid out in GIL 55
              :cite:`US:1997` by the US Census, including coding prefixed
              and unprefixed versions of some names

    reverse : bool
        Reverse the word before computing the selected Soundex (defaults to
        False); This results in "Reverse Soundex", which is useful for
        blocking in cases where the initial elements may be in error.
    zero_pad : bool
        Pad the end of the return value with 0s to achieve a max_length
        string

    Returns
    -------
    str
        The Soundex value.  NOTE: in ``Census`` mode, names with a
        recognized prefix (VAN/CON/DE/DI/LA/LE) return a *tuple* of two
        codes (prefixed and unprefixed) instead of a single string.

    Examples
    --------
    >>> pe = Soundex()
    >>> pe.encode("Christopher")
    'C623'
    >>> pe.encode("Niall")
    'N400'
    >>> pe.encode('Smith')
    'S530'
    >>> pe.encode('Schmidt')
    'S530'

    >>> pe.encode('Christopher', max_length=-1)
    'C623160000000000000000000000000000000000000000000000000000000000'
    >>> pe.encode('Christopher', max_length=-1, zero_pad=False)
    'C62316'

    >>> pe.encode('Christopher', reverse=True)
    'R132'

    >>> pe.encode('Ashcroft')
    'A261'
    >>> pe.encode('Asicroft')
    'A226'
    >>> pe.encode('Ashcroft', var='special')
    'A226'
    >>> pe.encode('Asicroft', var='special')
    'A226'

    """
    # Require a max_length of at least 4 and not more than 64
    # (-1 is the sentinel for "unbounded", capped at 64).
    if max_length != -1:
        max_length = min(max(4, max_length), 64)
    else:
        max_length = 64

    # uppercase, normalize, decompose, and filter non-A-Z out
    word = unicode_normalize('NFKD', text_type(word.upper()))
    word = word.replace('ß', 'SS')

    if var == 'Census':
        # NOTE(review): both prefixed branches delegate to a module-level
        # soundex() helper (defined elsewhere in this file) and return a
        # 2-tuple of codes — confirm callers expect that shape.
        if word[:3] in {'VAN', 'CON'} and len(word) > 4:
            return (
                soundex(word, max_length, 'American', reverse, zero_pad),
                soundex(
                    word[3:], max_length, 'American', reverse, zero_pad
                ),
            )
        if word[:2] in {'DE', 'DI', 'LA', 'LE'} and len(word) > 3:
            return (
                soundex(word, max_length, 'American', reverse, zero_pad),
                soundex(
                    word[2:], max_length, 'American', reverse, zero_pad
                ),
            )
        # Otherwise, proceed as usual (var='American' mode, ostensibly)

    # Keep only codable letters (the A-Z set held in self._uc_set).
    word = ''.join(c for c in word if c in self._uc_set)

    # Nothing to convert, return base case
    if not word:
        if zero_pad:
            return '0' * max_length
        return '0'

    # Reverse word if computing Reverse Soundex
    if reverse:
        word = word[::-1]

    # apply the Soundex algorithm
    # (self._trans maps letters to digit codes; '9' marks H/W).
    sdx = word.translate(self._trans)

    if var == 'special':
        sdx = sdx.replace('9', '0')  # special rule for 1880-1910 census
    else:
        sdx = sdx.replace('9', '')  # rule 1
    sdx = self._delete_consecutive_repeats(sdx)  # rule 3

    # Rule 2: the first letter is kept verbatim; for H/W it is prepended
    # (their code was already stripped), otherwise it replaces its own code.
    if word[0] in 'HW':
        sdx = word[0] + sdx
    else:
        sdx = word[0] + sdx[1:]
    sdx = sdx.replace('0', '')  # rule 1

    if zero_pad:
        sdx += '0' * max_length  # rule 4

    return sdx[:max_length]
|
[
"def",
"encode",
"(",
"self",
",",
"word",
",",
"max_length",
"=",
"4",
",",
"var",
"=",
"'American'",
",",
"reverse",
"=",
"False",
",",
"zero_pad",
"=",
"True",
")",
":",
"# Require a max_length of at least 4 and not more than 64",
"if",
"max_length",
"!=",
"-",
"1",
":",
"max_length",
"=",
"min",
"(",
"max",
"(",
"4",
",",
"max_length",
")",
",",
"64",
")",
"else",
":",
"max_length",
"=",
"64",
"# uppercase, normalize, decompose, and filter non-A-Z out",
"word",
"=",
"unicode_normalize",
"(",
"'NFKD'",
",",
"text_type",
"(",
"word",
".",
"upper",
"(",
")",
")",
")",
"word",
"=",
"word",
".",
"replace",
"(",
"'ß',",
" ",
"SS')",
"",
"if",
"var",
"==",
"'Census'",
":",
"if",
"word",
"[",
":",
"3",
"]",
"in",
"{",
"'VAN'",
",",
"'CON'",
"}",
"and",
"len",
"(",
"word",
")",
">",
"4",
":",
"return",
"(",
"soundex",
"(",
"word",
",",
"max_length",
",",
"'American'",
",",
"reverse",
",",
"zero_pad",
")",
",",
"soundex",
"(",
"word",
"[",
"3",
":",
"]",
",",
"max_length",
",",
"'American'",
",",
"reverse",
",",
"zero_pad",
")",
",",
")",
"if",
"word",
"[",
":",
"2",
"]",
"in",
"{",
"'DE'",
",",
"'DI'",
",",
"'LA'",
",",
"'LE'",
"}",
"and",
"len",
"(",
"word",
")",
">",
"3",
":",
"return",
"(",
"soundex",
"(",
"word",
",",
"max_length",
",",
"'American'",
",",
"reverse",
",",
"zero_pad",
")",
",",
"soundex",
"(",
"word",
"[",
"2",
":",
"]",
",",
"max_length",
",",
"'American'",
",",
"reverse",
",",
"zero_pad",
")",
",",
")",
"# Otherwise, proceed as usual (var='American' mode, ostensibly)",
"word",
"=",
"''",
".",
"join",
"(",
"c",
"for",
"c",
"in",
"word",
"if",
"c",
"in",
"self",
".",
"_uc_set",
")",
"# Nothing to convert, return base case",
"if",
"not",
"word",
":",
"if",
"zero_pad",
":",
"return",
"'0'",
"*",
"max_length",
"return",
"'0'",
"# Reverse word if computing Reverse Soundex",
"if",
"reverse",
":",
"word",
"=",
"word",
"[",
":",
":",
"-",
"1",
"]",
"# apply the Soundex algorithm",
"sdx",
"=",
"word",
".",
"translate",
"(",
"self",
".",
"_trans",
")",
"if",
"var",
"==",
"'special'",
":",
"sdx",
"=",
"sdx",
".",
"replace",
"(",
"'9'",
",",
"'0'",
")",
"# special rule for 1880-1910 census",
"else",
":",
"sdx",
"=",
"sdx",
".",
"replace",
"(",
"'9'",
",",
"''",
")",
"# rule 1",
"sdx",
"=",
"self",
".",
"_delete_consecutive_repeats",
"(",
"sdx",
")",
"# rule 3",
"if",
"word",
"[",
"0",
"]",
"in",
"'HW'",
":",
"sdx",
"=",
"word",
"[",
"0",
"]",
"+",
"sdx",
"else",
":",
"sdx",
"=",
"word",
"[",
"0",
"]",
"+",
"sdx",
"[",
"1",
":",
"]",
"sdx",
"=",
"sdx",
".",
"replace",
"(",
"'0'",
",",
"''",
")",
"# rule 1",
"if",
"zero_pad",
":",
"sdx",
"+=",
"'0'",
"*",
"max_length",
"# rule 4",
"return",
"sdx",
"[",
":",
"max_length",
"]"
] | 33.072 | 23.096 |
def get_context_data(self, **kwargs):
    """Build the template context for the "add document" form view.

    TODO: could largely be shared with DocumentEditFormView, which only
    additionally resolves ``self.ident`` and ``self.document``.
    """
    context = super(DocumentAddFormView, self).get_context_data(**kwargs)
    self.set_mongoadmin()
    context = self.set_permissions_in_context(context)
    self.document_type = getattr(self.models, self.document_name)

    app_label = self.kwargs.get('app_label')
    document_name = self.kwargs.get('document_name')
    context['app_label'] = self.app_label
    context['document_name'] = self.document_name
    context['form_action'] = reverse(
        'document_detail_add_form', args=[app_label, document_name]
    )
    return context
|
[
"def",
"get_context_data",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"context",
"=",
"super",
"(",
"DocumentAddFormView",
",",
"self",
")",
".",
"get_context_data",
"(",
"*",
"*",
"kwargs",
")",
"self",
".",
"set_mongoadmin",
"(",
")",
"context",
"=",
"self",
".",
"set_permissions_in_context",
"(",
"context",
")",
"self",
".",
"document_type",
"=",
"getattr",
"(",
"self",
".",
"models",
",",
"self",
".",
"document_name",
")",
"context",
"[",
"'app_label'",
"]",
"=",
"self",
".",
"app_label",
"context",
"[",
"'document_name'",
"]",
"=",
"self",
".",
"document_name",
"context",
"[",
"'form_action'",
"]",
"=",
"reverse",
"(",
"'document_detail_add_form'",
",",
"args",
"=",
"[",
"self",
".",
"kwargs",
".",
"get",
"(",
"'app_label'",
")",
",",
"self",
".",
"kwargs",
".",
"get",
"(",
"'document_name'",
")",
"]",
")",
"return",
"context"
] | 51.625 | 24.125 |
def arcball_map_to_sphere(point, center, radius):
    """Return unit sphere coordinates from window coordinates."""
    # Normalized in-plane components (window y axis points down, hence flip).
    x = (point[0] - center[0]) / radius
    y = (center[1] - point[1]) / radius
    v = numpy.array((x, y, 0.0), dtype=numpy.float64)
    squared = x * x + y * y
    if squared > 1.0:
        # Position outside of sphere: project back onto the silhouette.
        v /= math.sqrt(squared)
    else:
        # Lift onto the sphere surface.
        v[2] = math.sqrt(1.0 - squared)
    return v
|
[
"def",
"arcball_map_to_sphere",
"(",
"point",
",",
"center",
",",
"radius",
")",
":",
"v",
"=",
"numpy",
".",
"array",
"(",
"(",
"(",
"point",
"[",
"0",
"]",
"-",
"center",
"[",
"0",
"]",
")",
"/",
"radius",
",",
"(",
"center",
"[",
"1",
"]",
"-",
"point",
"[",
"1",
"]",
")",
"/",
"radius",
",",
"0.0",
")",
",",
"dtype",
"=",
"numpy",
".",
"float64",
")",
"n",
"=",
"v",
"[",
"0",
"]",
"*",
"v",
"[",
"0",
"]",
"+",
"v",
"[",
"1",
"]",
"*",
"v",
"[",
"1",
"]",
"if",
"n",
">",
"1.0",
":",
"v",
"/=",
"math",
".",
"sqrt",
"(",
"n",
")",
"# position outside of sphere",
"else",
":",
"v",
"[",
"2",
"]",
"=",
"math",
".",
"sqrt",
"(",
"1.0",
"-",
"n",
")",
"return",
"v"
] | 38.090909 | 14.363636 |
def add_binding(self, binding: Binding):
    """Stores binding, wiring this node's view location into its errors."""
    def _annotate(error):
        # Looked up lazily so later changes to self._xml_node are honored.
        return error.add_view_info(self._xml_node.view_info)
    binding.add_error_info = _annotate
    self._bindings.append(binding)
|
[
"def",
"add_binding",
"(",
"self",
",",
"binding",
":",
"Binding",
")",
":",
"binding",
".",
"add_error_info",
"=",
"lambda",
"error",
":",
"error",
".",
"add_view_info",
"(",
"self",
".",
"_xml_node",
".",
"view_info",
")",
"self",
".",
"_bindings",
".",
"append",
"(",
"binding",
")"
] | 49.5 | 13.5 |
def cluster_exists(self, name):
    """Check if a given cluster exists.

    Returns True when any entry in ``self.data['clusters']`` carries the
    requested ``name``, False otherwise.
    """
    return any(entry['name'] == name for entry in self.data['clusters'])
|
[
"def",
"cluster_exists",
"(",
"self",
",",
"name",
")",
":",
"clusters",
"=",
"self",
".",
"data",
"[",
"'clusters'",
"]",
"for",
"cluster",
"in",
"clusters",
":",
"if",
"cluster",
"[",
"'name'",
"]",
"==",
"name",
":",
"return",
"True",
"return",
"False"
] | 33.571429 | 7.285714 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.