Unnamed: 0 (int64, 0–389k) | code (string, lengths 26–79.6k) | docstring (string, lengths 1–46.9k)
---|---|---|
21,600 | def matches(self, other):
if other is None:
return False
if isinstance(other, PhonemeDisjunction):
return any([phoneme.matches(other) for phoneme in self])
if isinstance(other, list) or isinstance(other, PhonologicalFeature):
other = phoneme(other)
return any([phoneme <= other for phoneme in self]) | A disjunctive list matches a phoneme if any of its members matches the phoneme.
If other is also a disjunctive list, any match between this list and the other returns true. |
21,601 | def load_rules(self):
self.col_maps = []
with open(self.col_file) as f:
for line in f:
rule = MapColumn(line)
self.col_maps.append(rule) | load the rules from file |
21,602 | def is_indexed(self, dataset):
query = text()
result = self.backend.library.database.connection.execute(query, vid=dataset.vid)
return bool(result.fetchall()) | Returns True if dataset is already indexed. Otherwise returns False. |
21,603 | def _buildStaticFiles(self):
if not self.output_path_static:
self.output_path_static = os.path.join(self.output_path, "static")
if not os.path.exists(self.output_path_static):
os.makedirs(self.output_path_static)
for x in self.static_files:
source_f = os.path.join(self.static_root, x)
dest_f = os.path.join(self.output_path_static, x)
if os.path.isdir(source_f):
if os.path.exists(dest_f):
shutil.rmtree(dest_f)
shutil.copytree(source_f, dest_f)
else:
shutil.copyfile(source_f, dest_f)
if x.endswith(".zip"):
printDebug("..unzipping")
zip_ref = zipfile.ZipFile(dest_f, "r")
zip_ref.extractall(self.output_path_static)
zip_ref.close()
printDebug("..cleaning up")
os.remove(dest_f)
shutil.rmtree(
os.path.join(self.output_path_static, "__MACOSX")) | move over static files so that relative imports work
Note: if a dir is passed, it is copied with all of its contents
If the file is a zip, it is copied and extracted too
# By default folder name is 'static', unless *output_path_static* is passed (now allowed only in special applications like KompleteVizMultiModel) |
21,604 | def parse_sgtin_96(sgtin_96):
if not sgtin_96:
raise Exception()
if not sgtin_96.startswith("30"):
raise Exception()
binary = "{0:020b}".format(int(sgtin_96, 16)).zfill(96)
header = int(binary[:8], 2)
tag_filter = int(binary[8:11], 2)
partition = binary[11:14]
partition_value = int(partition, 2)
m, l, n, k = SGTIN_96_PARTITION_MAP[partition_value]
company_start = 8 + 3 + 3
company_end = company_start + m
company_data = int(binary[company_start:company_end], 2)
if company_data > pow(10, l):
raise Exception("company prefix too large for partition")  # guard reconstructed; without it company_prefix could be unbound
company_prefix = str(company_data).zfill(l)
item_start = company_end
item_end = item_start + n
item_data = binary[item_start:item_end]
item_number = int(item_data, 2)
item_reference = str(item_number).zfill(k)
serial = int(binary[-38:], 2)
return {
"header": header,
"filter": tag_filter,
"partition": partition,
"company_prefix": company_prefix,
"item_reference": item_reference,
"serial": serial
} | Given a SGTIN-96 hex string, parse each segment.
Returns a dictionary of the segments. |
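A minimal usage sketch for `parse_sgtin_96` above. The hex value is the worked SGTIN-96 example from the EPC Tag Data Standard, and `SGTIN_96_PARTITION_MAP` is assumed to be the module-level table mapping the 3-bit partition value to (company-prefix bits, company-prefix digits, item bits, item digits):

```python
# Hypothetical usage; the exact fields depend on the module's partition map.
tag = parse_sgtin_96("3034257BF7194E4000001A85")
print(tag["company_prefix"], tag["item_reference"], tag["serial"])
```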
21,605 | def compare_params(defined, existing, return_old_value=False):
if not isinstance(defined, type(existing)):
raise SaltException(
'Zabbix object comparison failed (data type mismatch). Expecting {0}, '
'got {1}. Existing value: "{2}", defined value: "{3}"'.format(type(existing),
type(defined),
existing,
defined))
if not salt.utils.data.is_iter(defined):
if six.text_type(defined) != six.text_type(existing) and return_old_value:
return {'new': six.text_type(defined), 'old': six.text_type(existing)}
elif six.text_type(defined) != six.text_type(existing) and not return_old_value:
return six.text_type(defined)
if isinstance(defined, list):
if len(defined) != len(existing):
log.info('Lists have different lengths: %s vs %s', len(defined), len(existing))  # message elided in source; wording assumed
return {'new': defined, 'old': existing} if return_old_value else defined
else:
difflist = []
for ditem in defined:
d_in_e = []
for eitem in existing:
comp = compare_params(ditem, eitem, return_old_value)
if return_old_value:
d_in_e.append(comp['new'])
else:
d_in_e.append(comp)
if all(d_in_e):
difflist.append(ditem)
if any(difflist) and return_old_value:
return {'new': defined, 'old': existing}
elif any(difflist) and not return_old_value:
return defined
if isinstance(defined, dict):
try:
if set(defined) <= set(existing):
intersection = set(defined) & set(existing)
diffdict = {'new': {}, 'old': {}} if return_old_value else {}
for i in intersection:
comp = compare_params(defined[i], existing[i], return_old_value)
if return_old_value:
if comp or (not comp and isinstance(comp, list)):
diffdict['new'].update({i: defined[i]})
diffdict['old'].update({i: existing[i]})
else:
if comp or (not comp and isinstance(comp, list)):
diffdict.update({i: defined[i]})
return diffdict
return {'new': defined, 'old': existing} if return_old_value else defined
except TypeError:
raise SaltException(
'Zabbix object comparison failed (data type mismatch). Expecting {0}, '
'got {1}. Existing value: "{2}", defined value: "{3}"'.format(type(existing),
type(defined),
existing,
defined)) | .. versionadded:: 2017.7
Compares Zabbix object definition against existing Zabbix object.
:param defined: Zabbix object definition taken from sls file.
:param existing: Existing Zabbix object taken from result of an API call.
:param return_old_value: Default False. If True, returns dict("old"=old_val, "new"=new_val) for rollback purpose.
:return: Params that are different from existing object. Result extended by
object ID can be passed directly to Zabbix API update method. |
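A sketch of the two return shapes `compare_params` describes (values illustrative, not from the source):

```python
compare_params({"name": "web"}, {"name": "db"})
# -> {'name': 'web'}
compare_params({"name": "web"}, {"name": "db"}, return_old_value=True)
# -> {'new': {'name': 'web'}, 'old': {'name': 'db'}}
```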
21,606 | def global_state_code(self):
self._generate_func_code()
if not self._compile_regexps:
# String literals in this row were elided; reconstructed after the
# fastjsonschema code generator (best-effort, not verbatim).
return '\n'.join(
[
'from fastjsonschema import JsonSchemaException',
'',
'',
]
)
regexs = ['"{}": re.compile(r"{}")'.format(key, value.pattern) for key, value in self._compile_regexps.items()]
return '\n'.join(
[
'import re',
'from fastjsonschema import JsonSchemaException',
'',
'',
'REGEX_PATTERNS = {',
'    ' + ',\n    '.join(regexs),
'}',
'',
]
)
) | Returns global variables for generating function from ``func_code`` as code.
Includes compiled regular expressions and imports. |
21,607 | def save(self, filename):
f = open(filename, mode='w')
f.write("QGC WPL 110\n")
for w in self.wpoints:
if getattr(w, 'comment', None):
f.write("# %s\n" % w.comment)
f.write("%u\t%u\t%u\t%u\t%f\t%f\t%f\t%f\t%f\t%f\t%f\t%u\n" % (
w.seq, w.current, w.frame, w.command,
w.param1, w.param2, w.param3, w.param4,
w.x, w.y, w.z, w.autocontinue))
f.close() | save waypoints to a file |
21,608 | def parse_xml_node(self, node):
super(Preceding, self).parse_xml_node(node)
p_nodes = node.getElementsByTagNameNS(RTS_NS, 'Preceding')
if p_nodes.length != 1:
raise InvalidParticipantNodeError
p_node = p_nodes[0]
if p_node.hasAttributeNS(RTS_NS, 'timeout'):
self.timeout = int(p_node.getAttributeNS(RTS_NS, 'timeout'))
else:
self.timeout = 0
if p_node.hasAttributeNS(RTS_NS, 'sendingTiming'):
self.sending_timing = p_node.getAttributeNS(RTS_NS, 'sendingTiming')
else:
self.sending_timing = 'ASYNC'  # default value elided in source; assumed
self._preceding_components = []
for c in p_node.getElementsByTagNameNS(RTS_NS, 'TargetComponent'):  # tag name elided in source; assumed
self._preceding_components.append(TargetExecutionContext().parse_xml_node(c))
return self | Parse an xml.dom Node object representing a preceding condition into
this object. |
21,609 | def connect():
ftp_class = ftplib.FTP if not SSL else ftplib.FTP_TLS
ftp = ftp_class(timeout=TIMEOUT)
ftp.connect(HOST, PORT)
ftp.login(USER, PASSWORD)
if SSL:
ftp.prot_p()
return ftp | Connect to FTP server, login and return an ftplib.FTP instance. |
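A minimal caller for `connect` above, assuming the module-level HOST, PORT, USER, PASSWORD, SSL and TIMEOUT constants it reads:

```python
ftp = connect()
print(ftp.getwelcome())   # server banner
ftp.quit()                # polite disconnect
```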
21,610 | def add_argument(self, parser, bootstrap=False):
if self.cli_expose:
for child in self.children.values():
child.add_argument(parser, bootstrap) | Add dict-style item as an argument to the given parser.
The dict item will take all the nested items in the dictionary and
namespace them with the dict name, adding each child item as
their own CLI argument.
Examples:
A non-nested dict item with the name 'db' and children named
'port' and 'host' will result in the following being valid
CLI args:
['--db-host', 'localhost', '--db-port', '1234']
Args:
parser (argparse.ArgumentParser): The parser to add this item to.
bootstrap (bool): Flag to indicate whether you only want to mark
this item as required or not. |
21,611 | def safe_sort(values, labels=None, na_sentinel=-1, assume_unique=False):
if not is_list_like(values):
raise TypeError("Only list-like objects are allowed to be passed to"
" safe_sort as values")
if not isinstance(values, np.ndarray):
# ... (bulk of the pandas implementation truncated in this row:
# dtype coercion, argsort, and label reordering are elided) ...
np.putmask(new_labels, mask, na_sentinel)
return ordered, ensure_platform_int(new_labels) | Sort ``values`` and reorder corresponding ``labels``.
``values`` should be unique if ``labels`` is not None.
Safe for use with mixed types (int, str), orders ints before strs.
.. versionadded:: 0.19.0
Parameters
----------
values : list-like
Sequence; must be unique if ``labels`` is not None.
labels : list_like
Indices to ``values``. All out of bound indices are treated as
"not found" and will be masked with ``na_sentinel``.
na_sentinel : int, default -1
Value in ``labels`` to mark "not found".
Ignored when ``labels`` is None.
assume_unique : bool, default False
When True, ``values`` are assumed to be unique, which can speed up
the calculation. Ignored when ``labels`` is None.
Returns
-------
ordered : ndarray
Sorted ``values``
new_labels : ndarray
Reordered ``labels``; returned when ``labels`` is not None.
Raises
------
TypeError
* If ``values`` is not list-like or if ``labels`` is neither None
nor list-like
* If ``values`` cannot be sorted
ValueError
* If ``labels`` is not None and ``values`` contain duplicates. |
21,612 | def set_file_atrificat_of_project(self, doc, symbol, value):
if self.has_package(doc) and self.has_file(doc):
self.file(doc).add_artifact(symbol, value)
else:
raise OrderError() | Sets a file name, uri or home artifact.
Raises OrderError if no package or file defined. |
21,613 | def config_args(self):
self.arg_parser.add_argument(, action=,
version= + str(__version__))
self.arg_parser.add_argument(,
action=, dest = ,
help=_())
self.arg_parser.add_argument(,
action=, dest = ,
help=_())
self.arg_parser.add_argument(,
action=, dest=,
help=_())
self.arg_parser.add_argument(,
action=, dest=, default=,
help=_())
self.arg_parser.add_argument(, ,
action=, dest=,
help=_())
self.arg_parser.add_argument(,
action=, dest=,
help=_(+0500\))
self.arg_parser.add_argument(,
metavar=, nargs=,
help=_())
self.arg_parser.add_argument_group(self.filter_args)
self.arg_parser.add_argument_group(self.output_args)
self.args = self.arg_parser.parse_args() | Set config options |
21,614 | def _rest_request_to_json(self, address, object_path, service_name, requests_config, tags, *args, **kwargs):
response = self._rest_request(address, object_path, service_name, requests_config, tags, *args, **kwargs)
try:
response_json = response.json()
except JSONDecodeError as e:
self.service_check(
service_name,
AgentCheck.CRITICAL,
tags=['url:%s' % self._get_url_base(address)] + tags,
message='JSON parse failed: {}'.format(e),  # tag/message strings elided in source; wording assumed
)
raise
return response_json | Query the given URL and return the JSON response |
21,615 | def main():
global args
args = parse_args()
if not args:
return 1
state = MyState(args)
for path in args.paths:
if os.path.isdir(path):
walk_dir(path, args, state)
else:
safe_process_files(os.path.dirname(path), [os.path.basename(path)], args, state)
if state.should_quit():
break
if state.failed_files:
sys.stderr.write("error: %i/%i AEADs failed\n" % (len(state.failed_files), state.file_count))
return 1
if args.debug:
sys.stderr.write("Successfully processed %i AEADs\n" % (state.file_count)) | Main function when running as a program. |
21,616 | def calc_crc16(buf):
crc_table = [0x0000, 0xc0c1, 0xc181, 0x0140, 0xc301, 0x03c0, 0x0280, 0xc241,
0xc601, 0x06c0, 0x0780, 0xc741, 0x0500, 0xc5c1, 0xc481, 0x0440,
0xcc01, 0x0cc0, 0x0d80, 0xcd41, 0x0f00, 0xcfc1, 0xce81, 0x0e40,
0x0a00, 0xcac1, 0xcb81, 0x0b40, 0xc901, 0x09c0, 0x0880, 0xc841,
0xd801, 0x18c0, 0x1980, 0xd941, 0x1b00, 0xdbc1, 0xda81, 0x1a40,
0x1e00, 0xdec1, 0xdf81, 0x1f40, 0xdd01, 0x1dc0, 0x1c80, 0xdc41,
0x1400, 0xd4c1, 0xd581, 0x1540, 0xd701, 0x17c0, 0x1680, 0xd641,
0xd201, 0x12c0, 0x1380, 0xd341, 0x1100, 0xd1c1, 0xd081, 0x1040,
0xf001, 0x30c0, 0x3180, 0xf141, 0x3300, 0xf3c1, 0xf281, 0x3240,
0x3600, 0xf6c1, 0xf781, 0x3740, 0xf501, 0x35c0, 0x3480, 0xf441,
0x3c00, 0xfcc1, 0xfd81, 0x3d40, 0xff01, 0x3fc0, 0x3e80, 0xfe41,
0xfa01, 0x3ac0, 0x3b80, 0xfb41, 0x3900, 0xf9c1, 0xf881, 0x3840,
0x2800, 0xe8c1, 0xe981, 0x2940, 0xeb01, 0x2bc0, 0x2a80, 0xea41,
0xee01, 0x2ec0, 0x2f80, 0xef41, 0x2d00, 0xedc1, 0xec81, 0x2c40,
0xe401, 0x24c0, 0x2580, 0xe541, 0x2700, 0xe7c1, 0xe681, 0x2640,
0x2200, 0xe2c1, 0xe381, 0x2340, 0xe101, 0x21c0, 0x2080, 0xe041,
0xa001, 0x60c0, 0x6180, 0xa141, 0x6300, 0xa3c1, 0xa281, 0x6240,
0x6600, 0xa6c1, 0xa781, 0x6740, 0xa501, 0x65c0, 0x6480, 0xa441,
0x6c00, 0xacc1, 0xad81, 0x6d40, 0xaf01, 0x6fc0, 0x6e80, 0xae41,
0xaa01, 0x6ac0, 0x6b80, 0xab41, 0x6900, 0xa9c1, 0xa881, 0x6840,
0x7800, 0xb8c1, 0xb981, 0x7940, 0xbb01, 0x7bc0, 0x7a80, 0xba41,
0xbe01, 0x7ec0, 0x7f80, 0xbf41, 0x7d00, 0xbdc1, 0xbc81, 0x7c40,
0xb401, 0x74c0, 0x7580, 0xb541, 0x7700, 0xb7c1, 0xb681, 0x7640,
0x7200, 0xb2c1, 0xb381, 0x7340, 0xb101, 0x71c0, 0x7080, 0xb041,
0x5000, 0x90c1, 0x9181, 0x5140, 0x9301, 0x53c0, 0x5280, 0x9241,
0x9601, 0x56c0, 0x5780, 0x9741, 0x5500, 0x95c1, 0x9481, 0x5440,
0x9c01, 0x5cc0, 0x5d80, 0x9d41, 0x5f00, 0x9fc1, 0x9e81, 0x5e40,
0x5a00, 0x9ac1, 0x9b81, 0x5b40, 0x9901, 0x59c0, 0x5880, 0x9841,
0x8801, 0x48c0, 0x4980, 0x8941, 0x4b00, 0x8bc1, 0x8a81, 0x4a40,
0x4e00, 0x8ec1, 0x8f81, 0x4f40, 0x8d01, 0x4dc0, 0x4c80, 0x8c41,
0x4400, 0x84c1, 0x8581, 0x4540, 0x8701, 0x47c0, 0x4680, 0x8641,
0x8201, 0x42c0, 0x4380, 0x8341, 0x4100, 0x81c1, 0x8081, 0x4040]
crc = 0xffff
for c in buf:
index = (crc ^ ord(c)) & 0xff
crct = crc_table[index]
crc = (crc >> 8) ^ crct
crc = (crc << 8) | (crc >> 8)
crc &= 0x7F7F
return "%04x" % crc | Drop in pure python replacement for ekmcrc.c extension.
Args:
buf (bytes): String or byte array (implicit Python 2.7 cast)
Returns:
str: 16 bit CRC per EKM Omnimeters formatted as hex string. |
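A usage sketch for `calc_crc16`: the function walks each byte with `ord(c)` (Python 2-style str input) and returns the CRC as a 4-digit hex string:

```python
# Illustrative only; the concrete CRC depends on the input bytes.
frame = "\x01\x52\x31\x02"
print(calc_crc16(frame))  # a 4-hex-digit string such as "3fa1"
```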
21,617 | def flush_template(context, declaration=None, reconstruct=True):
if declaration is None:
declaration = Line(0, )
if {, }.issubset(context.flag):
yield declaration.clone(line=)
context.flag.remove()
context.flag.remove()
if reconstruct:
for i in ensure_buffer(context):
yield i
if declaration.stripped == :
yield declaration | Emit the code needed to flush the buffer.
Will only emit the yield and clear if the buffer is known to be dirty. |
21,618 | def evaluate_with_predictions(data_file, predictions):
expected_version = '1.1'
with open(data_file) as dataset_file:
dataset_json = json.load(dataset_file)
if dataset_json['version'] != expected_version:
print('Evaluation expects v-' + expected_version +
', but got dataset with v-' + dataset_json['version'],
file=sys.stderr)
dataset = dataset_json['data']
result = _evaluate(dataset, predictions)
return result[] | Evaluate with predictions. |
21,619 | def permission_required(perm, queryset=None,
login_url=None, raise_exception=False):
def wrapper(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def inner(request, *args, **kwargs):
_kwargs = copy.copy(kwargs)
if queryset:
_kwargs['queryset'] = queryset
if 'date_field' in _kwargs:
fn = get_object_from_date_based_view
else:
fn = get_object_from_list_detail_view
if fn.validate(request, *args, **_kwargs):
obj = fn(request, *args, **_kwargs)
else:
obj = None
if not request.user.has_perm(perm, obj=obj):
if raise_exception:
raise PermissionDenied
else:
return redirect_to_login(request, login_url)
return view_func(request, *args, **_kwargs)
return inner
return wrapper | Permission check decorator for function-base generic view
This decorator works as function decorator
Parameters
----------
perm : string
A permission string
queryset : queryset or model
A queryset or model for finding object.
With classbased generic view, ``None`` for using view default queryset.
When the view does not define ``get_queryset``, ``queryset``,
``get_object``, or ``object`` then ``obj=None`` is used to check
permission.
With functional generic view, ``None`` for using passed queryset.
When non queryset was passed then ``obj=None`` is used to check
permission.
Examples
--------
>>> @permission_required('auth.change_user')
>>> def update_auth_user(request, *args, **kwargs):
... pass |
21,620 | def header(heading_text, header_level, style="atx"):
if not isinstance(header_level, int):
raise TypeError("header_level must be int")
if style not in ["atx", "setext"]:
raise ValueError("Invalid style %s (choose 'atx' or 'setext')" % style)
if style == "atx":
if not 1 <= header_level <= 6:
raise ValueError("Invalid level %d for atx" % header_level)
return ("#" * header_level) + " " + esc_format(heading_text)
else:
if not 0 < header_level < 3:
raise ValueError("Invalid level %d for setext" % header_level)
header_character = "=" if header_level == 1 else "-"
return esc_format(heading_text) + ("\n%s" % (header_character * 3)) | Return a header of specified level.
Keyword arguments:
style -- Specifies the header style (default atx).
The "atx" style uses hash signs, and has 6 levels.
The "setext" style uses dashes or equals signs for headers of
levels 1 and 2 respectively, and is limited to those two levels.
Specifying a level outside of the style's range results in a ValueError.
>>> header("Main Title", 1)
'# Main Title'
>>> header("Smaller subtitle", 4)
'#### Smaller subtitle'
>>> header("Setext style", 2, style="setext")
'Setext style\\n---' |
21,621 | def get_alert(self, id, **kwargs):
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_alert_with_http_info(id, **kwargs)
else:
(data) = self.get_alert_with_http_info(id, **kwargs)
return data | Get a specific alert # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_alert(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerAlert
If the method is called asynchronously,
returns the request thread. |
21,622 | def _shutdown_transport(self):
if self.sock is not None:
try:
unwrap = self.sock.unwrap
except AttributeError:
return
try:
self.sock = unwrap()
except ValueError:
pass | Unwrap a Python 2.6 SSL socket, so we can call shutdown() |
21,623 | def format_response_data_type(self, response_data):
if not isinstance(response_data, list) or isinstance(
response_data, str
):
return response_data
int_match_str = "|".join(self.config["response_format"]["int"])
float_match_str = "|".join(self.config["response_format"]["float"])
for item in response_data:
for key in item:
try:
if re.search(int_match_str, key) is not None:
item[key] = helpers.str2num(item[key], "int")
elif re.search(float_match_str, key) is not None:
item[key] = helpers.str2num(item[key], "float")
except ValueError:
continue
return response_data | Format the returned values into their correct types
:param response_data: the returned data |
21,624 | def getclosurevars(func):
if inspect.ismethod(func):
func = func.__func__
elif not inspect.isroutine(func):
raise TypeError("{!r} is not a Python function".format(func))
# (remainder of the implementation truncated in this row)
| Get the mapping of free variables to their current values.
Returns a named tuple of dicts mapping the current nonlocal, global
and builtin references as seen by the body of the function. A final
set of unbound names that could not be resolved is also provided.
Note:
Modified function from the Python 3.5 inspect standard library module
Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; All Rights
Reserved"
See also py-cloud-compute-cannon/NOTICES. |
21,625 | def _partition(episodes):
from ray.rllib.evaluation.sampler import RolloutMetrics
rollouts, estimates = [], []
for e in episodes:
if isinstance(e, RolloutMetrics):
rollouts.append(e)
elif isinstance(e, OffPolicyEstimate):
estimates.append(e)
else:
raise ValueError("Unknown metric type: {}".format(e))
return rollouts, estimates | Divides metrics data into true rollouts vs off-policy estimates. |
21,626 | def qteMakeAppletActive(self, applet: (QtmacsApplet, str)):
if isinstance(applet, str):
appletObj = self.qteGetAppletHandle(applet)
else:
appletObj = applet
if appletObj not in self._qteAppletList:
return False
if self.qteIsMiniApplet(appletObj):
if appletObj is not self._qteMiniApplet:
self.qteLogger.warning()
print(appletObj)
print(self._qteMiniApplet)
return False
if not appletObj.qteIsVisible():
appletObj.show(True)
else:
if not appletObj.qteIsVisible():
self.qteReplaceAppletInLayout(appletObj)
self._qteActiveApplet = appletObj
return True | Make ``applet`` visible and give it the focus.
If ``applet`` is not yet visible it will replace the
currently active applet, otherwise only the focus will shift.
The ``applet`` parameter can either be an instance of
``QtmacsApplet`` or a string denoting an applet ID. In the
latter case the ``qteGetAppletHandle`` method is used to fetch
the respective applet instance.
|Args|
* ``applet`` (**QtmacsApplet**, **str**): the applet to activate.
|Returns|
* **bool**: whether or not an applet was activated.
|Raises|
* **QtmacsArgumentError** if at least one argument has an invalid type. |
21,627 | def delete(
self, resource_group_name, if_match, provisioning_service_name, certificate_name, certificatename=None, certificateraw_bytes=None, certificateis_verified=None, certificatepurpose=None, certificatecreated=None, certificatelast_updated=None, certificatehas_private_key=None, certificatenonce=None, custom_headers=None, raw=False, **operation_config):
url = self.delete.metadata[]
path_format_arguments = {
: self._serialize.url("self.config.subscription_id", self.config.subscription_id, ),
: self._serialize.url("resource_group_name", resource_group_name, ),
: self._serialize.url("provisioning_service_name", provisioning_service_name, ),
: self._serialize.url("certificate_name", certificate_name, )
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
if certificatename is not None:
query_parameters[] = self._serialize.query("certificatename", certificatename, )
if certificateraw_bytes is not None:
query_parameters[] = self._serialize.query("certificateraw_bytes", certificateraw_bytes, )
if certificateis_verified is not None:
query_parameters[] = self._serialize.query("certificateis_verified", certificateis_verified, )
if certificatepurpose is not None:
query_parameters[] = self._serialize.query("certificatepurpose", certificatepurpose, )
if certificatecreated is not None:
query_parameters[] = self._serialize.query("certificatecreated", certificatecreated, )
if certificatelast_updated is not None:
query_parameters[] = self._serialize.query("certificatelast_updated", certificatelast_updated, )
if certificatehas_private_key is not None:
query_parameters[] = self._serialize.query("certificatehas_private_key", certificatehas_private_key, )
if certificatenonce is not None:
query_parameters[] = self._serialize.query("certificatenonce", certificatenonce, )
query_parameters[] = self._serialize.query("self.api_version", self.api_version, )
header_parameters = {}
header_parameters[] =
if self.config.generate_client_request_id:
header_parameters[] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
header_parameters[] = self._serialize.header("if_match", if_match, )
if self.config.accept_language is not None:
header_parameters[] = self._serialize.header("self.config.accept_language", self.config.accept_language, )
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 204]:
raise models.ErrorDetailsException(self._deserialize, response)
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response | Delete the Provisioning Service Certificate.
Deletes the specified certificate assosciated with the Provisioning
Service.
:param resource_group_name: Resource group identifier.
:type resource_group_name: str
:param if_match: ETag of the certificate
:type if_match: str
:param provisioning_service_name: The name of the provisioning
service.
:type provisioning_service_name: str
:param certificate_name: This is a mandatory field, and is the logical
name of the certificate that the provisioning service will access by.
:type certificate_name: str
:param certificatename: This is optional, and it is the Common Name of
the certificate.
:type certificatename: str
:param certificateraw_bytes: Raw data within the certificate.
:type certificateraw_bytes: bytearray
:param certificateis_verified: Indicates if certificate has been
verified by owner of the private key.
:type certificateis_verified: bool
:param certificatepurpose: A description that mentions the purpose of
the certificate. Possible values include: 'clientAuthentication',
'serverAuthentication'
:type certificatepurpose: str or
~azure.mgmt.iothubprovisioningservices.models.CertificatePurpose
:param certificatecreated: Time the certificate is created.
:type certificatecreated: datetime
:param certificatelast_updated: Time the certificate is last updated.
:type certificatelast_updated: datetime
:param certificatehas_private_key: Indicates if the certificate
contains a private key.
:type certificatehas_private_key: bool
:param certificatenonce: Random number generated to indicate Proof of
Possession.
:type certificatenonce: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorDetailsException<azure.mgmt.iothubprovisioningservices.models.ErrorDetailsException>` |
21,628 | def source_file(pymux, variables):
filename = os.path.expanduser(variables['<filename>'])
try:
with open(filename, 'rb') as f:
for line in f:
line = line.decode('utf-8')
handle_command(pymux, line)
except IOError as e:
raise CommandException('IOError: %s' % (e, )) | Source configuration file. |
21,629 | def run(self):
def compound_name(id):
if id not in self._model.compounds:
return id
return self._model.compounds[id].properties.get('name', id)
def reaction_genes_string(id):
if id not in self._model.reactions:
return ''
return self._model.reactions[id].properties.get('genes', '')
reaction = self._get_objective()
if not self._mm.has_reaction(reaction):
self.fail(
'Specified reaction is not in model: {}'.format(reaction))
loop_removal = self._get_loop_removal_option()
if loop_removal == 'none':
result = self.run_fba(reaction)
elif loop_removal == 'l1min':
result = self.run_fba_minimized(reaction)
elif loop_removal == 'tfba':
result = self.run_tfba(reaction)
optimum = None
total_reactions = 0
nonzero_reactions = 0
for reaction_id, flux in sorted(result):
total_reactions += 1
if abs(flux) > self._args.epsilon:
nonzero_reactions += 1
if abs(flux) > self._args.epsilon or self._args.all_reactions:
rx = self._mm.get_reaction(reaction_id)
rx_trans = rx.translated_compounds(compound_name)
genes = reaction_genes_string(reaction_id)
print('{}\t{}\t{}\t{}'.format(
reaction_id, flux, rx_trans, genes))
if reaction_id == reaction:
optimum = flux
logger.info('Maximum flux: {}'.format(optimum))
logger.info('Reactions at zero flux: {}/{}'.format(
total_reactions - nonzero_reactions, total_reactions)) | Run flux analysis command. |
21,630 | def Serialize(self, writer):
try:
writer.WriteByte(self.Type)
writer.WriteHashes(self.Hashes)
except Exception as e:
logger.error(f"COULD NOT WRITE INVENTORY HASHES ({self.Type} {self.Hashes}) {e}") | Serialize object.
Raises:
Exception: if hash writing fails.
Args:
writer (neo.IO.BinaryWriter): |
21,631 | def par_relax_AX(i):
global mp_X
global mp_Xnr
global mp_DX
global mp_DXnr
mp_Xnr[mp_grp[i]:mp_grp[i+1]] = mp_X[mp_grp[i]:mp_grp[i+1]]
mp_DXnr[i] = mp_DX[i]
if mp_rlx != 1.0:
grpind = slice(mp_grp[i], mp_grp[i+1])
mp_X[grpind] = mp_rlx * mp_X[grpind] + (1-mp_rlx)*mp_Y1[grpind]
mp_DX[i] = mp_rlx*mp_DX[i] + (1-mp_rlx)*mp_Y0[i] | Parallel implementation of relaxation if option ``RelaxParam`` !=
1.0. |
21,632 | def create_provider_directory(provider, redirect_uri):
dir = CLIENT.directories.create({
: APPLICATION.name + + provider,
: {
: settings.STORMPATH_SOCIAL[provider.upper()][],
: settings.STORMPATH_SOCIAL[provider.upper()][],
: redirect_uri,
: provider,
},
})
APPLICATION.account_store_mappings.create({
: APPLICATION,
: dir,
: 99,
: False,
: False,
}) | Helper function for creating a provider directory |
21,633 | def _log_multivariate_normal_density_tied(X, means, covars):
cv = np.tile(covars, (means.shape[0], 1, 1))
return _log_multivariate_normal_density_full(X, means, cv) | Compute Gaussian log-density at X for a tied model. |
21,634 | def get(self, request, uri):
uri = self.decode_uri(uri)
node = cio.get(uri, lazy=False)
if node.content is None:
raise Http404
return self.render_to_json({
'uri': node.uri,
'content': node.content
}) | Return published node or specified version.
JSON Response:
{uri: x, content: y} |
21,635 | def sg_mse(tensor, opt):
assert opt.target is not None, 'target is mandatory.'
out = tf.identity(tf.square(tensor - opt.target), 'mse')
tf.sg_summary_loss(out, name=opt.name)
return out | r"""Returns squared error between `tensor` and `target`.
Args:
tensor: A `Tensor`.
opt:
target: A `Tensor` with the same shape and dtype as `tensor`.
name: A `string`. A name to display in the tensor board web UI.
Returns:
A `Tensor` of the same shape and dtype as `tensor`
For example,
```
tensor = [[34, 11, 40], [13, 30, 42]]
target = [[34, 10, 41], [14, 31, 40]]
tensor.sg_mse(target=target) => [[ 0. 1. 1.]
[ 1. 1. 4.]]
``` |
21,636 | def create_channels(chan_name=None, n_chan=None):
if chan_name is not None:
n_chan = len(chan_name)
elif n_chan is not None:
chan_name = _make_chan_name(n_chan)
else:
raise TypeError('Specify either chan_name or n_chan')  # message elided in source; wording assumed
xyz = round(random.randn(n_chan, 3) * 10, decimals=2)
return Channels(chan_name, xyz) | Create instance of Channels with random xyz coordinates
Parameters
----------
chan_name : list of str
names of the channels
n_chan : int
if chan_name is not specified, this defines the number of channels
Returns
-------
instance of Channels
where the location of the channels is random |
21,637 | def relieve_state_machines(self, model, prop_name, info):
if info[] == :
pass
elif info[] == :
self.relieve_model(self.state_machine_manager_model.state_machines[info[][0]])
if self.state_machine_manager_model.state_machines[info[][0]].root_state:
self.relieve_model(self.state_machine_manager_model.state_machines[info[][0]].root_state)
else:
self.logger.warning(NotificationOverview(info)) | The method relieves observed models before those get removed from the list of state_machines hold by
observed StateMachineMangerModel. The method register as observer of observable
StateMachineMangerModel.state_machines. |
21,638 | def comments(self):
if self._comments is None:
self._comments = [c for c in self.grammar.children if c.is_type(TokenType.comment)]
return self._comments | The AST comments. |
21,639 | def log(self, cause=None, do_message=True, custom_msg=None):
message = error_message(self, cause=cause)
exc_type, exc_value, exc_traceback = sys.exc_info()
traceback_formatted = traceback.format_exc()
traceback_val = traceback.extract_tb(exc_traceback)
md5 = hashlib.md5(traceback_formatted.encode()).hexdigest()
if md5 in self._db:
return
if custom_msg is not None and cause is not None:
self.logger.debug('%s: %s' % (custom_msg, cause))
elif custom_msg is not None:
self.logger.debug(custom_msg)
elif cause is not None:
self.logger.debug('%s' % cause)
self.logger.debug(traceback_formatted)
self._db.add(md5) | Loads exception data from the current exception frame - should be called inside the except block
:return: |
21,640 | def get_ip_address():
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip_address = s.getsockname()[0]
except socket_error as sockerr:
if sockerr.errno != errno.ENETUNREACH:
raise sockerr
ip_address = socket.gethostbyname(socket.getfqdn())
finally:
s.close()
return ip_address | Simple utility to get host IP address. |
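The `connect(("8.8.8.8", 80))` trick in `get_ip_address` works because `connect()` on a UDP socket transmits nothing; it only makes the kernel pick the outbound interface, whose address `getsockname()` then reports. The core idea in isolation:

```python
import socket

s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))   # no packets are sent for a UDP connect()
print(s.getsockname()[0])    # local address chosen by the routing table
s.close()
```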
21,641 | def main(argString=None):
args = parseArgs(argString)
checkArgs(args)
logger.info("Options used:")
for key, value in vars(args).iteritems():
logger.info(" --{} {}".format(key.replace("_", "-"), value))
if not os.path.isdir(args.out_dir):
os.mkdir(args.out_dir)
qc_dir = order_qc_dir(args.qc_dir)
merge_required_files(qc_dir, args.out_dir)
copy_initial_files(os.path.join(qc_dir[0], "initial_files.txt"),
args.out_dir)
final_nb_markers, final_nb_samples = get_final_numbers(
os.path.join(qc_dir[-1], "final_files.txt"),
args.out_dir,
)
summary_files = get_summary_files(qc_dir)
generate_report(args.out_dir, summary_files, final_nb_markers,
final_nb_samples, args) | The main function of this module.
:param argString: the options.
:type argString: list of strings |
21,642 | def tasks_from_nids(self, nids):
if not isinstance(nids, collections.abc.Iterable): nids = [nids]
n2task = {task.node_id: task for task in self.iflat_tasks()}
return [n2task[n] for n in nids if n in n2task] | Return the list of tasks associated to the given list of node identifiers (nids).
.. note::
Invalid ids are ignored |
21,643 | def depends_on(self, dependency):
packages = self.package_info()
return [package for package in packages if dependency in package.get("requires", "")] | List of packages that depend on dependency
:param dependency: package name, e.g. 'vext' or 'Pillow' |
21,644 | def addresses_from_address_families(address_mapper, specs):
snapshot = yield Get(Snapshot, PathGlobs, _spec_to_globs(address_mapper, specs))
dirnames = {dirname(f) for f in snapshot.files}
address_families = yield [Get(AddressFamily, Dir(d)) for d in dirnames]
address_family_by_directory = {af.namespace: af for af in address_families}
matched_addresses = OrderedSet()
for spec in specs:
try:
addr_families_for_spec = spec.matching_address_families(address_family_by_directory)
except Spec.AddressFamilyResolutionError as e:
raise raise_from(ResolveError(e), e)
try:
all_addr_tgt_pairs = spec.address_target_pairs_from_address_families(addr_families_for_spec)
except Spec.AddressResolutionError as e:
raise raise_from(AddressLookupError(e), e)
except SingleAddress._SingleAddressResolutionError as e:
_raise_did_you_mean(e.single_address_family, e.name, source=e)
matched_addresses.update(
addr for (addr, tgt) in all_addr_tgt_pairs
if specs.matcher.matches_target_address_pair(addr, tgt)
)
yield BuildFileAddresses(tuple(matched_addresses)) | Given an AddressMapper and list of Specs, return matching BuildFileAddresses.
:raises: :class:`ResolveError` if:
- there were no matching AddressFamilies, or
- the Spec matches no addresses for SingleAddresses.
:raises: :class:`AddressLookupError` if no targets are matched for non-SingleAddress specs. |
21,645 | def _matcher(self, other):
if isinstance(other, CGRContainer):
return GraphMatcher(other, self, lambda x, y: y == x, lambda x, y: y == x)
elif isinstance(other, QueryCGRContainer):
return GraphMatcher(other, self, lambda x, y: x == y, lambda x, y: x == y)
raise TypeError() | QueryCGRContainer < CGRContainer
QueryContainer < QueryCGRContainer[more general] |
21,646 | def decode(self, bytes, raw=False):
return struct.unpack(self.format, buffer(bytes))[0] | decode(bytearray, raw=False) -> value
Decodes the given bytearray according to this PrimitiveType
definition.
NOTE: The parameter ``raw`` is present to adhere to the
``decode()`` interface, but has no effect for PrimitiveType
definitions. |
21,647 | def get_seq(self,obj,default=None):
if is_sequence(obj):
return obj
if is_number(obj): return [obj]
if obj is None and default is not None:
log.warning('using default value %r' % (default,))  # message elided in source; wording assumed
return self.get_seq(default)
raise ValueError('cannot convert %r to a sequence' % (type(obj),)) | Return sequence. |
21,648 | def obfn_f(self, X=None):
if X is None:
X = self.X
return 0.5 * np.linalg.norm((self.D.dot(X) - self.S).ravel())**2 | r"""Compute data fidelity term :math:`(1/2) \| D \mathbf{x} -
\mathbf{s} \|_2^2`. |
21,649 | def get_new_author(self, api_author):
return Author(site_id=self.site_id,
wp_id=api_author["ID"],
**self.api_object_data("author", api_author)) | Instantiate a new Author from api data.
:param api_author: the api data for the Author
:return: the new Author |
21,650 | def can_create(self):
if (
self.data.get('key_name')
and self.data.get('value_name')
and self.data.get('value_type')
):
return True
return False | Returns True if the key_name, value_name, and value_type have been
provided (the Registry Key can be created), otherwise False.
Returns: |
21,651 | def _aligned_series(*many_series):
head = many_series[0]
tail = many_series[1:]
n = len(head)
if (isinstance(head, np.ndarray) and
all(len(s) == n and isinstance(s, np.ndarray) for s in tail)):
return many_series
return (
v
for _, v in iteritems(pd.concat(map(_to_pandas, many_series), axis=1))
) | Return a new list of series containing the data in the input series, but
with their indices aligned. NaNs will be filled in for missing values.
Parameters
----------
*many_series
The series to align.
Returns
-------
aligned_series : iterable[array-like]
A new list of series containing the data in the input series, but
with their indices aligned. NaNs will be filled in for missing values. |
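`_aligned_series` leans on `pd.concat(..., axis=1)`, which outer-joins the indices and NaN-fills the gaps; a small standalone illustration:

```python
import pandas as pd

s1 = pd.Series([1.0, 2.0], index=["a", "b"])
s2 = pd.Series([3.0], index=["b"])
df = pd.concat([s1, s2], axis=1)   # union of indices; missing cells become NaN
print(df[0].tolist())  # [1.0, 2.0]
print(df[1].tolist())  # [nan, 3.0]
```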
21,652 | def key_from_protobuf(pb):
path_args = []
for element in pb.path:
path_args.append(element.kind)
if element.id:
path_args.append(element.id)
if element.name:
path_args.append(element.name)
project = None
if pb.partition_id.project_id:
project = pb.partition_id.project_id
namespace = None
if pb.partition_id.namespace_id:
namespace = pb.partition_id.namespace_id
return Key(*path_args, namespace=namespace, project=project) | Factory method for creating a key based on a protobuf.
The protobuf should be one returned from the Cloud Datastore
Protobuf API.
:type pb: :class:`.entity_pb2.Key`
:param pb: The Protobuf representing the key.
:rtype: :class:`google.cloud.datastore.key.Key`
:returns: a new `Key` instance |
21,653 | def root():
return {
"message": "Welcome to the SIP Master Controller (flask variant)",
"_links": {
"items": [
{
"Link": "Health",
"href": "{}health".format(request.url)
},
{
"Link": "Version",
"href": "{}version".format(request.url)
},
{
"Link": "Allowed target states",
"href": "{}allowed_target_sdp_states".format(request.url)
},
{
"Link": "SDP state",
"href": "{}state".format(request.url)
},
{
"Link": "SDP target state",
"href": "{}state/target".format(request.url)
},
{
"Link": "SDP target state",
"href": "{}target_state".format(request.url)
},
{
"Link": "SDP current state",
"href": "{}state/current".format(request.url)
},
{
"Link": "Scheduling Block Instances",
"href": "{}scheduling_block_instances".format(request.url)
},
{
"Link": "Processing Blocks",
"href": "{}processing_blocks".format(request.url)
},
{
"Link": "Resource Availability",
"href": "{}resource_availability".format(request.url)
},
{
"Link": "Configure SBI",
"href": "{}configure_sbi".format(request.url)
}
]
}
} | Home page. |
21,654 | def security_rules_list(security_group, resource_group, **kwargs):
netconn = __utils__['azurearm.get_client']('network', **kwargs)
try:
secrules = netconn.security_rules.list(
network_security_group_name=security_group,
resource_group_name=resource_group
)
result = __utils__['azurearm.paged_object_to_list'](secrules)
except CloudError as exc:
__utils__['azurearm.log_cloud_error']('network', str(exc), **kwargs)
result = {'error': str(exc)}
return result | .. versionadded:: 2019.2.0
List security rules within a network security group.
:param security_group: The network security group to query.
:param resource_group: The resource group name assigned to the
network security group.
CLI Example:
.. code-block:: bash
salt-call azurearm_network.security_rules_list testnsg testgroup |
21,655 | def validate_args(**args):
if not args['query']:
print("\nMissing required query argument.")
sys.exit()
for key in DEFAULTS:
if key not in args:
args[key] = DEFAULTS[key]
return args | function to check if input query is not None
and set missing arguments to default value |
21,656 | def delete_types(self, base_key, out_key, *types):
self.params['%s.%s' % (base_key, out_key)] = self.delete_types_s(
self.params[base_key], types) | Method to delete a parameter from a parameter documentation.
This method deletes the given `param` from the `base_key` item in the
:attr:`params` dictionary and creates a new item with the original
documentation without the description of the param. This method works
for ``'Results'`` like sections.
See the :meth:`keep_types` method for an example.
Parameters
----------
base_key: str
key in the :attr:`params` dictionary
out_key: str
Extension for the base key (the final key will be like
``'%s.%s' % (base_key, out_key)``
``*types``
str. The type identifier of which the documentations shall deleted
See Also
--------
delete_params |
21,657 | def serializeCorpus(self):
corpus_details = [{
"model": "django-tethne.corpus",
"pk": self.corpus_id,
"fields": {
"source": self.source,
"date_created":strftime("%Y-%m-%d %H:%M:%S", gmtime()),
"length" : len(self.corpus),
}
}]
return corpus_details | This method creates a fixture for the "django-tethne_corpus" model.
Returns
-------
corpus_details in JSON format which can written to a file. |
21,658 | def Expand(self):
ret = libxml2mod.xmlTextReaderExpand(self._o)
if ret is None:raise treeError('xmlTextReaderExpand() failed')
__tmp = xmlNode(_obj=ret)
return __tmp | Reads the contents of the current node and the full
subtree. It then makes the subtree available until the next
xmlTextReaderRead() call |
21,659 | def path_glob(pattern, current_dir=None):
if not current_dir:
current_dir = pathlib.Path.cwd()
elif not isinstance(current_dir, pathlib.Path):
current_dir = pathlib.Path(str(current_dir))
for p in current_dir.glob(pattern):
yield Path(str(p)) | Use pathlib for ant-like patterns, like: "**/*.py"
:param pattern: File/directory pattern to use (as string).
:param current_dir: Current working directory (as Path, pathlib.Path, str)
:return Resolved Path (as path.Path). |
21,660 | def on_created(self, event):
logger.debug("file created: %s", event.src_path)
if not event.is_directory:
self.update_file(event.src_path) | on_created handler |
21,661 | def delete_topic(self, topic_name, fail_not_exist=False):
try:
return self.mgmt_client.delete_topic(topic_name, fail_not_exist=fail_not_exist)
except requests.exceptions.ConnectionError as e:
raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e)
except azure.common.AzureMissingResourceHttpError as e:
raise ServiceBusResourceNotFound("Specified topic does not exist.", e) | Delete a topic entity.
:param topic_name: The name of the topic to delete.
:type topic_name: str
:param fail_not_exist: Whether to raise an exception if the named topic is not
found. If set to True, a ServiceBusResourceNotFound will be raised.
Default value is False.
:type fail_not_exist: bool
:raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the namespace is not found.
:raises: ~azure.servicebus.common.errors.ServiceBusResourceNotFound if the topic is not found
and `fail_not_exist` is set to True. |
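A usage sketch for `delete_topic` against a hypothetical client instance:

```python
# Raises ServiceBusResourceNotFound only because fail_not_exist=True:
client.delete_topic("my-topic", fail_not_exist=True)
```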
21,662 | def render_layout(self, form, context):
form.rendered_fields = []
html = self.layout.render(form, self.form_style, context)
for field in form.fields.keys():
if not field in form.rendered_fields:
html += render_field(field, form, self.form_style, context)
return mark_safe(html) | Returns safe html of the rendering of the layout |
21,663 | def make_index_lookup(list_, dict_factory=dict):
return dict_factory(zip(list_, range(len(list_)))) | r"""
Args:
list_ (list): assumed to have unique items
Returns:
dict: mapping from item to index
CommandLine:
python -m utool.util_list --exec-make_index_lookup
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_list import * # NOQA
>>> import utool as ut
>>> list_ = [5, 3, 8, 2]
>>> idx2_item = ut.make_index_lookup(list_)
>>> result = ut.repr2(idx2_item, nl=False)
>>> assert ut.dict_take(idx2_item, list_) == list(range(len(list_)))
>>> print(result)
{2: 3, 3: 1, 5: 0, 8: 2} |
21,664 | def primary_key(self, hkey, rkey=None):
if isinstance(hkey, dict):
def decode(val):
if isinstance(val, Decimal):
return float(val)
return val
pkey = {self.hash_key.name: decode(hkey[self.hash_key.name])}
if self.range_key is not None:
pkey[self.range_key.name] = decode(hkey[self.range_key.name])
return pkey
else:
pkey = {self.hash_key.name: hkey}
if self.range_key is not None:
if rkey is None:
raise ValueError("Range key is missing!")
pkey[self.range_key.name] = rkey
return pkey | Construct a primary key dictionary
You can either pass in a (hash_key[, range_key]) as the arguments, or
you may pass in an Item itself |
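A usage sketch for `primary_key`, assuming a hypothetical table whose hash key is `"id"` and range key is `"ts"`:

```python
from decimal import Decimal

table.primary_key("user-1", 1234)
# -> {"id": "user-1", "ts": 1234}
table.primary_key({"id": "user-1", "ts": Decimal("1234")})
# -> {"id": "user-1", "ts": 1234.0}   (Decimals from DynamoDB are cast to float)
```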
21,665 | def build_highlight_objects(html, highlights, uniformize_html=True):
if uniformize_html:
try:
html = uniform_html(html.encode()).decode()
except Exception, exc:
logger.info('failed to normalize html (%d bytes): %s',  # message elided in source; wording assumed
len(html), exc, exc_info=True)
html = None
highlight_objects = []
for category, phrase_scores in highlights.iteritems():
for (phrase, score) in phrase_scores:
hl = dict(
score=score,
category=category,
)
ranges = make_xpath_ranges(html, phrase)
if ranges:
hl['xranges'] = [{'range': r} for r in ranges]
elif phrase in html:
hl['strings'] = [phrase]
else:
hl['regexes'] = [{
'regex': phrase,
'flags': 'i',
}]
highlight_objects.append(hl)
return highlight_objects | converts a dict of pretty_name --> [tuple(string, score), ...] to
`Highlight` objects as specified above. |
21,666 | def interface_list(env, securitygroup_id, sortby):
mgr = SoftLayer.NetworkManager(env.client)
table = formatting.Table(COLUMNS)
table.sortby = sortby
mask = (
)
secgroup = mgr.get_securitygroup(securitygroup_id, mask=mask)
for binding in secgroup.get('networkComponentBindings', []):  # keys below elided in source; reconstructed from the SoftLayer API (assumed)
interface_id = binding['networkComponentId']
try:
interface = binding['networkComponent']
vsi = interface['guest']
vsi_id = vsi['id']
hostname = vsi['hostname']
priv_pub = 'PRIVATE' if interface['port'] == 0 else 'PUBLIC'
ip_address = (vsi['primaryBackendIpAddress']
if interface['port'] == 0
else vsi['primaryIpAddress'])
except KeyError:
vsi_id = "N/A"
hostname = "Not enough permission to view"
priv_pub = "N/A"
ip_address = "N/A"
table.add_row([
interface_id,
vsi_id,
hostname,
priv_pub,
ip_address
])
env.fout(table) | List interfaces associated with security groups. |
21,667 | def get_trial_info(current_trial):
if current_trial.end_time and ("_" in current_trial.end_time):
time_obj = datetime.datetime.strptime(current_trial.end_time,
"%Y-%m-%d_%H-%M-%S")
end_time = time_obj.strftime("%Y-%m-%d %H:%M:%S")
else:
end_time = current_trial.end_time
if current_trial.metrics:
metrics = eval(current_trial.metrics)
else:
metrics = None
trial_info = {
"trial_id": current_trial.trial_id,
"job_id": current_trial.job_id,
"trial_status": current_trial.trial_status,
"start_time": current_trial.start_time,
"end_time": end_time,
"params": eval(current_trial.params.encode("utf-8")),
"metrics": metrics
}
return trial_info | Get job information for current trial. |
21,668 | def hkdf_extract(salt, input_key_material, hash=hashlib.sha512):
hash_len = hash().digest_size
if salt == None or len(salt) == 0:
salt = bytearray((0,) * hash_len)
return hmac.new(bytes(salt), buffer(input_key_material), hash).digest() | Extract a pseudorandom key suitable for use with hkdf_expand
from the input_key_material and a salt using HMAC with the
provided hash (default SHA-512).
salt should be a random, application-specific byte string. If
salt is None or the empty string, an all-zeros string of the same
length as the hash's block size will be used instead per the RFC.
See the HKDF draft RFC and paper for usage notes. |
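A minimal sketch of `hkdf_extract` (Python 2-era, given the `buffer()` call); the pseudorandom key is exactly one digest long:

```python
import hashlib

prk = hkdf_extract(b"app-specific salt", b"input keying material",
                   hash=hashlib.sha256)
print(len(prk))  # 32, the SHA-256 digest size
```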
21,669 | def read(message):
require_compatible_version(message.physt_compatible)
a_dict = _dict_from_v0342(message)
return create_from_dict(a_dict, "Message") | Convert a parsed protobuf message into a histogram. |
21,670 | def set_speed(self, aspirate=None, dispense=None):
if aspirate:
self.speeds['aspirate'] = aspirate
if dispense:
self.speeds['dispense'] = dispense
return self | Set the speed (mm/second) the :any:`Pipette` plunger will move
during :meth:`aspirate` and :meth:`dispense`
Parameters
----------
aspirate: int
The speed in millimeters-per-second, at which the plunger will
move while performing an aspirate
dispense: int
The speed in millimeters-per-second, at which the plunger will
move while performing an dispense |
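`set_speed` returns `self`, so it chains with other pipette calls; a sketch with hypothetical speeds:

```python
# Returns the pipette, so calls can be chained:
pipette.set_speed(aspirate=300, dispense=500).aspirate(50)
```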
21,671 | def WriteEventBody(self, event):
output_values = self._GetOutputValues(event)
output_values[3] = self._output_mediator.GetMACBRepresentation(event)
output_values[6] = event.timestamp_desc or
self._WriteOutputValues(output_values) | Writes the body of an event object to the output.
Args:
event (EventObject): event.
Raises:
NoFormatterFound: If no event formatter can be found to match the data
type in the event object. |
21,672 | def find_censored_md5ext(post_id: int) -> Optional[str]:
"Find MD5 for a censored postt find."
try:
last_pull_date = LAST_PULL_DATE_FILE.read_text().strip()
except FileNotFoundError:
last_pull_date = ""
date = datetime.utcnow()
date = f"{date.year}{date.month}{date.day}"
if last_pull_date != date:
update_batches()
LAST_PULL_DATE_FILE.parent.mkdir(exist_ok=True, parents=True)
LAST_PULL_DATE_FILE.write_text(date)
post_id = str(post_id)
for batch in BATCHES_DIR.iterdir():
with open(batch, "r") as content:
for line in content:
an_id, its_md5_ext = line.split(":")
if post_id == an_id:
return its_md5_ext.rstrip().split(".")
return None | Find MD5 for a censored post's ID, return None if can't find. |
def get(self, user_id, lang='zh_CN'):
assert lang in ('zh_CN', 'zh_TW', 'en'), 'lang can only be zh_CN, zh_TW or en'
return self._get(
'user/info',
params={
'openid': user_id,
'lang': lang
}
) | Get basic user information (including the UnionID mechanism)
For details, see
https://mp.weixin.qq.com/wiki?t=resource/res_main&id=mp1421140839
:param user_id: the user's identifier, unique to the current official account
:param lang: language of the returned data: zh_CN Simplified Chinese, zh_TW Traditional Chinese, en English
:return: the returned JSON data
Usage example::
from wechatpy import WeChatClient
client = WeChatClient('appid', 'secret')
user = client.user.get('openid') |
21,674 | def dryRun(self, func, *args, **kwargs):
print >> self.out, \
self.formatterDict.get(func, self.defaultFormatter)(func, *args, **kwargs) | Instead of running function with `*args` and `**kwargs`, just print
out the function call. |
21,675 | def path_without_suffix(self):
if self.suffix:
return self.path[:-len(''.join(self.suffix))]
return self.path | The relative path to asset without suffix.
Example::
>>> attrs = AssetAttributes(environment, 'js/app.js')
>>> attrs.path_without_suffix
'js/app' |
21,676 | def get_day_start_ut_span(self):
cur = self.conn.cursor()
first_day_start_ut, last_day_start_ut = \
cur.execute("SELECT min(day_start_ut), max(day_start_ut) FROM days;").fetchone()
return first_day_start_ut, last_day_start_ut | Return the first and last day_start_ut
Returns
-------
first_day_start_ut: int
last_day_start_ut: int |
21,677 | def update(pkg, slot=None, fromrepo=None, refresh=False, binhost=None, **kwargs):
# NOTE: this row's code cell was garbled together with a duplicate of the
# docstring; the body below is reconstructed from the surviving tokens and
# the upstream Salt ebuild module, so treat it as a best-effort restoration.
if salt.utils.data.is_true(refresh):
refresh_db()
full_atom = pkg
if slot is not None:
full_atom = '{0}:{1}'.format(full_atom, slot)
if fromrepo is not None:
full_atom = '{0}::{1}'.format(full_atom, fromrepo)
if binhost == 'try':
bin_opts = ['-g']
elif binhost == 'force':
bin_opts = ['-G']
else:
bin_opts = []
old = list_pkgs()
cmd = []
if salt.utils.systemd.has_scope(__context__) \
and __salt__['config.get']('systemd.scope', True):
cmd.extend(['systemd-run', '--scope'])
cmd.extend(['emerge', '--ask', 'n', '--quiet', '--update', '--newuse', '--oneshot'])
cmd.extend(bin_opts)
cmd.append(full_atom)
call = __salt__['cmd.run_all'](cmd, output_loglevel='trace', python_shell=False)
if call['retcode'] != 0:
needed_changes = _process_emerge_err(call['stdout'], call['stderr'])
else:
needed_changes = []
__context__.pop('pkg.list_pkgs', None)
new = list_pkgs()
ret = salt.utils.data.compare_dicts(old, new)
if needed_changes:
raise CommandExecutionError(
'Problem encountered updating package(s)',
info={'needed_changes': needed_changes, 'changes': ret}
)
return ret | .. versionchanged:: 2015.8.12,2016.3.3,2016.11.0
On minions running systemd>=205, `systemd-run(1)`_ is now used to
isolate commands which modify installed packages from the
``salt-minion`` daemon's control group. This is done to keep systemd
from killing any emerge commands spawned by Salt when the
``salt-minion`` service is restarted. (see ``KillMode`` in the
`systemd.kill(5)`_ manpage for more information). If desired, usage of
`systemd-run(1)`_ can be suppressed by setting a :mod:`config option
<salt.modules.config.get>` called ``systemd.scope``, with a value of
``False`` (no quotes).
.. _`systemd-run(1)`: https://www.freedesktop.org/software/systemd/man/systemd-run.html
.. _`systemd.kill(5)`: https://www.freedesktop.org/software/systemd/man/systemd.kill.html
Updates the passed package (emerge --update package)
slot
Restrict the update to a particular slot. It will update to the
latest version within the slot.
fromrepo
Restrict the update to a particular repository. It will update to the
latest version within the repository.
binhost
has two options try and force.
try - tells emerge to try and install the package from a configured binhost.
force - forces emerge to install the package from a binhost otherwise it fails out.
Return a dict containing the new package names and versions::
{'<package>': {'old': '<old-version>',
'new': '<new-version>'}}
CLI Example:
.. code-block:: bash
salt '*' pkg.update <package name> |
21,678 | def get_field_analysis(self, field):
class_analysis = self.get_class_analysis(field.get_class_name())
if class_analysis:
return class_analysis.get_field_analysis(field)
return None | Get the FieldAnalysis for a given fieldname
:param field: TODO
:return: :class:`FieldClassAnalysis` |
21,679 | def pause(self):
for client in self._snippet_clients.values():
self._device.log.debug(
'Clearing host port %d of snippet client %s.',  # message elided in source; wording assumed
client.host_port, client.package)
client.clear_host_port() | Pauses all the snippet clients under management.
This clears the host port of a client because a new port will be
allocated in `resume`. |
21,680 | def update(self, *args, **kwargs):
super(TAG_Compound, self).update(*args, **kwargs)
for key, item in self.items():
if item.name is None:
item.name = key | See `__setitem__`. |
21,681 | def get_server_build_info(self):
if self.is_online():
try:
return self.get_mongo_client().server_info()
except OperationFailure, ofe:
log_exception(ofe)
if "there are no users authenticated" in str(ofe):
admin_db = self.get_db("admin", no_auth=False)
return admin_db.command("buildinfo")
except Exception, e:
log_exception(e)
return None | issues a buildinfo command |
21,682 | def scale_sfs(s):
k = np.arange(s.size)
out = s * k
return out | Scale a site frequency spectrum.
Parameters
----------
s : array_like, int, shape (n_chromosomes,)
Site frequency spectrum.
Returns
-------
sfs_scaled : ndarray, int, shape (n_chromosomes,)
Scaled site frequency spectrum. |
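A worked example for `scale_sfs`: each count is multiplied by its frequency index `k`:

```python
import numpy as np

s = np.array([0, 10, 5, 2])   # variant counts at frequencies k = 0..3
print(scale_sfs(s))           # [ 0 10 10  6]
```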
21,683 | def parse_qualifier(parser, event, node):
name = _get_required_attribute(node, 'NAME')
cim_type = _get_required_attribute(node, 'TYPE')
propagated = _get_attribute(node, 'PROPAGATED')
(next_event, next_node) = six.next(parser)
if _is_end(next_event, next_node, 'QUALIFIER'):
return CIMQualifier(name, None, type=cim_type)
if _is_start(next_event, next_node, 'VALUE'):
value = parse_value(parser, next_event, next_node)
elif _is_start(next_event, next_node, 'VALUE.ARRAY'):
value = parse_value_array(parser, next_event, next_node)
else:
raise ParseError('Expected VALUE or VALUE.ARRAY element')
result = CIMQualifier(name, tocimobj(cim_type, value))
_get_end_event(parser, 'QUALIFIER')
return result | Parse CIM/XML QUALIFIER element and return CIMQualifier |
21,684 | def configure_db(self, hostname, database, username, admin=False):
self.connect(password=self.get_mysql_root_password())
if not self.database_exists(database):
self.create_database(database)
remote_ip = self.normalize_address(hostname)
password = self.get_mysql_password(username)
if not self.grant_exists(database, username, remote_ip):
if not admin:
self.create_grant(database, username, remote_ip, password)
else:
self.create_admin_grant(username, remote_ip, password)
self.flush_priviledges()
return password | Configure access to database for username from hostname. |
21,685 | def _load_torrents_directory(self):
r = self._req_lixian_get_id(torrent=True)
self._downloads_directory = self._load_directory(r[]) | Load torrents directory
If it does not exist yet, this request will cause the system to create
one |
21,686 | def add(self, arg1, arg2=None, arg3=None, bucket_type=None):
mr = RiakMapReduce(self)
return mr.add(arg1, arg2, arg3, bucket_type) | Start assembling a Map/Reduce operation. A shortcut for
:func:`RiakMapReduce.add`.
:param arg1: the object or bucket to add
:type arg1: RiakObject, string
:param arg2: a key or list of keys to add (if a bucket is
given in arg1)
:type arg2: string, list, None
:param arg3: key data for this input (must be convertible to JSON)
:type arg3: string, list, dict, None
:param bucket_type: Optional name of a bucket type
:type bucket_type: string, None
:rtype: :class:`RiakMapReduce` |
21,687 | def update_layers(self):
signals.post_save.disconnect(layer_post_save, sender=Layer)
try:
# the log format strings and service-type literals below are assumed
# placeholders; the original string literals were stripped during extraction
LOGGER.debug('Updating layers for service id %s' % self.id)
if self.type == 'OGC:WMS':
update_layers_wms(self)
elif self.type == 'OGC:WMTS':
update_layers_wmts(self)
elif self.type == 'ESRI:ArcGIS:MapServer':
update_layers_esri_mapserver(self)
elif self.type == 'ESRI:ArcGIS:ImageServer':
update_layers_esri_imageserver(self)
elif self.type == 'Hypermap:WorldMap':
update_layers_wm_legacy(self)
elif self.type == 'GeoNode:WorldMap':
update_layers_geonode_wm(self)
elif self.type == 'Hypermap:WARPER':
update_layers_warper(self)
except Exception:
LOGGER.error('Error updating layers for service %s' % self.uuid)
signals.post_save.connect(layer_post_save, sender=Layer) | Update layers for a service. |
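The disconnect/reconnect around the update exists so that saving layers does not re-trigger the layer_post_save handler recursively. A general sketch of that pattern with hypothetical names; a try/finally (as below) guarantees the reconnect even if the update raises:
from django.db.models.signals import post_save

post_save.disconnect(my_handler, sender=MyModel)   # my_handler/MyModel are placeholders
try:
    run_bulk_update()                              # work that would otherwise re-fire the handler
finally:
    post_save.connect(my_handler, sender=MyModel)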
21,688 | def _extract_timeseries_list(tsvol, roivol, maskvol=None, roi_values=None, zeroe=True):
_check_for_partition(tsvol, roivol, maskvol)
if roi_values is None:
roi_values = get_unique_nonzeros(roivol)
ts_list = []
for r in roi_values:
ts = _partition_data(tsvol, roivol, r, maskvol, zeroe)
if len(ts) == 0:
ts = np.zeros(tsvol.shape[-1])
ts_list.append(ts)
return ts_list | Partition the timeseries in tsvol according to the ROIs in roivol.
If a mask is given, will use it to exclude any voxel outside of it.
Parameters
----------
tsvol: numpy.ndarray
4D timeseries volume or a 3D volume to be partitioned
roivol: numpy.ndarray
3D ROIs volume
maskvol: numpy.ndarray
3D mask volume
zeroe: bool
If True, removes null (all-zero) timeseries voxels. Only applied to 4D timeseries data.
roi_values: list of int
Values of the ROIs to process, indicating both which ROIs
are extracted and their order in the output.
Returns
-------
ts_list: list
A list with the timeseries arrays as items |
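An illustration of the ROI partitioning idea in plain numpy (the row's private helpers such as _partition_data are not reproduced here):
import numpy as np

tsvol = np.random.rand(4, 4, 4, 10)                  # x, y, z, time
roivol = np.repeat([1, 1, 2, 2], 16).reshape(4, 4, 4)
ts_list = [tsvol[roivol == r] for r in (1, 2)]       # each item: (n_voxels, 10)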
21,689 | def progress(self) -> List[bool]:
return [p == "W" for p in self._data[MiniSeriesData].progress if p != "N"] | A list of booleans, one per game played in the mini series, indicating whether the player won that game.
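A quick check of the mapping, outside any class:
print([p == "W" for p in "WWLN" if p != "N"])   # -> [True, True, False]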
21,690 | def com_google_fonts_check_metadata_nameid_family_name(ttFont, font_metadata):
from fontbakery.utils import get_name_entry_strings
familynames = get_name_entry_strings(ttFont, NameID.TYPOGRAPHIC_FAMILY_NAME)
if not familynames:
familynames = get_name_entry_strings(ttFont, NameID.FONT_FAMILY_NAME)
if len(familynames) == 0:
yield FAIL, Message("missing",
("This font lacks a FONT_FAMILY_NAME entry"
" (nameID={}) in the name"
" table.").format(NameID.FONT_FAMILY_NAME))
else:
if font_metadata.name not in familynames:
yield FAIL, Message("mismatch",
("Unmatched family name in font:"
" TTF has \"{}\" while METADATA.pb"
" has \"{}\"").format(familynames[0],
font_metadata.name))
else:
yield PASS, ("Family name \"{}\" is identical"
" in METADATA.pb and on the"
" TTF file.").format(font_metadata.name) | Checks METADATA.pb font.name field matches
family name declared on the name table. |
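Hypothetical invocation, assuming a loaded TTFont and a parsed METADATA.pb font message (font_metadata and the font path are placeholders):
from fontTools.ttLib import TTFont

ttFont = TTFont('Family-Regular.ttf')
for status, message in com_google_fonts_check_metadata_nameid_family_name(ttFont, font_metadata):
    print(status, message)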
21,691 | def check_format(self, full_check=True):
check_call(_LIB.MXNDArraySyncCheckFormat(self.handle, ctypes.c_bool(full_check))) | Check whether the NDArray format is valid.
Parameters
----------
full_check : bool, optional
If `True`, rigorous check, O(N) operations. Otherwise
basic check, O(1) operations (default True). |
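Hypothetical usage with an MXNet sparse array:
import mxnet as mx

a = mx.nd.array([[1, 0], [0, 2]]).tostype('csr')
a.check_format()                  # rigorous O(N) validation
a.check_format(full_check=False)  # cheap O(1) validation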
21,692 | def offering(self):
warnings.warn(
"Offering is no longer a supported property of Locator. Please use the course and run properties.",
DeprecationWarning,
stacklevel=2
)
if not self.course and not self.run:
return None
elif not self.run and self.course:
return self.course
return "/".join([self.course, self.run]) | Deprecated. Use course and run independently. |
21,693 | def replace_zeros(self, val, zero_thresh=0.0):
new_data = self.data.copy()
new_data[new_data <= zero_thresh] = val
return type(self)(new_data.astype(self.data.dtype), frame=self._frame) | Replaces all zeros in the image with a specified value
Parameters
----------
val : image dtype
value to replace zeros with
zero_thresh : float
values at or below this threshold are treated as zeros
Returns
-------
image of the same type, with all (near-)zero pixels replaced by `val` |
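Hypothetical usage, assuming an image instance of this class wrapping a depth array (depth_image is a placeholder):
import numpy as np

cleaned = depth_image.replace_zeros(np.inf)   # mark invalid (zero) depth pixels as inf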
21,694 | def nvmlUnitGetUnitInfo(unit):
c_info = c_nvmlUnitInfo_t()
fn = _nvmlGetFunctionPointer("nvmlUnitGetUnitInfo")
ret = fn(unit, byref(c_info))
_nvmlCheckReturn(ret)
return bytes_to_str(c_info) | r"""
/**
* Retrieves the static information associated with a unit.
*
* For S-class products.
*
* See \ref nvmlUnitInfo_t for details on available unit info.
*
* @param unit The identifier of the target unit
* @param info Reference in which to return the unit information
*
* @return
* - \ref NVML_SUCCESS if \a info has been populated
* - \ref NVML_ERROR_UNINITIALIZED if the library has not been successfully initialized
* - \ref NVML_ERROR_INVALID_ARGUMENT if \a unit is invalid or \a info is NULL
*/
nvmlReturn_t DECLDIR nvmlUnitGetUnitInfo |
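Hypothetical usage via pynvml-style bindings (S-class hardware is required for unit queries):
nvmlInit()
unit = nvmlUnitGetHandleByIndex(0)
info = nvmlUnitGetUnitInfo(unit)
nvmlShutdown()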
21,695 | def initialize(self):
self.Base.metadata.create_all(self.session.bind)
logger.debug("initialized sqlalchemy orm tables") | Instantiates the cache area to be ready for updates |
21,696 | def extract_header_comment_key_value_tuples_from_file(file_descriptor):
file_data = file_descriptor.read()
findall_result = re.findall(HEADER_COMMENT_KEY_VALUE_TUPLES_REGEX, file_data, re.MULTILINE | re.DOTALL)
returned_list = []
for header_comment, _ignored, raw_comments, key, value in findall_result:
comments = re.findall("/\* (.*?) \*/", raw_comments)
if len(comments) == 0:
comments = [u""]
returned_list.append((header_comment, comments, key, value))
return returned_list | Extracts tuples representing comments and localization entries from strings file.
Args:
file_descriptor (file): The file to read the tuples from
Returns:
list : List of tuples representing the headers and localization entries. |
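For reference, the shape of .strings content such an extractor consumes; the actual regex is defined elsewhere, so this sample is illustrative only:
sample = '''
/* Account section */

/* Shown on the login button */
"LOGIN_TITLE" = "Sign In";
'''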
21,697 | def newton_solver_comp(f, x0, lb, ub, infos=False, backsteps=10, maxit=50, numdiff=False):
from numpy import row_stack
ind = x0.shape[0] - 1
def fun_lc(xx):
x = row_stack([xx, lb])
res = f(x)
return res[:ind,:]
def fun_uc(xx):
x = row_stack([xx, ub])
res = f(x)
return res[:ind,:]
[sol_nc, nit0] = newton_solver(f, x0, numdiff=True, infos=True)
lower_constrained = sol_nc[ind,:] < lb
upper_constrained = sol_nc[ind,:] > ub
not_constrained = ~(lower_constrained + upper_constrained)  # '~' (logical not): unary minus is invalid for boolean arrays
sol = sol_nc.copy()
[sol_lc, nit1] = newton_solver(fun_lc, x0[:-1,:], numdiff=True, infos=True)
[sol_uc, nit2] = newton_solver(fun_uc, x0[:-1,:], numdiff=True, infos=True)
nit = nit0 + nit1 + nit2
sol_lc = row_stack([sol_lc, lb])
sol_uc = row_stack([sol_uc, ub])
lower_constrained = sol_nc[-1,:] < lb
upper_constrained = sol_nc[-1,:] > ub
not_constrained = ~(lower_constrained + upper_constrained)  # same fix as above
sol = sol_lc * lower_constrained + sol_uc * upper_constrained + sol_nc * not_constrained
return [sol, nit] | Solves many independent systems f(x)=0 simultaneously using Newton's method, with the last variable optionally pinned to its bounds.
:param f: objective function to be solved with values p x N . The second output argument represents the derivative with
values in (p x p x N)
:param x0: initial value ( p x N )
:param lb: lower bound for the last variable
:param ub: upper bound for the last variable
:return: solution x such that f(x) = 0 |
21,698 | def run_function(app_function, event, context):
if hasattr(inspect, "getfullargspec"):
args, varargs, keywords, defaults, _, _, _ = inspect.getfullargspec(app_function)
else:
args, varargs, keywords, defaults = inspect.getargspec(app_function)
num_args = len(args)
if num_args == 0:
result = app_function(event, context) if varargs else app_function()
elif num_args == 1:
result = app_function(event, context) if varargs else app_function(event)
elif num_args == 2:
result = app_function(event, context)
else:
raise RuntimeError("Function signature is invalid. Expected a function that accepts at most "
"2 arguments or varargs.")
return result | Given a function and event context,
detect signature and execute, returning any result. |
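A quick check of the dispatch logic using the function above:
def handler(event):
    return event['name']

print(run_function(handler, {'name': 'zappa'}, context=None))   # -> 'zappa'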
21,699 | def symbol_scores(self, symbol):
scores = []
path = []
def fixup(module, variable):
prefix = module.split('.')  # '.' separator assumed throughout; original literals stripped
if variable is not None:
prefix.append(variable)
seeking = symbol.split('.')
new_module = []
while prefix and seeking[0] != prefix[0]:
new_module.append(prefix.pop(0))
if new_module:
module, variable = '.'.join(new_module), prefix[0]
else:
variable = None
return module, variable
def score_walk(scope, scale):
sub_path, score = self._score_key(scope, full_key)
if score > 0.1:
try:
i = sub_path.index(None)
sub_path, from_symbol = sub_path[:i], '.'.join(sub_path[i + 1:])
except ValueError:
from_symbol = None
package_path = '.'.join(path + sub_path)
package_path, from_symbol = fixup(package_path, from_symbol)
scores.append((score * scale, package_path, from_symbol))
for key, subscope in scope._tree.items():
if type(subscope) is not float:
path.append(key)
score_walk(subscope, subscope.score * scale - 0.1)
path.pop()
full_key = symbol.split('.')
score_walk(self, 1.0)
scores.sort(reverse=True)
return scores | Find matches for symbol.
:param symbol: A . separated symbol. eg. 'os.path.basename'
:returns: A list of tuples of (score, package, reference|None),
ordered by score from highest to lowest. |
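Hypothetical usage, assuming a populated importmagic-style SymbolIndex instance (index and the returned scores are illustrative):
scores = index.symbol_scores('os.path.basename')
# e.g. [(1.0, 'os.path', 'basename'), (0.4, 'posixpath', 'basename'), ...]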