Unnamed: 0 (int64, 0–389k) | code (stringlengths 26–79.6k) | docstring (stringlengths 1–46.9k)
---|---|---
21,200 | def fromJSON(value):
    j = json.loads(value)
    v = GPLong()
    if "defaultValue" in j:
        v.value = j["defaultValue"]
    else:
        v.value = j["value"]
    if "paramName" in j:
        v.paramName = j["paramName"]
    elif "name" in j:
        v.paramName = j["name"]
    return v | loads the GP object from a JSON string |
21,201 | def awscli_defaults(os_type=None):
    try:
        if os_type is None:
            os_type = platform.system()
        if os_type == 'Linux':
            HOME = os.environ['HOME']
            awscli_credentials = HOME + '/.aws/credentials'
            awscli_config = HOME + '/.aws/config'
        elif os_type == 'Windows':
            username = os.getenv('username')
            awscli_credentials = 'C:\\Users\\' + username + '\\.aws\\credentials'
            awscli_config = 'C:\\Users\\' + username + '\\.aws\\config'
        elif os_type == 'Java':
            logger.warning('Unsupported OS; defaulting to Linux-style paths')  # message inferred
            HOME = os.environ['HOME']
            awscli_credentials = HOME + '/.aws/credentials'
            awscli_config = HOME + '/.aws/config'
        alt_credentials = os.getenv('AWS_SHARED_CREDENTIALS_FILE')
    except OSError as e:
        logger.exception(
            '%s: problem determining local os environment %s' %
            (inspect.stack()[0][3], str(e))
        )
        raise e
    return {
        'awscli_defaults': {
            'awscli_credentials': awscli_credentials,
            'awscli_config': awscli_config,
            'alt_credentials': alt_credentials
        }
    } | Summary:
Parse, update local awscli config credentials
Args:
:user (str): USERNAME, only required when run on windows os
Returns:
TYPE: dict object containing key, value pairs describing
os information |
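A minimal usage sketch for the row above; the nested key names are the reconstructions shown in the code, not confirmed by the source:

```python
defaults = awscli_defaults()
print(defaults['awscli_defaults']['awscli_credentials'])  # e.g. /home/user/.aws/credentials
```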
21,202 | def p_object_literal(self, p):
if len(p) == 3:
p[0] = self.asttypes.Object()
else:
p[0] = self.asttypes.Object(properties=p[2])
p[0].setpos(p) | object_literal : LBRACE RBRACE
| LBRACE property_list RBRACE
| LBRACE property_list COMMA RBRACE |
21,203 | def register_message(self, message):
self._check_id_and_symbol_consistency(message.msgid, message.symbol)
self._check_symbol(message.msgid, message.symbol)
self._check_msgid(message.msgid, message.symbol)
for old_name in message.old_names:
self._check_symbol(message.msgid, old_name[1])
self._messages_definitions[message.symbol] = message
self._register_alternative_name(message, message.msgid, message.symbol)
for old_id, old_symbol in message.old_names:
self._register_alternative_name(message, old_id, old_symbol)
self._msgs_by_category[message.msgid[0]].append(message.msgid) | Register a MessageDefinition with consistency in mind.
:param MessageDefinition message: The message definition being added. |
21,204 | def compile_validation_pattern(self, units=None):
    if units is None:
        units = list(self.POSSIBLE_UNITS)
    else:
        for u in units:
            if u not in self.POSSIBLE_UNITS:
                raise ValidationError("'{}' is not a valid unit".format(u))  # message inferred
    # regex and params literals were lost in extraction; plausible reconstruction
    regex = re.compile(r'^(-?\d+)({})$'.format('|'.join(units)))
    endings = (' %s ' % ugettext("or")).join("'%s'" % u.replace('%', '%%') for u in units)
    params = {'label': '%(label)s', 'value': '%(value)s', 'field': '%(field)s', 'endings': endings}
    return regex, self.invalid_message % params | Assure that passed in units are valid size units, or if missing, use all possible units.
Return a tuple with a regular expression to be used for validating and an error message
in case this validation failed. |
21,205 | def get_memory(self,
shutit_pexpect_child=None,
note=None):
shutit_global.shutit_global_object.yield_to_draw()
shutit_pexpect_child = shutit_pexpect_child or self.get_current_shutit_pexpect_session().pexpect_child
shutit_pexpect_session = self.get_shutit_pexpect_session_from_child(shutit_pexpect_child)
return shutit_pexpect_session.get_memory(note=note) | Returns memory available for use, in KB, as an int |
21,206 | def _rm_get_reference_coords_from_header(parts):
s = int(parts[5])
e = int(parts[6]) + 1
if (s >= e):
raise AlignmentIteratorError("invalid repeatmasker header: " +
" ".join(parts))
return (s, e) | extract the reference (genomic sequence match) coordinates of a repeat
occurrence from a repeatmasker header line. An example header line is::
239 29.42 1.92 0.97 chr1 11 17 (41) C XX#YY (74) 104 1 m_b1s502i1 4
the genomic start and end are always at positions 5 and 6 respectively. In
the repeatmasker format, the end is inclusive, but in pyokit end coordinates
are exclusive, so we adjust it when we parse here.
:param parts: the header line, as a tokenized list.
:return: tuple of (start, end) |
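Applying the function to the tokenized example header from the docstring (positions 5 and 6 hold 11 and 17) shows the inclusive-to-exclusive adjustment:

```python
header = "239 29.42 1.92 0.97 chr1 11 17 (41) C XX#YY (74) 104 1 m_b1s502i1 4"
assert _rm_get_reference_coords_from_header(header.split()) == (11, 18)
```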
21,207 | async def xdel(self, name: str, stream_id: str) -> int:
return await self.execute_command('XDEL', name, stream_id) | [NOTICE] Not officially released yet
[NOTICE] In the current implementation, memory is not
really reclaimed until a macro node is completely empty,
so you should not abuse this feature.
remove items from the middle of a stream, just by ID.
:param name: name of the stream
:param stream_id: id of the entry appended to the stream. |
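A minimal usage sketch under the reconstruction above; the client and stream names are hypothetical:

```python
# Removes one entry by ID and returns the number actually deleted.
deleted = await client.xdel('mystream', '1526654999635-0')
```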
21,208 | def on_finish(self, exc=None):
super(GarbageCollector, self).on_finish(exc)
self._cycles_left -= 1
if self._cycles_left <= 0:
num_collected = gc.collect()
self._cycles_left = self.collection_cycle
LOGGER.debug('Garbage collected %s objects',  # message inferred
             num_collected) | Used to initiate the garbage collection |
21,209 | def _execute(self, command, stdin=None, stdout=subprocess.PIPE):
process = subprocess.Popen(command, shell=True, cwd=self.root_path, stdin=stdin, stdout=stdout)
return (process.wait(), None if stdout is not subprocess.PIPE else process.communicate()[0].decode()) | Executes the specified command relative to the repository root.
Returns a tuple containing the return code and the process output. |
21,210 | def clone(self, substitutions, **kwargs):
dag = self.explain(**kwargs)
dag.substitutions.update(substitutions)
cloned_dag = dag.clone(ignore=self.ignore)
return self.update_nodes(self.add_edges(cloned_dag)) | Clone a DAG. |
21,211 | def cmd_hasher(f, algorithm):
data = f.read()
if not data:
print("Empty file or string!")
return 1
if algorithm:
print(hasher(data, algorithm)[algorithm], f.name)
else:
for algo, result in hasher(data).items():
print("{:<12} {} {}".format(algo, result, f.name)) | Compute various hashes for the input data, that can be a file or a stream.
Example:
\b
$ habu.hasher README.rst
md5 992a833cd162047daaa6a236b8ac15ae README.rst
ripemd160 0566f9141e65e57cae93e0e3b70d1d8c2ccb0623 README.rst
sha1 d7dbfd2c5e2828eb22f776550c826e4166526253 README.rst
sha256 6bb22d927e1b6307ced616821a1877b6cc35e... README.rst
sha512 8743f3eb12a11cf3edcc16e400fb14d599b4a... README.rst
whirlpool 96bcc083242e796992c0f3462f330811f9e8c... README.rst
You can also specify which algorithm to use. In such case, the output is
only the value of the calculated hash:
\b
$ habu.hasher -a md5 README.rst
992a833cd162047daaa6a236b8ac15ae README.rst |
21,212 | def moderate(self, environ, request, id, action, key):
    try:
        id = self.isso.unsign(key, max_age=2**32)
    except (BadSignature, SignatureExpired):
        raise Forbidden
    item = self.comments.get(id)
    if item is None:
        raise NotFound
    thread = self.threads.get(item['tid'])
    link = local("origin") + thread["uri"] + "#isso-%i" % item["id"]
    if request.method == "GET":
        modal = (
            "<!DOCTYPE html>"
            "<html>"
            "<head>"
            "<script>"
            "  if (confirm('%s: are you sure?')) {"
            "      xhr = new XMLHttpRequest;"
            "      xhr.open('POST', window.location.href);"
            "      xhr.send(null);"
            "      xhr.onload = function() {"
            "          window.location.href = %s;"
            "      };"
            "  }"
            "</script>" % (action.capitalize(), json.dumps(link)))
        return Response(modal, 200, content_type="text/html")
    if action == "activate":
        if item['mode'] == 1:
            return Response("Already activated", 200)
        with self.isso.lock:
            self.comments.activate(id)
        self.signal("comments.activate", thread, item)
        return Response("Yo", 200)
    elif action == "edit":
        data = request.get_json()
        with self.isso.lock:
            rv = self.comments.update(id, data)
        for key in set(rv.keys()) - API.FIELDS:
            rv.pop(key)
        self.signal("comments.edit", rv)
        return JSON(rv, 200)
    else:
        with self.isso.lock:
            self.comments.delete(id)
        self.cache.delete(
            'hash', (item['email'] or item['remote_addr']).encode())
        self.signal("comments.delete", id)
        return Response("Yo", 200)
| @api {get} / get comments
@apiGroup Thread
@apiDescription Queries the comments of a thread.
@apiParam {string} uri
The URI of thread to get the comments from.
@apiParam {number} [parent]
Return only comments that are children of the comment with the provided ID.
@apiUse plainParam
@apiParam {number} [limit]
The maximum number of returned top-level comments. Omit for unlimited results.
@apiParam {number} [nested_limit]
The maximum number of returned nested comments per comment. Omit for unlimited results.
@apiParam {number} [after]
Includes only comments that were added after the provided UNIX timestamp.
@apiSuccess {number} total_replies
The number of replies if the `limit` parameter was not set. If `after` is set to `X`, this is the number of comments that were created after `X`. So setting `after` may change this value!
@apiSuccess {Object[]} replies
The list of comments. Each comment also has the `total_replies`, `replies`, `id` and `hidden_replies` properties to represent nested comments.
@apiSuccess {number} id
Id of the comment `replies` is the list of replies of. `null` for the list of toplevel comments.
@apiSuccess {number} hidden_replies
The number of comments that were omitted from the results because of the `limit` request parameter. Usually, this will be `total_replies` - `limit`.
@apiExample {curl} Get 2 comments with 5 responses:
curl 'https://comments.example.com/?uri=/thread/&limit=2&nested_limit=5'
@apiSuccessExample Example response:
{
"total_replies": 14,
"replies": [
{
"website": null,
"author": null,
"parent": null,
"created": 1464818460.732863,
"text": "<p>Hello, World!</p>",
"total_replies": 1,
"hidden_replies": 0,
"dislikes": 2,
"modified": null,
"mode": 1,
"replies": [
{
"website": null,
"author": null,
"parent": 1,
"created": 1464818460.769638,
"text": "<p>Hi, now some Markdown: <em>Italic</em>, <strong>bold</strong>, <code>monospace</code>.</p>",
"dislikes": 0,
"modified": null,
"mode": 1,
"hash": "2af4e1a6c96a",
"id": 2,
"likes": 2
}
],
"hash": "1cb6cc0309a2",
"id": 1,
"likes": 2
},
{
"website": null,
"author": null,
"parent": null,
"created": 1464818460.80574,
"text": "<p>Lorem ipsum dolor sit amet, consectetur adipisicing elit. Accusantium at commodi cum deserunt dolore, error fugiat harum incidunt, ipsa ipsum mollitia nam provident rerum sapiente suscipit tempora vitae? Est, qui?</p>",
"total_replies": 0,
"hidden_replies": 0,
"dislikes": 0,
"modified": null,
"mode": 1,
"replies": [],
"hash": "1cb6cc0309a2",
"id": 3,
"likes": 0
        }
    ],
    "id": null,
    "hidden_replies": 12
} |
21,213 | def tf(self, term):
    if ' ' in term:
        raise ValueError('tf can only calculate the frequency of individual words')
    tcount = self.get_count(term)
    if tcount == 0:
        return 0.0
    return 1 + log10(tcount) | r"""Return term frequency.
Parameters
----------
term : str
The term for which to calculate tf
Returns
-------
float
The term frequency (tf)
Raises
------
ValueError
tf can only calculate the frequency of individual words
Examples
--------
>>> tqbf = 'The quick brown fox jumped over the lazy dog.\n'
>>> tqbf += 'And then it slept.\n And the dog ran off.'
>>> ngcorp = NGramCorpus(Corpus(tqbf))
>>> NGramCorpus(Corpus(tqbf)).tf('the')
1.3010299956639813
>>> NGramCorpus(Corpus(tqbf)).tf('fox')
1.0 |
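The doctest values follow directly from the formula: 'the' occurs twice in the corpus and 'fox' once, so:

```python
from math import log10
assert abs((1 + log10(2)) - 1.3010299956639813) < 1e-12  # tf('the')
assert 1 + log10(1) == 1.0                               # tf('fox')
```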
21,214 | def get_hardware_source_by_id(self, hardware_source_id: str, version: str):
actual_version = "1.0.0"
if Utility.compare_versions(version, actual_version) > 0:
raise NotImplementedError("Hardware API requested version %s is greater than %s." % (version, actual_version))
hardware_source = HardwareSourceModule.HardwareSourceManager().get_hardware_source_for_hardware_source_id(hardware_source_id)
return HardwareSource(hardware_source) if hardware_source else None | Return the hardware source API matching the hardware_source_id and version.
.. versionadded:: 1.0
Scriptable: Yes |
21,215 | def create_cookie(name, value, **kwargs):
    result = {
        'version': 0,
        'name': name,
        'value': value,
        'port': None,
        'domain': '',
        'path': '/',
        'secure': False,
        'expires': None,
        'discard': True,
        'comment': None,
        'comment_url': None,
        'rest': {'HttpOnly': None},
        'rfc2109': False,
    }
    badargs = set(kwargs) - set(result)
    if badargs:
        err = 'create_cookie() got unexpected keyword arguments: %s'
        raise TypeError(err % list(badargs))
    result.update(kwargs)
    result['port_specified'] = bool(result['port'])
    result['domain_specified'] = bool(result['domain'])
    result['domain_initial_dot'] = result['domain'].startswith('.')
    result['path_specified'] = bool(result['path'])
    return cookielib.Cookie(**result) | Make a cookie from underspecified parameters.
By default, the pair of `name` and `value` will be set for the domain ''
and sent on every request (this is sometimes called a "supercookie"). |
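A quick usage sketch; the result mirrors the http.cookiejar Cookie constructor, so it drops straight into a jar:

```python
cookie = create_cookie('session', 'abc123', domain='.example.com')
jar = cookielib.CookieJar()  # same cookielib module as above
jar.set_cookie(cookie)
```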
21,216 | def run(self, args):
email = args.email
username = args.username
auth_role = args.auth_role
project = self.fetch_project(args, must_exist=True, include_children=False)
user = self.remote_store.lookup_or_register_user_by_email_or_username(email, username)
self.remote_store.set_user_project_permission(project, user, auth_role)
print(u'Gave user {} {} permissions for {}.'.format(user.full_name, auth_role, project.name)) | Give the user with user_full_name the auth_role permissions on the remote project with project_name.
:param args: Namespace arguments parsed from the command line |
21,217 | def unwire(awsclient, events, lambda_name, alias_name=ALIAS_NAME):
    if not lambda_exists(awsclient, lambda_name):
        log.error(colored.red("The lambda function doesn't exist... Bailing out..."))
        return 1
    client_lambda = awsclient.get_client('lambda')  # reconstructed; needed for the calls below
    lambda_arn = client_lambda.get_alias(FunctionName=lambda_name,
                                         Name=alias_name)['AliasArn']
    log.info('UN-wiring lambda_arn %s' % lambda_arn)
    policies = None
    try:
        result = client_lambda.get_policy(FunctionName=lambda_name,
                                          Qualifier=alias_name)
        policies = json.loads(result['Policy'])
    except ClientError as e:
        if e.response['Error']['Code'] == 'ResourceNotFoundException':
            log.warn("Permission policies not found")
        else:
            raise e
    for event in events:
        evt_source = event['event_source']
        _remove_event_source(awsclient, evt_source, lambda_arn)
return 0 | Unwire a list of event from an AWS Lambda function.
'events' is a list of dictionaries, where the dict must contains the
'schedule' of the event as string, and an optional 'name' and 'description'.
:param awsclient:
:param events: list of events
:param lambda_name:
:param alias_name:
:return: exit_code |
21,218 | def search_data_std(Channel, RunNos, RepeatNos, directoryPath='.'):
    # glob pattern and filename templates reconstructed from the docstring
    files = glob('{}/*'.format(directoryPath))
    files_CorrectChannel = []
    for file_ in files:
        if 'CH{}'.format(Channel) in file_:
            files_CorrectChannel.append(file_)
    files_CorrectRunNo = []
    for RunNo in RunNos:
        files_match = _fnmatch.filter(
            files_CorrectChannel, '*RUN*0{}_*'.format(RunNo))
        for file_ in files_match:
            files_CorrectRunNo.append(file_)
    files_CorrectRepeatNo = []
    for RepeatNo in RepeatNos:
        files_match = _fnmatch.filter(
            files_CorrectRunNo, '*REPEAT*0{}.*'.format(RepeatNo))
        for file_ in files_match:
            files_CorrectRepeatNo.append(file_)
    return files_CorrectRepeatNo | Lets you find multiple datasets at once assuming they have a
filename which contains a pattern of the form:
CH<ChannelNo>_RUN00...<RunNo>_REPEAT00...<RepeatNo>
Parameters
----------
Channel : int
The channel you want to load
RunNos : sequence
Sequence of run numbers you want to load
RepeatNos : sequence
Sequence of repeat numbers you want to load
directoryPath : string, optional
The path to the directory housing the data
The default is the current directory
Returns
-------
Data_filepaths : list
A list containing the filepaths to the matching files |
21,219 | def run(self):
if self.workflow.builder.base_from_scratch:
self.log.info("Skipping comparing components: unsupported for FROM-scratch images")
return
worker_metadatas = self.workflow.postbuild_results.get(PLUGIN_FETCH_WORKER_METADATA_KEY)
comp_list = self.get_component_list_from_workers(worker_metadatas)
if not comp_list:
raise ValueError("No components to compare")
package_comparison_exceptions = get_package_comparison_exceptions(self.workflow)
master_comp = {}
failed_components = set()
for components in comp_list:
for component in components:
t = component['type']
name = component['name']
if name in package_comparison_exceptions:
self.log.info("Ignoring comparison of package %s", name)
continue
if t not in SUPPORTED_TYPES:
raise ValueError("Type %s not supported" % t)
if name in failed_components:
continue
identifier = (t, name)
if identifier not in master_comp:
master_comp[identifier] = component
continue
if t == T_RPM:
mc = master_comp[identifier]
try:
self.rpm_compare(mc, component)
except ValueError as ex:
self.log.debug("Mismatch details: %s", ex)
self.log.warning(
"Comparison mismatch for component %s:", name)
for comp in filter_components_by_name(name, comp_list):
self.log_rpm_component(comp)
failed_components.add(name)
if failed_components:
raise ValueError(
"Failed component comparison for components: "
"{components}".format(
components=', '.join(sorted(failed_components))
)
) | Run the plugin. |
21,220 | def store_psm_protein_relations(fn, header, pgdb, proteins):
allpsms = OrderedDict()
last_id, psmids_to_store = None, set()
store_soon = False
for psm in tsvreader.generate_tsv_psms(fn, header):
psm_id, prots = tsvreader.get_pepproteins(psm)
prots = [x for x in prots if x in proteins]
try:
allpsms[psm_id].extend(prots)
except KeyError:
allpsms[psm_id] = prots
if len(psmids_to_store) % DB_STORE_CHUNK == 0:
store_soon = True
if store_soon and last_id != psm_id:
pgdb.store_peptides_proteins(allpsms, psmids_to_store)
store_soon = False
psmids_to_store = set()
psmids_to_store.add(psm_id)
last_id = psm_id
if len(psmids_to_store) > 0:
pgdb.store_peptides_proteins(allpsms, psmids_to_store)
pgdb.index_protein_peptides()
return allpsms | Reads PSMs from file, extracts their proteins and peptides and passes
them to a database backend in chunks. |
21,221 | def cli(url, user_agent):
kwargs = {}
if user_agent:
kwargs['user_agent'] = user_agent
archive_url = capture(url, **kwargs)
click.echo(archive_url) | Archives the provided URL using archive.is. |
21,222 | def close(self):
if self.compressed_file is None:
with self.path.open('rb+') as f:
f.seek(0, 2)
eof = f.tell()
self._update_offset_value(f, self.gdr_head+36, 8, eof)
if self.checksum:
f.write(self._md5_compute(f))
return
with self.path.open('rb+') as f:
f.seek(0, 2)
eof = f.tell()
self._update_offset_value(f, self.gdr_head+36, 8, eof)
with self.compressed_file.open('wb') as g:
g.write(bytearray.fromhex(CDF.V3magicNUMBER_1))
g.write(bytearray.fromhex(CDF.V3magicNUMBER_2c))
self._write_ccr(f, g, self.compression)
if self.checksum:
g.seek(0, 2)
g.write(self._md5_compute(g))
self.path.unlink()
self.compressed_file.rename(self.path) | Closes the CDF Class.
1. If compression was set, this is where the compressed file is
written.
2. If a checksum is needed, this will place the checksum at the end
of the file. |
21,223 | def get_connect_redirect_url(self, request, socialaccount):
assert request.user.is_authenticated
url = reverse('socialaccount_connections')
return url | Returns the default URL to redirect to after successfully
connecting a social account. |
21,224 | def _release_lock(self, identifier):
    # compare-and-delete as an atomic server-side Lua script;
    # the client attribute name is inferred
    script = self._redis.register_script('''
    if redis.call("get", KEYS[1]) == ARGV[1]
    then
        return redis.call("del", KEYS[1])
    else
        return -1
    end
    ''')
    num_keys_deleted = script(keys=[self._lock_name],
                              args=[identifier])
    return (num_keys_deleted == 1) | Release the lock.
This requires you to actually have owned the lock. On return
you definitely do not own it, but if somebody else owned it
before calling this function, they still do.
:param str identifier: the session lock identifier
:return: :const:`True` if you actually did own the lock,
:const:`False` if you didn't |
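For context, the matching acquire side of this pattern is typically a SET with NX and a per-session token; a minimal sketch assuming a plain redis-py client (names hypothetical):

```python
import uuid
import redis

r = redis.Redis()
token = uuid.uuid4().hex
# Only the holder of `token` can later pass the compare-and-delete script.
acquired = r.set('session-lock', token, nx=True, ex=30)
```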
21,225 | def get_default_filepath(cls):
if not cls.config_files:
return None
if not cls.config_searchpath:
return None
filename = cls.config_files[0]
filepath = cls.config_searchpath[0]
return os.path.join(filepath, filename) | Get the default filepath for the configuration file. |
21,226 | def GetRunlevelsNonLSB(states):
if not states:
return set()
convert_table = {
"0": "0",
"1": "1",
"2": "2",
"3": "3",
"4": "4",
"5": "5",
"6": "6",
"S": "1",
"s": "1"
}
_LogInvalidRunLevels(states, convert_table)
return set([convert_table[s] for s in states.split() if s in convert_table]) | Accepts a string and returns the set of corresponding numeric LSB runlevels. |
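A worked example of the conversion table above; 'S'/'s' map to runlevel '1' and unknown states are silently dropped:

```python
assert GetRunlevelsNonLSB("3 5 S") == {"3", "5", "1"}
assert GetRunlevelsNonLSB("") == set()
```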
21,227 | def send_mass_text(self, group_or_users, content,
is_to_all=False, preview=False,
send_ignore_reprint=0, client_msg_id=None):
return self._send_mass_message(
group_or_users,
        'text',
        {
            'text': {
                'content': content
            }
        },
is_to_all,
preview,
send_ignore_reprint,
client_msg_id,
        ) | Send a mass text message.
Details:
https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21
:param group_or_users: an integer group ID to send by group, or a list/tuple of OpenIDs to send to those users.
When is_to_all is True, pass None to send to all users.
:param content: the message body
:param is_to_all: whether to send to all users; if True the message is sent to every user,
if False it is sent to the group identified by group_id
:type is_to_all: bool
:param preview: whether to send a preview; in that case group_or_users should be a single OpenID string
:type preview: bool
:param send_ignore_reprint: whether to continue the mass send when an article is judged to be a reprint.
If send_ignore_reprint is 1 and the original author allows reprinting, the mass send continues.
If send_ignore_reprint is 0, the mass send stops when the article is judged a reprint.
Defaults to 0.
:type send_ignore_reprint: int
:param client_msg_id: developer-side mass-send msgid, at most 64 bytes
:type client_msg_id: str
:return: the JSON response packet |
21,228 | def get_oxi_state_decorated_structure(self, structure):
s = structure.copy()
if s.is_ordered:
valences = self.get_valences(s)
s.add_oxidation_state_by_site(valences)
else:
valences = self.get_valences(s)
s = add_oxidation_state_by_site_fraction(s, valences)
return s | Get an oxidation state decorated structure. This currently works
only for ordered structures.
Args:
structure: Structure to analyze
Returns:
A modified structure that is oxidation state decorated.
Raises:
ValueError if the valences cannot be determined. |
21,229 | def get_stats(self):
    if self.__path is None:
        return 0, 0
    nfiles = 0
    ndirs = 0
    for fdict in self.get_repository_state():
        fdname = list(fdict)[0]
        if fdname == '':
            continue
        # info-key names lost in extraction; plausible reconstruction
        if fdict[fdname].get('pyrepfileinfo', False):
            nfiles += 1
        elif fdict[fdname].get('pyrepdirinfo', False):
            ndirs += 1
        else:
            raise Exception('unknown repository entry: %s' % fdname)  # message inferred
    return ndirs, nfiles | Get repository descriptive stats
:Returns:
#. numberOfDirectories (integer): Number of diretories in repository
#. numberOfFiles (integer): Number of files in repository |
21,230 | def set(self, name, value):
if name not in self._options:
self.register(name, self._generator())
return self._options[name].__set__(self, value) | Set an option value.
Args:
name (str): The name of the option.
value: The value to set the option to.
Raises:
TypeError: If the value is not a string or appropriate native type.
ValueError: If the value is a string but cannot be coerced.
If the name is not registered a new option will be created using the
option generator. |
21,231 | def _compute_ticks(self, element, edges, widths, lims):
if self.xticks is None or not isinstance(self.xticks, int):
return None
if self.cyclic:
x0, x1, _, _ = lims
xvals = np.linspace(x0, x1, self.xticks)
labels = ["%.0f" % np.rad2deg(x) + '\N{DEGREE SIGN}' for x in xvals]
elif self.xticks:
dim = element.get_dimension(0)
inds = np.linspace(0, len(edges), self.xticks, dtype=np.int)
edges = list(edges) + [edges[-1] + widths[-1]]
xvals = [edges[i] for i in inds]
labels = [dim.pprint_value(v) for v in xvals]
return [xvals, labels] | Compute the ticks either as cyclic values in degrees or as roughly
evenly spaced bin centers. |
21,232 | def invalidate(self, key):
path = self.path(self.xform_key(key))
try:
LOG.debug('invalidating cache entry %s at %s', key, path)  # message inferred
path.unlink()
except OSError:
pass | Clear an item from the cache |
21,233 | def get_bool(self, key, default=None):
v = self.get(key, default)
if v != default:
v = v.strip().lower()
if v == 'true':
    v = True
elif v == 'false':
    v = False
elif default is None:
raise RuntimeError("invalid bool string: %s" % v)
else:
v = default
return v | Same as :meth:`dict.get`, but the value is converted to a bool.
The boolean value is considered, respectively, :obj:`True` or
:obj:`False` if the string is equal, ignoring case, to
``'true'`` or ``'false'``. |
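The normalization rule is easy to check in isolation; a standalone restatement of the conversion above:

```python
def to_bool(v, default=None):
    # same normalization as get_bool: strip whitespace, lowercase, compare
    v = v.strip().lower()
    if v == 'true':
        return True
    if v == 'false':
        return False
    if default is None:
        raise RuntimeError("invalid bool string: %s" % v)
    return default

assert to_bool(' TRUE ') is True and to_bool('false') is False
```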
21,234 | def _remove_string_from_commastring(self, field, string):
commastring = self.data.get(field, '')
if string in commastring:
    self.data[field] = commastring.replace(string, '')
return True
return False | Remove a string from a comma separated list of strings
Args:
field (str): Field containing comma separated list
string (str): String to remove
Returns:
bool: True if string removed or False if not |
21,235 | def prep_db_parallel(samples, parallel_fn):
batch_groups, singles, out_retrieve, extras = _group_by_batches(samples, _has_variant_calls)
to_process = []
has_batches = False
for (name, caller), info in batch_groups.items():
fnames = [x[0] for x in info]
to_process.append([fnames, (str(name), caller, True), [x[1] for x in info], extras])
has_batches = True
for name, caller, data, fname in singles:
to_process.append([[fname], (str(name), caller, False), [data], extras])
output = parallel_fn("prep_gemini_db", to_process)
out_fetch = {}
for batch_id, out_file in output:
out_fetch[tuple(batch_id)] = out_file
out = []
for batch_name, data in out_retrieve:
out_variants = []
for vrn in data["variants"]:
use_population = vrn.pop("population", True)
if use_population:
vrn["population"] = out_fetch[(batch_name, vrn["variantcaller"])]
out_variants.append(vrn)
data["variants"] = out_variants
out.append([data])
for x in extras:
out.append([x])
return out | Prepares gemini databases in parallel, handling jointly called populations. |
21,236 | def remove_file(self, path):
self.get_file(path).remove()
self.remove_cache_buster(path) | Removes the given file |
21,237 | def determine_orig_wcsname(header, wnames, wkeys):
orig_wcsname = None
orig_key = None
if orig_wcsname is None:
for k,w in wnames.items():
if w[:4] == 'IDC_':  # prefix literal inferred
orig_wcsname = w
orig_key = k
break
if orig_wcsname is None:
if len(wnames) > 1:
orig_key = wkeys[-2]
orig_wcsname = wnames[orig_key]
return orig_wcsname,orig_key | Determine the name of the original, unmodified WCS solution |
21,238 | def local_open(url):
    scheme, server, path, param, query, frag = urllib.parse.urlparse(url)
    filename = urllib.request.url2pathname(path)
    if os.path.isfile(filename):
        return urllib.request.urlopen(url)
    elif path.endswith('/') and os.path.isdir(filename):
        files = []
        for f in os.listdir(filename):
            filepath = os.path.join(filename, f)
            if f == 'index.html':
                with open(filepath, 'r') as fp:
                    body = fp.read()
                break
            elif os.path.isdir(filepath):
                f += '/'
            files.append('<a href="{name}">{name}</a>'.format(name=f))
        else:
            tmpl = (
                "<html><head><title>{url}</title>"
                "</head><body>{files}</body></html>")
            body = tmpl.format(url=url, files='\n'.join(files))
        status, message = 200, "OK"
    else:
        status, message, body = 404, "Path not found", "Not found"
    headers = {'content-type': 'text/html'}
    body_stream = six.StringIO(body)
    return urllib.error.HTTPError(url, status, message, headers, body_stream) | Read a local path, with special support for directories |
21,239 | def lat_from_pole(ref_loc_lon, ref_loc_lat, pole_plon, pole_plat):
ref_loc = (ref_loc_lon, ref_loc_lat)
pole = (pole_plon, pole_plat)
paleo_lat = 90 - pmag.angle(pole, ref_loc)
return float(paleo_lat) | Calculate paleolatitude for a reference location based on a paleomagnetic pole
Required Parameters
----------
ref_loc_lon: longitude of reference location in degrees
ref_loc_lat: latitude of reference location
pole_plon: paleopole longitude in degrees
pole_plat: paleopole latitude in degrees |
21,240 | def get(self, file_id):
def ok(doc):
if doc is None:
raise NoFile("TxMongo: no file in gridfs with _id {0}".format(repr(file_id)))
return GridOut(self.__collection, doc)
return self.__collection.files.find_one({"_id": file_id}).addCallback(ok) | Get a file from GridFS by ``"_id"``.
Returns an instance of :class:`~gridfs.grid_file.GridOut`,
which provides a file-like interface for reading.
:Parameters:
- `file_id`: ``"_id"`` of the file to get
.. versionadded:: 1.6 |
21,241 | def is_valid_mpls_labels(labels):
if not isinstance(labels, (list, tuple)):
return False
for label in labels:
if not is_valid_mpls_label(label):
return False
return True | Returns True if the given value is a list of valid MPLS labels. |
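For context, an MPLS label is a 20-bit field, so the per-label check is plausibly a range test; a sketch of a compatible is_valid_mpls_label (the real helper is not shown in this row):

```python
def is_valid_mpls_label(label):
    # 20-bit label space: 0..1048575 (values 0-15 are reserved but syntactically valid)
    return isinstance(label, int) and 0 <= label <= 0xFFFFF

assert is_valid_mpls_labels([16, 1048575]) is True
assert is_valid_mpls_labels(16) is False  # not a list/tuple
```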
21,242 | def update_image(self, container_name, image_name):
code, container = self.get_container(container_name)
if code != httplib.OK:
self.logger.error("Container %s is not exists. error code %s, error message %s", container_name, code,
container)
return False
_, old_image_name, _ = utils.parse_image_name(container.image)
repository, name, version = utils.parse_image_name(image_name)
if not repository or repository.lower() != DOCKER_NEG:
self.logger.error("You image %s must have a prefix string", image_name)
return False
if not repo.image_exists(name, tag=version):
self.logger.error("You image %s must be location in docker.neg repository.", image_name)
return False
if old_image_name.lower() != name.lower():
self.logger.error("You image %s must be same with container's Image.", image_name, container.image)
return False
code, result = self.update(container_name, tag=version)
if code != httplib.OK:
self.logger.error("Update container %s with image failure, code %s, result %s", container_name, code,
result)
return False
return True | update a container's image,
:param container_name: `class`:`str`, container name
:param image_name: `class`:`str`, the full image name, like alpine:3.3
:return: `class`:`bool`, True if success, otherwise False. |
21,243 | def rectgal_to_sphergal(X,Y,Z,vx,vy,vz,degree=False):
lbd= XYZ_to_lbd(X,Y,Z,degree=degree)
vrpmllpmbb= vxvyvz_to_vrpmllpmbb(vx,vy,vz,X,Y,Z,XYZ=True)
if sc.array(X).shape == ():
return sc.array([lbd[0],lbd[1],lbd[2],vrpmllpmbb[0],vrpmllpmbb[1],vrpmllpmbb[2]])
else:
out=sc.zeros((len(X),6))
out[:,0:3]= lbd
out[:,3:6]= vrpmllpmbb
return out | NAME:
rectgal_to_sphergal
PURPOSE:
transform phase-space coordinates in rectangular Galactic coordinates to spherical Galactic coordinates (can take vector inputs)
INPUT:
X - component towards the Galactic Center (kpc)
Y - component in the direction of Galactic rotation (kpc)
Z - component towards the North Galactic Pole (kpc)
vx - velocity towards the Galactic Center (km/s)
vy - velocity in the direction of Galactic rotation (km/s)
vz - velocity towards the North Galactic Pole (km/s)
degree - (Bool) if True, return l and b in degrees
OUTPUT:
(l,b,d,vr,pmll x cos(b),pmbb) in (rad,rad,kpc,km/s,mas/yr,mas/yr)
HISTORY:
2009-10-25 - Written - Bovy (NYU) |
21,244 | def _return_response(self, response):
self.filter_headers(response.msg)
if "content-length" in response.msg:
del response.msg["content-length"]
self.send_response(response.status, response.reason)
for header_key, header_value in response.msg.items():
self.send_header(header_key, header_value)
body = response.read()
self.send_header('Content-Length', str(len(body)))
self.end_headers()
self.wfile.write(body) | :type response: HTTPResponse |
21,245 | def update_properties(self, properties):
self.manager.session.post(self.uri, body=properties)
assert self.manager._name_prop not in properties
self.properties.update(copy.deepcopy(properties)) | Update writeable properties of this PasswordRule.
The Password Rule must be user-defined. System-defined Password Rules
cannot be updated.
Authorization requirements:
* Task permission to the "Manage Password Rules" task.
Parameters:
properties (dict): New values for the properties to be updated.
Properties not to be updated are omitted.
Allowable properties are the properties with qualifier (w) in
section 'Data model' in section 'Password Rule object' in the
:term:`HMC API` book.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
:exc:`~zhmcclient.ConnectionError` |
21,246 | def task_add(self, t, periodic=None):
name = t.get_name()
if name in self._tasknames:
raise TaskError(name, 'Task is already registered, %d task%s total' %  # message inferred
                (len(self._tasknames), ses(len(self._tasknames))))
self._tasknames[name] = (t, periodic)
self._tasks.add(t) | Register a task in this legion. "periodic" should be None, or
a callback function which will be called periodically when the
legion is otherwise idle. |
21,247 | def incidence(boundary):
return GroupBy(boundary).split(np.arange(boundary.size) // boundary.shape[1]) | given an Nxm matrix containing boundary info between simplices,
compute incidence info matrix
not very reusable; should probably not be in this lib |
21,248 | def max_play(w, i, grid):
"Play like Spock, except breaking ties by drunk_value."
return min(successors(grid),
key=lambda succ: (evaluate(succ), drunk_value(succ))) | Play like Spock, except breaking ties by drunk_value. |
21,249 | def findCells(fnames):
IDs=[]
filesByExt = filesByExtension(fnames)
for abfFname in filesByExt['abf']:  # extension keys inferred from the docstring example
ID=os.path.splitext(abfFname)[0]
for picFname in filesByExt['tif']+filesByExt['jpg']:
if picFname.startswith(ID):
IDs.append(ID)
break
return smartSort(IDs) | given a list of files, return a list of cells by their ID.
A cell is indicated when an ABF name matches the start of another file.
Example:
123456.abf
123456-whatever.tif |
21,250 | def get(self, guild_id):
if guild_id not in self._players:
p = self._player(lavalink=self.lavalink, guild_id=guild_id)
self._players[guild_id] = p
return self._players[guild_id] | Returns a player from the cache, or creates one if it does not exist. |
21,251 | def text(self):
    if not self.content:
        return ''
    if self.encoding:
        return self.content.decode(self.encoding, errors='replace')
    return self.content.decode(self.guess_encoding(), errors='replace') | Content as string
If :attr:`encoding` is None, the encoding is guessed with :meth:`guess_encoding` |
21,252 | def get_power(self,callb=None):
if self.power_level is None:
response = self.req_with_resp(LightGetPower, LightStatePower, callb=callb )
return self.power_level | Convenience method to request the power status from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If no, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: int |
21,253 | def plot_heat_map(z, include_values=False,
cmap=matplotlib.cm.Reds,
ax=None,
xlabel='auto', ylabel='auto',
xtick_labels='auto', ytick_labels='auto',
xtick_locs=None, ytick_locs=None,
xtick_kwargs={}, ytick_kwargs={},
clabel_pos='top',
transpose_y=False, convert_to_log_scale=False,
show_colorbar=False, colorbar_dict={},
values_format='%.2f', values_font_size='small',  # these two defaults inferred
values_color=None, values_text_kw={},
bad_color=None,
**kwargs):
xtick_kwargs.setdefault('rotation', 'vertical')  # kwarg name inferred
ytick_kwargs.setdefault('rotation', 'horizontal')
auto_col_name, auto_col_labels, auto_row_name, auto_row_labels = extract_annotation(z)
if xtick_labels == 'auto': xtick_labels = auto_col_labels
if ytick_labels == 'auto': ytick_labels = auto_row_labels
if xlabel == 'auto': xlabel = auto_col_name
if ylabel == 'auto': ylabel = auto_row_name
if isinstance(z, pandas.DataFrame):
values = z.values
else:
values = z
if convert_to_log_scale:
values = numpy.log(values)
if transpose_y:
values = numpy.flipud(values)
if isinstance(cmap, str):
cmap = getattr(cm, cmap)
old_ax = plt.gca()
if ax is not None:
plt.sca(ax)
else:
ax = plt.gca()
output = ax.matshow(values, cmap=cmap, **kwargs)
if show_colorbar:
from mpl_toolkits.axes_grid1 import make_axes_locatable
divider = make_axes_locatable(ax)
colorbar_dict.setdefault(, "5%")
colorbar_dict.setdefault(, 0.05)
cax = divider.append_axes("right", **colorbar_dict)
cb = plt.colorbar(output, cax=cax)
plt.sca(ax)
if xtick_labels is not None and len(xtick_labels) > 0:
if xtick_locs:
plt.xticks(xtick_locs, xtick_labels, **xtick_kwargs)
else:
plt.xticks(range(len(xtick_labels)), xtick_labels, **xtick_kwargs)
if ytick_labels is not None and len(ytick_labels) > 0:
if ytick_locs:
plt.yticks(ytick_locs, ytick_labels, **ytick_kwargs)
else:
plt.yticks(range(len(ytick_labels)), ytick_labels, **ytick_kwargs)
if xlabel:
plt.xlabel(xlabel)
if ylabel:
plt.ylabel(ylabel)
if include_values:
def text_cmap(x):
if numpy.isnan(x):
return cmap(1.0)
if x > 0.2 and x < 0.5:
x = 0.2
if x < 0.8 and x >= 0.5:
x = 0.8
return cmap(1.0 - x)
values_text_kw['fontsize'] = values_font_size  # kwarg names inferred
values_text_kw['color'] = values_color
_plot_table(values, text_format=values_format, cmap=text_cmap, **values_text_kw)
xaxis = ax.xaxis
if clabel_pos == 'top':
    xaxis.set_label_position('top')
    xaxis.tick_top()
else:
    ax.xaxis.tick_bottom()
    ax.xaxis.set_label_position('bottom')
plt.sca(old_ax)
return output | Plot a heat map of z.
Parameters
-------------
z : ndarray | DataFrame
ax : None # NOT IMPLEMENTED YET
Axis to be used. If None uses the current axis.
xlabel : str | 'auto' | None
name for the x-axis
ylabel : str | 'auto' | None
name for the y-axis
xtick_labels : list of str
names for the columns
ytick_labels : list of str
names for the rows
transpose_y : bool
Flips the data along the y axis if true
convert_to_log_scale : bool
If true, plots the log of z.
clabel_pos : 'top' | 'bottom'
Location of the column labels. Default is 'top'.
cmap : colormap | str
colormap to use for plotting the values; e.g., matplotlib.cmap.Blues,
if str then expecting something like 'Blues' to look up using getattr(matplotlib.cm, ...)
values_color : None | color
if None, coloring will be the inverse of cmap
Otherwise the color given would be used for the text color of all the values.
bad_color : color
This is the color that will be used for nan values
Returns
---------------
Output from matshow command (matplotlib.image.AxesImage) |
21,254 | def get_badge(self):
try:
obj = Badge.objects.using(self.db_read).get(slug=self.slug)
logger.debug('Badge %s fetched using db %s', obj.slug, self.db_read)  # message inferred
except Badge.DoesNotExist:
obj = None
return obj | The related ``Badge`` object. |
21,255 | def ensure_dir(directory: str) -> None:
if not os.path.isdir(directory):
LOG.debug(f"Directory {directory} does not exist, creating it.")
os.makedirs(directory) | Create a directory if it doesn't exist. |
21,256 | def get_java_home():
error = RuntimeError("java home not found, try setting JAVA_HOME")
try:
return os.environ["JAVA_HOME"]
except KeyError:
wd = tempfile.mkdtemp(prefix='java_home_probe_')  # prefix inferred
jclass = "Temp"
jsrc = os.path.join(wd, "%s.java" % jclass)
with open(jsrc, "w") as f:
f.write(JPROG.substitute(classname=jclass))
try:
subprocess.check_call(["javac", jsrc])
path = subprocess.check_output(
["java", "-cp", wd, jclass], universal_newlines=True
)
except (OSError, UnicodeDecodeError, subprocess.CalledProcessError):
raise error
finally:
shutil.rmtree(wd)
path = os.path.normpath(path.strip())
if os.path.exists(os.path.join(path, "include", "jni.h")):
return path
path = os.path.dirname(path)
if os.path.exists(os.path.join(path, "include", "jni.h")):
return path
raise error | Try getting JAVA_HOME from system properties.
We are interested in the JDK home, containing include/jni.h, while the
java.home property points to the JRE home. If a JDK is installed, however,
the two are (usually) related: the JDK home is either the same directory
as the JRE home (recent java versions) or its parent (and java.home points
to jdk_home/jre). |
21,257 | def component(self, *components):
for component in components:
self._component.append(component)
return self | r"""
When search() is called it will limit results to items in a component.
:param component: items passed in will be turned into a list
:returns: :class:`Search` |
21,258 | def has_selector(selector):
"Determine if the current platform has the selector available"
try:
if selector == 'poll':
p = select.poll()
p.poll(0)
else:
getattr(select, selector)().close()
return True
except (OSError, AttributeError):
return False | Determine if the current platform has the selector available |
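For example, on a typical Linux box:

```python
print(has_selector('poll'))    # True on most Unix platforms
print(has_selector('kqueue'))  # True on BSD/macOS, False on Linux
```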
21,259 | def model_from_list(l, header):
col = groups.sortableListe(PseudoAccesCategorie(n) for n in l)
return MultiSelectModel(col, header) | Return a model with a collection from a list of entry |
21,260 | def sanity_check_states(states_spec):
states = copy.deepcopy(states_spec)
is_unique = ('shape' in states)
if is_unique:
    states = dict(state=states)
for name, state in states.items():
    if isinstance(state['shape'], int):
        state['shape'] = (state['shape'],)
    if 'type' not in state:
        state['type'] = 'float'
return states, is_unique | Sanity checks a states dict, used to define the state space for an MDP.
Throws an error or warns if mismatches are found.
Args:
states_spec (Union[None,dict]): The spec-dict to check (or None).
Returns: Tuple of 1) the state space desc and 2) whether there is only one component in the state space. |
21,261 | def warning(self, msg: str) -> None:
s = "{}: {}".format(self.fullname, msg)
servicemanager.LogWarningMsg(s)
if self.debugging:
log.warning(s) | Write a warning message to the Windows Application log
(± to the Python disk log). |
21,262 | def delete_router(self, name, tenant_id, rout_id, subnet_lst):
ret = self.delete_intf_router(name, tenant_id, rout_id, subnet_lst)
if not ret:
return False
try:
ret = self.neutronclient.delete_router(rout_id)
except Exception as exc:
LOG.error("Failed to delete router %(name)s ret %(ret)s "
"Exc %(exc)s",
{'name': name, 'ret': str(ret), 'exc': str(exc)})
return False
return True | Delete the openstack router.
Delete the router and remove the interfaces attached to it. |
21,263 | def applyStyleOnShape(self, shape, node, only_explicit=False):
"Apply style attributes of a sequence of nodes to an RL shape."
mappingN = (
("fill", "fillColor", "convertColor", "black"),
("fill-opacity", "fillOpacity", "convertOpacity", 1),
("fill-rule", "_fillRule", "convertFillRule", "nonzero"),
("stroke", "strokeColor", "convertColor", "none"),
("stroke-width", "strokeWidth", "convertLength", "1"),
("stroke-opacity", "strokeOpacity", "convertOpacity", 1),
("stroke-linejoin", "strokeLineJoin", "convertLineJoin", "0"),
("stroke-linecap", "strokeLineCap", "convertLineCap", "0"),
("stroke-dasharray", "strokeDashArray", "convertDashArray", "none"),
)
mappingF = (
("font-family", "fontName", "convertFontFamily", DEFAULT_FONT_NAME),
("font-size", "fontSize", "convertLength", "12"),
("text-anchor", "textAnchor", "id", "start"),
)
if shape.__class__ == Group:
for subshape in shape.contents:
self.applyStyleOnShape(subshape, node, only_explicit=only_explicit)
return
ac = self.attrConverter
for mapping in (mappingN, mappingF):
if shape.__class__ != String and mapping == mappingF:
continue
for (svgAttrName, rlgAttr, func, default) in mapping:
svgAttrValue = ac.findAttr(node, svgAttrName)
if svgAttrValue == '':
if only_explicit:
continue
else:
svgAttrValue = default
if svgAttrValue == "currentColor":
svgAttrValue = ac.findAttr(node.getparent(), "color") or default
try:
meth = getattr(ac, func)
setattr(shape, rlgAttr, meth(svgAttrValue))
except (AttributeError, KeyError, ValueError):
pass
if getattr(shape, 'fillOpacity', None) is not None and shape.fillColor:
shape.fillColor.alpha = shape.fillOpacity | Apply styles from an SVG element to an RLG shape.
If only_explicit is True, only attributes really present are applied. |
21,264 | def targets(tgt, tgt_type='glob', **kwargs):
    ret = {}
    cloud_opts = salt.config.cloud_config(
        os.path.join(os.path.dirname(__opts__['conf_file']), 'cloud')
    )
    minions = __runner__['cache.cloud'](tgt)
    for minion_id, full_info in minions.items():
        profile, provider = full_info.get('profile', None), full_info.get('provider', None)
        vm_ = {
            'driver': provider,
            'profile': profile,
        }
        public_ips = full_info.get('public_ips', [])
        private_ips = full_info.get('private_ips', [])
        ip_list = []
        for item in (public_ips, private_ips):
            if isinstance(item, list):
                ip_list = ip_list + item
            elif isinstance(item, string_types):
                ip_list.append(item)
        roster_order = __opts__.get('roster_order', (
            'public', 'private'
        ))
        preferred_ip = extract_ipv4(roster_order, ip_list)
        ret[minion_id] = copy.deepcopy(__opts__.get('roster_defaults', {}))
        ret[minion_id].update({'host': preferred_ip})
        ssh_username = salt.utils.cloud.ssh_usernames(vm_, cloud_opts)
        if isinstance(ssh_username, string_types):
            ret[minion_id]['user'] = ssh_username
        elif isinstance(ssh_username, list):
            if ssh_username[0] != 'root':
                ret[minion_id]['user'] = ssh_username[0]
        password = salt.config.get_cloud_config_value(
            'password', vm_, cloud_opts, search_global=False, default=None
        )
        if password:
            ret[minion_id]['passwd'] = password
        key_filename = salt.config.get_cloud_config_value(
            'private_key', vm_, cloud_opts, search_global=False, default=None
        )
        if key_filename:
            ret[minion_id]['priv'] = key_filename
        sudo = salt.config.get_cloud_config_value(
            'sudo', vm_, cloud_opts, search_global=False, default=None
        )
        if sudo:
            ret[minion_id]['sudo'] = sudo
    return ret | Return the targets from the flat yaml file, checks opts for location but
defaults to /etc/salt/roster |
21,265 | def _make_dispatch(cls, func):
sig = signature(func)
matchers = tuple(cls._make_all_matchers(sig.parameters.items()))
return (partial(cls._bind_args, sig, matchers), func) | Create a dispatch pair for func- a tuple of (bind_args, func), where
bind_args is a function that, when called with (args, kwargs), attempts
to bind those args to the type signature of func, or else raise a
TypeError |
21,266 | def _get_input_name(self, input_str, region=None, describe_output=None):
if '.' in input_str:
    stage_identifier, input_name = input_str.split('.', 1)
    return self._get_stage_id(stage_identifier) + '.' + input_name
return input_str | :param input_str: A string of one of the forms: "<exported input field name>", "<explicit workflow input field name>", "<stage ID>.<input field name>", "<stage index>.<input field name>", "<stage name>.<input field name>"
:type input_str: string
:returns: If the given form was one of those which uses the stage index or stage name, it is translated to the stage ID for use in the API call (stage name takes precedence) |
21,267 | def recv(self, topic, payload, qos):
data = self._parse_mqtt_to_message(topic, payload, qos)
if data is None:
return
_LOGGER.debug('Received %s', data)  # message inferred
self.add_job(self.logic, data) | Receive a MQTT message.
Call this method when a message is received from the MQTT broker. |
21,268 | def lastNode(class_, hot_map):
children = hot_map[-1][2]
if children:
return class_.lastNode(children)
else:
return hot_map[-1][1] | Return the very last node (recursively) in the hot map. |
21,269 | def pixels_from_coordinates(lat, lon, max_y, max_x):
x_ratio, y_ratio = max_x/360., max_y/180.
x, y = np.zeros(lon.shape), np.zeros(lat.shape)
x = (lon + 180.) * x_ratio
y = (lat + 90.) * y_ratio
return x, y | Return the 2 matrix with lat and lon of each pixel.
Keyword arguments:
lat -- A latitude matrix
lon -- A longitude matrix
max_y -- The max vertical pixels amount of an orthorectified image.
max_x -- The max horizontal pixels amount of an orthorectified image. |
21,270 | async def close(self) -> None:
LOGGER.debug('close >>>')  # log text inferred
if self.cfg.get('archive-on-close', {}):  # config key inferred
    await self.load_cache(True)
    Caches.purge_archives(self.dir_cache, True)
await super().close()
LOGGER.debug('close <<<') | Explicit exit. If so configured, populate cache to prove for any creds on schemata,
cred defs, and rev regs marked of interest in configuration at initialization,
archive cache, and purge prior cache archives.
:return: current object |
21,271 | def cli(env, date_min, date_max, obj_event, obj_id, obj_type, utc_offset, metadata, limit):
    columns = ['event', 'label', 'type', 'date', 'username']  # column names inferred
    event_mgr = SoftLayer.EventLogManager(env.client)
    user_mgr = SoftLayer.UserManager(env.client)
    request_filter = event_mgr.build_filter(date_min, date_max, obj_event, obj_id, obj_type, utc_offset)
    logs = event_mgr.get_event_logs(request_filter)
    log_time = "%Y-%m-%dT%H:%M:%S.%f%z"
    user_data = {}
    if metadata:
        columns.append('metadata')
    row_count = 0
    click.secho(", ".join(columns))
    for log in logs:
        if log is None:
            click.secho('No logs available for filter %s' % request_filter, fg='red')
            return
        user = log['userType']
        label = log.get('label', '')
        if user == "CUSTOMER":
            username = user_data.get(log['userId'])
            if username is None:
                username = user_mgr.get_user(log['userId'], "mask[username]")['username']
                user_data[log['userId']] = username
            user = username
        # event-log field names inferred from the SoftLayer data model
        if metadata:
            metadata_data = log['metaData'].strip("\n\t")
            click.secho("'{0}','{1}','{2}','{3}','{4}','{5}'".format(
                log['eventName'],
                label,
                log['objectName'],
                utils.clean_time(log['eventCreateDate'], in_format=log_time),
                user,
                metadata_data))
        else:
            click.secho("'{0}','{1}','{2}','{3}','{4}'".format(
                log['eventName'],
                label,
                log['objectName'],
                utils.clean_time(log['eventCreateDate'], in_format=log_time),
                user))
        row_count = row_count + 1
        if row_count >= limit and limit != -1:
            return | Get Event Logs
Example:
slcli event-log get -d 01/01/2019 -D 02/01/2019 -t User -l 10 |
21,272 | def member_at_in(self, leaderboard_name, position, **options):
if position > 0 and position <= self.total_members_in(leaderboard_name):
page_size = options.get('page_size', self.page_size)
current_page = math.ceil(float(position) / float(page_size))
offset = (position - 1) % page_size
leaders = self.leaders_in(
leaderboard_name,
current_page,
**options)
if leaders:
return leaders[offset] | Retrieve a member at the specified index from the leaderboard.
@param leaderboard_name [String] Name of the leaderboard.
@param position [int] Position in named leaderboard.
@param options [Hash] Options to be used when retrieving the member from the named leaderboard.
@return a page of leaders from the named leaderboard. |
21,273 | def _validate_auth(self, path, obj, _):
errs = []
# error strings inferred from the checks being performed
if obj.type == 'apiKey':
    if not obj.passAs:
        errs.append('need "passAs" for apiKey')
    if not obj.keyname:
        errs.append('need "keyname" for apiKey')
elif obj.type == 'oauth2':
    if not obj.grantTypes:
        errs.append('need "grantTypes" for oauth2')
return path, obj.__class__.__name__, errs | validate that apiKey and oauth2 requirements |
21,274 | def unset_values(self):
self._warn_for_no_prompt = False
try:
for sym in self.unique_defined_syms:
sym.unset_value()
for choice in self.unique_choices:
choice.unset_value()
finally:
self._warn_for_no_prompt = True | Resets the user values of all symbols, as if Kconfig.load_config() or
Symbol.set_value() had never been called. |
21,275 | def get_location_from_sina(ip):
global sina
response = requests.get(sina % ip)
if not response.status_code == 200:
return
l = json.loads(response.content)
if not l['ret'] == 1:
    return
return ("%s,%s,%s,%s" % (l['country'], l['province'], l['city'], l['isp'])).encode() | {
"ret":1,
"start":"58.18.0.0",
"end":"58.18.15.255",
"country":"中国",
"province":"内蒙古",
"city":"兴安",
"district":"",
"isp":"联通",
"type":"",
"desc":""
} |
21,276 | def append(self, lines):
if isinstance(lines, list):
self._lines = self._lines + lines
elif isinstance(lines, str):
lines = lines.split('\n')
self._lines = self._lines + lines
else:
raise TypeError('unsupported type: {}'.format(type(lines))) | Args:
lines (list): List of line strings to append to the end of the editor |
21,277 | def last_edit_time(self):
    data = {'_xsrf': self.xsrf, 'offset': '1'}  # payload keys inferred
    res = self._session.post(self.url + 'log', data=data)
    _, content = res.json()['msg']
    soup = BeautifulSoup(content)
    time_string = soup.find_all('time')[0]['datetime']
    return datetime.strptime(time_string, "%Y-%m-%d %H:%M:%S") | :return: last edit time of the question
:rtype: datetime.datetime |
21,278 | def _original_path(self, path):
def components_to_path():
if len(path_components) > len(normalized_components):
normalized_components.extend(
path_components[len(normalized_components):])
sep = self._path_separator(path)
normalized_path = sep.join(normalized_components)
if path.startswith(sep) and not normalized_path.startswith(sep):
normalized_path = sep + normalized_path
return normalized_path
if self.is_case_sensitive or not path:
return path
path_components = self._path_components(path)
normalized_components = []
current_dir = self.root
for component in path_components:
if not isinstance(current_dir, FakeDirectory):
return components_to_path()
dir_name, current_dir = self._directory_content(
current_dir, component)
if current_dir is None or (
isinstance(current_dir, FakeDirectory) and
current_dir._byte_contents is None and
current_dir.st_size == 0):
return components_to_path()
normalized_components.append(dir_name)
return components_to_path() | Return a normalized case version of the given path for
case-insensitive file systems. For case-sensitive file systems,
return path unchanged.
Args:
path: the file path to be transformed
Returns:
A version of path matching the case of existing path elements. |
21,279 | def oldest_peer(peers):
local_unit_no = int(os.getenv('JUJU_UNIT_NAME').split('/')[1])
for peer in peers:
remote_unit_no = int(peer.split('/')[1])
if remote_unit_no < local_unit_no:
return False
return True | Determines who the oldest peer is by comparing unit numbers. |
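Juju unit names take the form '<application>/<number>', so with the environment set the check reduces to integer comparison:

```python
import os
os.environ['JUJU_UNIT_NAME'] = 'myapp/0'
print(oldest_peer(['myapp/1', 'myapp/2']))  # True: no peer has a lower unit number
```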
21,280 | def _config_bootstrap(self):
if not os.path.exists(CONFIG_PATH):
os.makedirs(CONFIG_PATH)
if not os.path.exists(CONFIG_FILE):
json.dump(CONFIG_DEFAULTS, open(CONFIG_FILE, 'w'), indent=4,
          separators=(',', ': '))
config = CONFIG_DEFAULTS
if self._email and self._password:
    # config key names and obfuscate() mode strings inferred
    config['email'] = self._email
    config['password'] = str(obfuscate(self._password, 'store'))
    self._log.debug("Caching authentication in config file")
    json.dump(config, open(CONFIG_FILE, 'w'), indent=4,
              separators=(',', ': '))
else:
    config = json.load(open(CONFIG_FILE))
    if config.get('py2', PY2) != PY2:
        raise Exception("Python versions have changed. Please run `setup` again to reconfigure the client.")
    if config['email'] and config['password']:
        self._email = config['email']
        self._password = obfuscate(str(config['password']), 'fetch')
self._log.debug("Loaded authentication from config file") | Go through and establish the defaults on the file system.
The approach here was stolen from the CLI tool provided with the
module. Idea being that the user should not always need to provide a
username and password in order to run the script. If the configuration
file is already present with valid data, then lets use it. |
21,281 | def sound_touch_stop(self, call_params):
path = '/' + self.api_version + '/SoundTouch/'  # endpoint inferred
method = 'POST'
return self.request(path, method, call_params) | REST Remove soundtouch audio effects on a Call |
21,282 | def authenticate_keystone(self, keystone_ip, username, password,
api_version=False, admin_port=False,
user_domain_name=None, domain_name=None,
project_domain_name=None, project_name=None):
self.log.debug('Authenticating with keystone')
if not api_version:
api_version = 2
sess, auth = self.get_keystone_session(
keystone_ip=keystone_ip,
username=username,
password=password,
api_version=api_version,
admin_port=admin_port,
user_domain_name=user_domain_name,
domain_name=domain_name,
project_domain_name=project_domain_name,
project_name=project_name
)
if api_version == 2:
client = keystone_client.Client(session=sess)
else:
client = keystone_client_v3.Client(session=sess)
client.auth_ref = auth.get_access(sess)
return client | Authenticate with Keystone |
21,283 | def client_auth(self):
if not self._client_auth:
self._client_auth = E.Element('merchantAuthentication')
E.SubElement(self._client_auth, 'name').text = self.config.login_id
E.SubElement(self._client_auth, 'transactionKey').text = self.config.transaction_key
return self._client_auth | Generate an XML element with client auth data populated. |
21,284 | def smart_truncate(value, max_length=0, word_boundaries=False, separator=' '):
value = value.strip(separator)
if not max_length:
return value
if len(value) < max_length:
return value
if not word_boundaries:
return value[:max_length].strip(separator)
if separator not in value:
return value[:max_length]
truncated = ''
for word in value.split(separator):
if word:
next_len = len(truncated) + len(word) + len(separator)
if next_len <= max_length:
truncated += '{0}{1}'.format(word, separator)
if not truncated:
truncated = value[:max_length]
return truncated.strip(separator) | Truncate a string |
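A worked example of both paths: with word boundaries each word is kept only while word plus separator still fits; without, the string is simply cut at max_length:

```python
assert smart_truncate('one two three', max_length=9, word_boundaries=True) == 'one two'
assert smart_truncate('one two three', max_length=9) == 'one two t'
```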
21,285 | def _set_ldp_protocol_stats_instance_total(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v, base=ldp_protocol_stats_instance_total.ldp_protocol_stats_instance_total, is_container='container', presence=False, yang_name="ldp-protocol-stats-instance-total", rest_name="ldp-protocol-stats-instance-total", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)  # extension dict lost in extraction; namespace/module inferred
except (TypeError, ValueError):
    raise ValueError({
        'error-string': 'ldp_protocol_stats_instance_total must be of a type compatible with container',
        'defined-type': "container",
        'generated-type': 'YANGDynClass(...)',  # full generated-type literal lost in extraction
    })
self.__ldp_protocol_stats_instance_total = t
if hasattr(self, '_set'):
self._set() | Setter method for ldp_protocol_stats_instance_total, mapped from YANG variable /mpls_state/ldp/statistics/ldp_protocol_stats_instance_total (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_ldp_protocol_stats_instance_total is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ldp_protocol_stats_instance_total() directly. |
21,286 | def nullval(cls):
d = dict(cls.__dict__.items())
for k in d:
d[k] = 0
d['sl'] = cls.sl  # key name inferred from the right-hand side
d[cls.level] = 0
return cls(**d) | Create a new instance where all of the values are 0 |
21,287 | def rt_location_log(logfile):
    # binary mode in both branches; lines are decoded explicitly below
    if os.name == 'nt':
        f = open(logfile, 'rb')
    else:
        f = open(logfile, 'rb')
    locations = []
    for line_binary in f:
        try:
            line = line_binary.decode("utf8", "ignore")
        except UnicodeDecodeError:
            warnings.warn('Could not decode line, skipping')  # message inferred
            print(line_binary)
            continue
        match = re.search("GPS: POSITION:", line)
        if match:
            loc = line[match.end() + 1:].rstrip().split()
            lat_sign = loc[0][0]
            lat = loc[0][1:].split(':')
            lat = int(lat[0]) + (int(lat[1]) / 60.0) + (float(lat[2]) / 3600.0)
            if lat_sign == 'S':
                lat *= -1
            lon_sign = loc[1][0]
            lon = loc[1][1:].split(':')
            lon = int(lon[0]) + (int(lon[1]) / 60.0) + (float(lon[2]) / 3600.0)
            if lon_sign == 'W':
                lon *= -1
            elev_sign = loc[2][0]
            elev_unit = loc[2][-1]
            if not elev_unit == 'M':  # unit character inferred
                raise NotImplementedError('Elevation is not in meters: ' +
                                          elev_unit)
            elev = int(loc[2][1:-1])
            if elev_sign == '-':
                elev *= -1
            elev /= 1000
            locations.append((lat, lon, elev))
f.close()
return locations | Extract location information from a RefTek raw log-file.
Function to read a specific RefTek RT130 log-file and find all location
information.
:type logfile: str
:param logfile: The logfile to look in
:returns: list of tuples of lat, lon, elevation in decimal degrees and km.
:rtype: list |
21,288 | def img(self):
import wx
with warnings.catch_warnings():
warnings.simplefilter('ignore')
img = wx.EmptyImage(self.width, self.height)
img.SetData(self.imgstr)
return img | return a wx image |
21,289 | def get_country_by_id(self, country_id) -> 'Country':
VALID_POSITIVE_INT.validate(country_id, 'country_id', exc=ValueError)  # label inferred
if country_id not in self._countries_by_id.keys():
for country in self.countries:
if country.country_id == country_id:
return country
raise ValueError(country_id)
else:
return self._countries_by_id[country_id] | Gets a country in this coalition by its ID
Args:
country_id: country Id
Returns: Country |
21,290 | def add_health_monitor(self, loadbalancer, type, delay=10, timeout=10,
attemptsBeforeDeactivation=3, path="/", statusRegex=None,
bodyRegex=None, hostHeader=None):
uri = "/loadbalancers/%s/healthmonitor" % utils.get_id(loadbalancer)
req_body = {"healthMonitor": {
"type": type,
"delay": delay,
"timeout": timeout,
"attemptsBeforeDeactivation": attemptsBeforeDeactivation,
}}
uptype = type.upper()
if uptype.startswith("HTTP"):
lb = self._get_lb(loadbalancer)
if uptype != lb.protocol:
raise exc.ProtocolMismatch("Cannot set the Health Monitor type "
"to '%s' when the Load Balancer's protocol is '%s'." % (type, lb.protocol))
if not all((path, statusRegex, bodyRegex)):
raise exc.MissingHealthMonitorSettings("When creating an HTTP(S) monitor, "
"you must provide the 'path', 'statusRegex' and 'bodyRegex' parameters.")
body_hm = req_body["healthMonitor"]
body_hm["path"] = path
body_hm["statusRegex"] = statusRegex
body_hm["bodyRegex"] = bodyRegex
if hostHeader:
body_hm["hostHeader"] = hostHeader
resp, body = self.api.method_put(uri, body=req_body)
return body | Adds a health monitor to the load balancer. If a monitor already
exists, it is updated with the supplied settings. |
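An illustrative call against a cloud-load-balancer manager (the object names, path, and regexes are assumptions; the HTTP(S) branch above requires path, statusRegex, and bodyRegex):
clb.add_health_monitor(lb, type="HTTP", delay=10, timeout=10,
                       attemptsBeforeDeactivation=3, path="/healthcheck",
                       statusRegex="^[23][0-9][0-9]$", bodyRegex="ok")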
21,291 | def get_datatype(self, table: str, column: str) -> str:
return self.flavour.get_datatype(self, table, column).upper() | Returns database SQL datatype for a column: e.g. VARCHAR. |
21,292 | def infer(self, number_of_processes=1, *args, **kwargs):
if number_of_processes == 1:
results = map(lambda x: x.infer(*args, **kwargs), self._inference_objects)
else:
inference_objects = self._inference_objects
results = raw_results_in_parallel(self._inference_objects, number_of_processes, *args,
**kwargs)
results = [inference._result_from_raw_result(raw_result)
for inference, raw_result in zip(inference_objects, results)]
results = sorted(results, key=lambda x: x.distance_at_minimum)
return InferenceResultsCollection(results) | :param number_of_processes: If set to more than 1, the inference routines will be paralellised
using ``multiprocessing`` module
:param args: arguments to pass to :meth:`Inference.infer`
:param kwargs: keyword arguments to pass to :meth:`Inference.infer`
:return: an InferenceResultsCollection with results sorted by distance at the minimum |
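A usage sketch (the group object and attribute names are assumptions based on the sorting above):
# Illustrative: fan the bundled inference problems out over 4 worker processes.
collection = inference_group.infer(number_of_processes=4)
# Results come back sorted by distance_at_minimum, best fit first.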
21,293 | def ip_rtm_config_route_static_route_oif_vrf_static_route_oif_type(self, **kwargs):
config = ET.Element("config")
ip = ET.SubElement(config, "ip", xmlns="urn:brocade.com:mgmt:brocade-common-def")
rtm_config = ET.SubElement(ip, "rtm-config", xmlns="urn:brocade.com:mgmt:brocade-rtm")
route = ET.SubElement(rtm_config, "route")
static_route_oif_vrf = ET.SubElement(route, "static-route-oif-vrf")
static_route_next_vrf_dest_key = ET.SubElement(static_route_oif_vrf, "static-route-next-vrf-dest")
static_route_next_vrf_dest_key.text = kwargs.pop('static_route_next_vrf_dest')
next_hop_vrf_key = ET.SubElement(static_route_oif_vrf, "next-hop-vrf")
next_hop_vrf_key.text = kwargs.pop('next_hop_vrf')
static_route_oif_name_key = ET.SubElement(static_route_oif_vrf, "static-route-oif-name")
static_route_oif_name_key.text = kwargs.pop('static_route_oif_name')
static_route_oif_type = ET.SubElement(static_route_oif_vrf, "static-route-oif-type")
static_route_oif_type.text = kwargs.pop('static_route_oif_type')
callback = kwargs.pop('callback', self._callback)
return callback(config) | Auto Generated Code |
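An illustrative invocation; the keyword names follow the generator's convention (XML element names with dashes turned into underscores) and the values are made up:
device.ip_rtm_config_route_static_route_oif_vrf_static_route_oif_type(
    static_route_next_vrf_dest='10.0.0.0/24',
    next_hop_vrf='default-vrf',
    static_route_oif_name='1/1',
    static_route_oif_type='ethernet')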
21,294 | def _property_set(self, msg):
prop = self._sent_property.get('prop')  # dict keys reconstructed; 'prop'/'val' follow the common sent-property pattern
if prop and hasattr(self, prop):
setattr(self, prop, self._sent_property.get('val'))
self._sent_property = {} | Set command received and acknowledged. |
21,295 | def svm_predict(y, x, m, options=""):
def info(s):
print(s)
if scipy and isinstance(x, scipy.ndarray):
x = scipy.ascontiguousarray(x)
elif sparse and isinstance(x, sparse.spmatrix):
x = x.tocsr()
elif not isinstance(x, (list, tuple)):
raise TypeError("type of x: {0} is not supported!".format(type(x)))
if (not isinstance(y, (list, tuple))) and (not (scipy and isinstance(y, scipy.ndarray))):
raise TypeError("type of y: {0} is not supported!".format(type(y)))
predict_probability = 0
argv = options.split()
i = 0
while i < len(argv):
if argv[i] == '-b':
i += 1
predict_probability = int(argv[i])
elif argv[i] == '-q':
info = print_null
else:
raise ValueError("Wrong options")
i+=1
svm_type = m.get_svm_type()
is_prob_model = m.is_probability_model()
nr_class = m.get_nr_class()
pred_labels = []
pred_values = []
if scipy and isinstance(x, sparse.spmatrix):
nr_instance = x.shape[0]
else:
nr_instance = len(x)
if predict_probability:
if not is_prob_model:
raise ValueError("Model does not support probabiliy estimates")
if svm_type in [NU_SVR, EPSILON_SVR]:
info("Prob. model for test data: target value = predicted value + z,\n"
"z: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g" % m.get_svr_probability());
nr_class = 0
prob_estimates = (c_double * nr_class)()
for i in range(nr_instance):
if scipy and isinstance(x, sparse.spmatrix):
indslice = slice(x.indptr[i], x.indptr[i+1])
xi, idx = gen_svm_nodearray((x.indices[indslice], x.data[indslice]), isKernel=(m.param.kernel_type == PRECOMPUTED))
else:
xi, idx = gen_svm_nodearray(x[i], isKernel=(m.param.kernel_type == PRECOMPUTED))
label = libsvm.svm_predict_probability(m, xi, prob_estimates)
values = prob_estimates[:nr_class]
pred_labels += [label]
pred_values += [values]
else:
if is_prob_model:
info("Model supports probability estimates, but disabled in predicton.")
if svm_type in (ONE_CLASS, EPSILON_SVR, NU_SVC):
nr_classifier = 1
else:
nr_classifier = nr_class*(nr_class-1)//2
dec_values = (c_double * nr_classifier)()
for i in range(nr_instance):
if scipy and isinstance(x, sparse.spmatrix):
indslice = slice(x.indptr[i], x.indptr[i+1])
xi, idx = gen_svm_nodearray((x.indices[indslice], x.data[indslice]), isKernel=(m.param.kernel_type == PRECOMPUTED))
else:
xi, idx = gen_svm_nodearray(x[i], isKernel=(m.param.kernel_type == PRECOMPUTED))
label = libsvm.svm_predict_values(m, xi, dec_values)
if(nr_class == 1):
values = [1]
else:
values = dec_values[:nr_classifier]
pred_labels += [label]
pred_values += [values]
if len(y) == 0:
y = [0] * nr_instance
ACC, MSE, SCC = evaluations(y, pred_labels)
if svm_type in [EPSILON_SVR, NU_SVR]:
info("Mean squared error = %g (regression)" % MSE)
info("Squared correlation coefficient = %g (regression)" % SCC)
else:
info("Accuracy = %g%% (%d/%d) (classification)" % (ACC, int(round(nr_instance*ACC/100)), nr_instance))
return pred_labels, (ACC, MSE, SCC), pred_values | svm_predict(y, x, m [, options]) -> (p_labels, p_acc, p_vals)
y: a list/tuple/ndarray of l true labels (type must be int/double).
It is used for calculating the accuracy. Use [] if true labels are
unavailable.
x: 1. a list/tuple of l training instances. Feature vector of
each training instance is a list/tuple or dictionary.
2. an l * n numpy ndarray or scipy spmatrix (n: number of features).
Predict data (y, x) with the SVM model m.
options:
-b probability_estimates: whether to predict probability estimates,
0 or 1 (default 0); for one-class SVM only 0 is supported.
-q : quiet mode (no outputs).
The return tuple contains
p_labels: a list of predicted labels
p_acc: a tuple including accuracy (for classification), mean-squared
error, and squared correlation coefficient (for regression).
p_vals: a list of decision values or probability estimates (if '-b 1'
is specified). If k is the number of classes, for decision values,
each element includes results of predicting k(k-1)/2 binary-class
SVMs. For probabilities, each element contains k values indicating
the probability that the testing instance is in each class.
Note that the order of classes here is the same as 'model.label'
field in the model structure. |
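A minimal end-to-end sketch in the standard libsvm style (the training data is made up):
from svmutil import svm_train, svm_predict

y = [1, -1, 1, -1]
x = [{1: 1.0, 2: 1.0}, {1: -1.0, 2: -1.0}, {1: 0.9, 2: 1.1}, {1: -0.8, 2: -1.2}]
m = svm_train(y, x, '-c 4')
p_labels, p_acc, p_vals = svm_predict(y, x, m)
# p_acc[0] is the classification accuracy in percent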
21,296 | def unload_plugin(name, category=None):
if category is not None:
_all_plugins[category].pop(name)
else:
for cat in _all_plugins:
if name in _all_plugins[cat]:
_all_plugins[cat].pop(name) | remove single plugin
Parameters
----------
name : str
plugin name
category : str
plugin category
Examples
--------
>>> from pprint import pprint
>>> pprint(view_plugins())
{'decoders': {}, 'encoders': {}, 'parsers': {}}
>>> class DecoderPlugin(object):
... plugin_name = 'example'
... plugin_descript = 'a decoder for dicts containing _example_ key'
... dict_signature = ('_example_',)
...
>>> errors = load_plugin_classes([DecoderPlugin],category='decoders')
>>> pprint(view_plugins())
{'decoders': {'example': 'a decoder for dicts containing _example_ key'},
'encoders': {},
'parsers': {}}
>>> unload_plugin('example','decoders')
>>> pprint(view_plugins())
{'decoders': {}, 'encoders': {}, 'parsers': {}} |
21,297 | def create_tcp_socket (self, host):
port = int(self['port'])
sockinfo = get_sockinfo(host, port=port)
sock = create_socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect(sockinfo[0][4])
except socket.error:
sock.close()
raise
return sock | Create tcp socket, connect to it and return socket object. |
21,298 | def plot_rolling_volatility(returns, factor_returns=None,
rolling_window=APPROX_BDAYS_PER_MONTH * 6,
legend_loc='best', ax=None, **kwargs):
if ax is None:
ax = plt.gca()
y_axis_formatter = FuncFormatter(utils.two_dec_places)
ax.yaxis.set_major_formatter(FuncFormatter(y_axis_formatter))
rolling_vol_ts = timeseries.rolling_volatility(
returns, rolling_window)
rolling_vol_ts.plot(alpha=.7, lw=3, color='orangered', ax=ax,
**kwargs)  # colors and label strings below reconstructed from pyfolio's plotting conventions
if factor_returns is not None:
rolling_vol_ts_factor = timeseries.rolling_volatility(
factor_returns, rolling_window)
rolling_vol_ts_factor.plot(alpha=.7, lw=3, color='grey', ax=ax,
**kwargs)
ax.set_title('Rolling volatility (6-month)')
ax.axhline(
rolling_vol_ts.mean(),
color='steelblue',
linestyle='--',
lw=3)
ax.axhline(0.0, color='black', linestyle='-', lw=2)
ax.set_ylabel('Volatility')
ax.set_xlabel('')
if factor_returns is None:
ax.legend(['Volatility', 'Average volatility'],
loc=legend_loc, frameon=True, framealpha=0.5)
else:
ax.legend(['Volatility', 'Benchmark volatility', 'Average volatility'],
loc=legend_loc, frameon=True, framealpha=0.5)
return ax | Plots the rolling volatility versus date.
Parameters
----------
returns : pd.Series
Daily returns of the strategy, noncumulative.
- See full explanation in tears.create_full_tear_sheet.
factor_returns : pd.Series, optional
Daily noncumulative returns of the benchmark factor to which betas are
computed. Usually a benchmark such as market returns.
- This is in the same style as returns.
rolling_window : int, optional
The days window over which to compute the volatility.
legend_loc : matplotlib.loc, optional
The location of the legend on the plot.
ax : matplotlib.Axes, optional
Axes upon which to plot.
**kwargs, optional
Passed to plotting function.
Returns
-------
ax : matplotlib.Axes
The axes that were plotted on. |
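A usage sketch, assuming pyfolio-style daily return Series (the variable names are illustrative):
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
plot_rolling_volatility(strategy_returns,
                        factor_returns=benchmark_returns, ax=ax)
plt.show()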
21,299 | def create_providerinfo(self, capabilities):
_pinfo = self.package_capabilities()
not_supported = {}
for key, val in capabilities.items():
try:
allowed = _pinfo[key]
except KeyError:
_pinfo[key] = val
else:
if isinstance(allowed, bool):
if allowed is False:
if val is True:
not_supported[key] = True
else:
_pinfo[key] = val
elif isinstance(allowed, str):
if val != allowed:
not_supported[key] = val
elif isinstance(allowed, list):
if isinstance(val, str):
sv = {val}
else:
try:
sv = set(val)
except TypeError:
if key == 'response_types_supported':
sv = set()
for v in val:
v.sort()
sv.add(' '.join(v))
else:
raise
else:
sv = set()
for v in val:
vs = v.split()
vs.sort()
sv.add(' '.join(vs))
sa = set(allowed)
if (sv & sa) == sv:
_pinfo[key] = list(sv)
else:
not_supported[key] = list(sv - sa)
if not_supported:
_msg = "Server doesnwebfingerprovider_info{}_endpoint'.format(name)] = instance.full_path
return _pinfo | Dynamically create the provider info response
:param capabilities:
:return: |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.