code | docstring
---|---
def clear_adb_log(self):
try:
self._ad.adb.logcat('-c')
except adb.AdbError as e:
if b'failed to clear' in e.stderr:
self._ad.log.warning(
'Encountered known Android error to clear logcat.')
else:
raise
|
Clears cached adb content.
|
def bsrchd(value, ndim, array):
value = ctypes.c_double(value)
ndim = ctypes.c_int(ndim)
array = stypes.toDoubleVector(array)
return libspice.bsrchd_c(value, ndim, array)
|
Do a binary search for a key value within a double precision array,
assumed to be in increasing order. Return the index of the matching
array entry, or -1 if the key value is not found.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/bsrchd_c.html
:param value: Value to find in array.
:type value: float
:param ndim: Dimension of array.
:type ndim: int
:param array: Array to be searched.
:type array: Array of floats
:return: index
:rtype: int
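For illustration, a hedged usage sketch (assuming the wrapper is exposed as ``spiceypy.bsrchd``):
.. code-block:: python

    import spiceypy

    array = [-2.0, -1.0, 0.0, 1.0, 2.0]
    assert spiceypy.bsrchd(1.0, 5, array) == 3   # found at index 3
    assert spiceypy.bsrchd(3.0, 5, array) == -1  # not found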
|
def remote_urls(self):
cmd = 'git config -l | grep "url"'
return self.sh(cmd,
shell=True,
ignore_error=True).strip()
|
Get all configured remote urls for this Repository
Returns:
str: the configured remote urls for this Repository
(``git config -l | grep "url"``)
|
def make_elements(tokens, text, start=0, end=None, fallback=None):
result = []
end = end or len(text)
prev_end = start
for token in tokens:
if prev_end < token.start:
result.append(fallback(text[prev_end:token.start]))
result.append(token.as_element())
prev_end = token.end
if prev_end < end:
result.append(fallback(text[prev_end:end]))
return result
|
Make elements from a list of parsed tokens.
It will turn all unmatched holes into fallback elements.
:param tokens: a list of parsed tokens.
:param text: the original text.
:param start: the offset at which parsing starts. Defaults to the start of text.
:param end: the offset at which parsing ends. Defaults to the end of text.
:param fallback: fallback element type.
:returns: a list of inline elements.
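A minimal sketch of the expected behavior, using a hypothetical Token type (not from the source):
.. code-block:: python

    class Token:
        def __init__(self, start, end, text):
            self.start, self.end, self.text = start, end, text
        def as_element(self):
            return ('em', self.text)

    text = 'a *b* c'
    # one parsed token covering '*b*' at offsets 2..5
    elements = make_elements([Token(2, 5, 'b')], text,
                             fallback=lambda s: ('text', s))
    # -> [('text', 'a '), ('em', 'b'), ('text', ' c')]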
|
def chput(local_path=None, remote_path=None, user=None, group=None,
mode=None, use_sudo=True, mirror_local_mode=False, check=True):
result = None
if env.get('full') or not check or diff(local_path, remote_path):
result = put(local_path, remote_path, use_sudo,
mirror_local_mode, mode)
with hide('commands'):
chown(remote_path, user, group)
return result
|
Put a file and set user and group ownership. Defaults to using sudo.
|
def _normalize_bbox(self, bbox, size):
bbox_ratio = float(bbox.width) / float(bbox.height)
size_ratio = float(size[0]) / float(size[1])
if round(size_ratio, 4) == round(bbox_ratio, 4):
return bbox
else:
if bbox.height * size_ratio >= bbox.width:
diff = bbox.height*size_ratio - bbox.width
return BBox((bbox.xmin - diff/2, bbox.ymin, bbox.xmax + diff/2, bbox.ymax), bbox.projection)
else:
diff = abs(bbox.width/size_ratio - bbox.height)
return BBox((bbox.xmin, bbox.ymin - diff/2, bbox.xmax, bbox.ymax + diff/2), bbox.projection)
|
Returns this bbox normalized to match the ratio of the given size.
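A worked example of the padding logic (hypothetical numbers):
.. code-block:: python

    # bbox spans 0..10 x 0..10 (ratio 1.0); target size is 200x100 (ratio 2.0).
    # Since bbox.height * 2.0 >= bbox.width, the width grows to 20 and the
    # extra 10 units are split evenly: BBox((-5, 0, 15, 10), projection).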
|
def update(self):
self.kSS = 1.0
self.MSS = 1.0
self.KtoLnow_init = self.kSS
self.Rfunc = ConstantFunction(self.Rfree)
self.wFunc = ConstantFunction(self.wRte)
self.RfreeNow_init = self.Rfunc(self.kSS)
self.wRteNow_init = self.wFunc(self.kSS)
self.MaggNow_init = self.kSS
self.AaggNow_init = self.kSS
self.PermShkAggNow_init = 1.0
self.TranShkAggNow_init = 1.0
self.makeAggShkDstn()
self.AFunc = ConstantFunction(1.0)
|
Use primitive parameters to set basic objects. This is an extremely stripped-down version
of update for CobbDouglasEconomy.
Parameters
----------
none
Returns
-------
none
|
def api_headers(self, value):
value = validators.validate_api_headers("api_headers", value)
self._set_option("api_headers", value)
|
Set value for API headers.
|
def Error(filename, linenum, category, confidence, message):
if _ShouldPrintError(category, confidence, linenum):
_cpplint_state.IncrementErrorCount(category)
if _cpplint_state.output_format == 'vs7':
sys.stderr.write('%s(%s): %s [%s] [%d]\n' % (
filename, linenum, message, category, confidence))
elif _cpplint_state.output_format == 'eclipse':
sys.stderr.write('%s:%s: warning: %s [%s] [%d]\n' % (
filename, linenum, message, category, confidence))
else:
sys.stderr.write('%s:%s: %s [%s] [%d]\n' % (
filename, linenum, message, category, confidence))
|
Logs the fact we've found a lint error.
We log where the error was found, and also our confidence in the error,
that is, how certain we are this is a legitimate style regression, and
not a misidentification or a use that's sometimes justified.
False positives can be suppressed by the use of
"cpplint(category)" comments on the offending line. These are
parsed into _error_suppressions.
Args:
filename: The name of the file containing the error.
linenum: The number of the line containing the error.
category: A string used to describe the "category" this bug
falls under: "whitespace", say, or "runtime". Categories
may have a hierarchy separated by slashes: "whitespace/indent".
confidence: A number from 1-5 representing a confidence score for
the error, with 5 meaning that we are certain of the problem,
and 1 meaning that it could be a legitimate construct.
message: The error message.
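Illustrative output for each ``output_format`` (assumed file, line, and message values):
.. code-block:: python

    # vs7:     foo.cc(42): Missing space around = [whitespace/operators] [4]
    # eclipse: foo.cc:42: warning: Missing space around = [whitespace/operators] [4]
    # default: foo.cc:42: Missing space around = [whitespace/operators] [4]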
|
def execute_request(server_url, creds, namespace, classname):
print('Requesting url=%s, ns=%s, class=%s' % \
(server_url, namespace, classname))
try:
CONN = WBEMConnection(server_url, creds,
default_namespace=namespace,
no_verification=True)
INSTANCES = CONN.EnumerateInstances(classname)
print('instances type=%s len=%s' % (type(INSTANCES),
len(INSTANCES)))
for inst in INSTANCES:
print('path=%s\n' % inst.path)
print(inst.tomof())
except Error as err:
if isinstance(err, CIMError):
print('Operation Failed: CIMError: code=%s, Description=%s' % \
(err.status_code_name, err.status_description))
else:
print ("Operation failed: %s" % err)
sys.exit(1)
|
Open a connection with the server_url and creds, and
enumerate instances defined by the functions namespace and
classname arguments.
Displays either the error return or the mof for instances
returned.
|
def image_to_string(image, lang=None, boxes=False):
input_file_name = '%s.bmp' % tempnam()
output_file_name_base = tempnam()
if not boxes:
output_file_name = '%s.txt' % output_file_name_base
else:
output_file_name = '%s.box' % output_file_name_base
try:
image.save(input_file_name)
status, error_string = run_tesseract(input_file_name,
output_file_name_base,
lang=lang,
boxes=boxes)
if status:
errors = get_errors(error_string)
raise TesseractError(status, errors)
        with open(output_file_name) as f:
            return f.read().strip()
finally:
cleanup(input_file_name)
cleanup(output_file_name)
|
Runs tesseract on the specified image. First, the image is written to disk,
and then the tesseract command is run on the image. Tesseract's result is
read, and the temporary files are erased.
|
def get_objective_form(self, *args, **kwargs):
if isinstance(args[-1], list) or 'objective_record_types' in kwargs:
return self.get_objective_form_for_create(*args, **kwargs)
else:
return self.get_objective_form_for_update(*args, **kwargs)
|
Pass through to provider ObjectiveAdminSession.get_objective_form_for_update
|
def random_name_gen(size=6):
return ''.join(
[random.choice(string.ascii_uppercase)] +
[random.choice(string.ascii_uppercase + string.digits) for i in range(size - 1)]
) if size > 0 else ''
|
Generate a random python attribute name.
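A quick behavioral sketch:
.. code-block:: python

    name = random_name_gen(6)          # e.g. 'QX7R2K'
    assert len(name) == 6
    assert name[0].isalpha()           # first character is always a letter
    assert random_name_gen(0) == ''    # size <= 0 yields an empty string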
|
def get_consumption(self):
self.get_status()
try:
self.consumption = self.data['power']
except TypeError:
self.consumption = 0
return self.consumption
|
Get current power consumption in mWh.
|
def _read_country_names(self, countries_file=None):
if not countries_file:
countries_file = os.path.join(os.path.dirname(__file__), self.COUNTRY_FILE_DEFAULT)
with open(countries_file) as f:
countries_json = f.read()
return json.loads(countries_json)
|
Read list of countries from specified country file or default file.
|
def maybe_cythonize(extensions, *args, **kwargs):
if len(sys.argv) > 1 and 'clean' in sys.argv:
return extensions
if not cython:
return extensions
numpy_incl = pkg_resources.resource_filename('numpy', 'core/include')
for ext in extensions:
if (hasattr(ext, 'include_dirs') and
numpy_incl not in ext.include_dirs):
ext.include_dirs.append(numpy_incl)
build_ext.render_templates(_pxifiles)
return cythonize(extensions, *args, **kwargs)
|
Render tempita templates before calling cythonize
|
def try_int(o:Any)->Any:
"Try to convert `o` to int, default to `o` if not possible."
if isinstance(o, (np.ndarray,Tensor)): return o if o.ndim else int(o)
if isinstance(o, collections.Sized) or getattr(o,'__array_interface__',False): return o
try: return int(o)
except: return o
|
Try to convert `o` to int, default to `o` if not possible.
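Behavior sketch (note that strings are ``Sized`` and therefore pass through unchanged):
.. code-block:: python

    try_int(3.7)      # -> 3
    try_int([1, 2])   # -> [1, 2]  (Sized, passed through)
    try_int('3')      # -> '3'    (strings are Sized too)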
|
def _check_list_props(self, inst: "InstanceNode") -> None:
if self.keys:
self._check_keys(inst)
for u in self.unique:
self._check_unique(u, inst)
|
Check uniqueness of keys and "unique" properties, if applicable.
|
def find_free(cls, vrf, args):
xmlrpc = XMLRPCConnection()
q = {
'args': args,
'auth': AuthOptions().options
}
if isinstance(vrf, VRF):
q['vrf'] = { 'id': vrf.id }
elif vrf is None:
q['vrf'] = None
else:
raise NipapValueError('vrf parameter must be instance of VRF class')
try:
find_res = xmlrpc.connection.find_free_prefix(q)
except xmlrpclib.Fault as xml_fault:
raise _fault_to_exception(xml_fault)
return find_res
|
Finds a free prefix.
Maps to the function
:py:func:`nipap.backend.Nipap.find_free_prefix` in the backend.
Please see the documentation for the backend function for
information regarding input arguments and return values.
|
def discount_rewards(r):
discounted_r = np.zeros_like(r)
running_add = 0
for t in reversed(range(0, r.size)):
if r[t] != 0:
running_add = 0
running_add = running_add * gamma + r[t]
discounted_r[t] = running_add
return discounted_r
|
Take a 1D float array of rewards and compute the discounted reward.
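A worked example, assuming a module-level ``gamma`` of 0.99 (the reset on nonzero rewards marks episode boundaries, as in Pong):
.. code-block:: python

    import numpy as np

    gamma = 0.99
    r = np.array([0.0, 0.0, 1.0])
    discount_rewards(r)   # -> [0.9801, 0.99, 1.0]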
|
def crs(self, crs):
if isinstance(crs, QgsCoordinateReferenceSystem):
self._crs = crs
self._is_ready = False
else:
raise InvalidExtentError('%s is not a valid CRS object.' % crs)
|
Setter for extent_crs property.
:param crs: The coordinate reference system for the analysis boundary.
:type crs: QgsCoordinateReferenceSystem
|
def reload_wsgi():
"Gets the PID for the wsgi process and sends a HUP signal."
pid = run('supervisorctl pid varify-{host}'.format(host=env.host))
try:
int(pid)
sudo('kill -HUP {0}'.format(pid))
except (TypeError, ValueError):
pass
|
Gets the PID for the wsgi process and sends a HUP signal.
|
def _on_items_changed(self, change):
if change['type'] != 'container':
return
op = change['operation']
if op == 'append':
i = len(change['value'])-1
self.adapter.notifyItemInserted(i)
elif op == 'insert':
self.adapter.notifyItemInserted(change['index'])
elif op in ('pop', '__delitem__'):
self.adapter.notifyItemRemoved(change['index'])
elif op == '__setitem__':
self.adapter.notifyItemChanged(change['index'])
elif op == 'extend':
n = len(change['items'])
i = len(change['value'])-n
self.adapter.notifyItemRangeInserted(i, n)
elif op in ('remove', 'reverse', 'sort'):
self.adapter.notifyDataSetChanged()
|
Observe container events on the items list and update the
adapter appropriately.
|
def spawn(self, context=None):
if context is None:
context = self.default_context
if isinstance(context, collections.Callable):
context = context()
if not isinstance(context, collections.Mapping):
raise PatchboardError('Cannot determine a valid context')
return Client(self, context, self.api, self.endpoint_classes)
|
context may be a callable or a dict.
|
def date_to_um_date(date):
assert date.hour == 0 and date.minute == 0 and date.second == 0
return [date.year, date.month, date.day, 0, 0, 0]
|
Convert a date object to 'year, month, day, hour, minute, second.'
|
def list(region=None, key=None, keyid=None, profile=None):
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
buckets = conn.list_buckets()
if not bool(buckets.get('Buckets')):
log.warning('No buckets found')
if 'ResponseMetadata' in buckets:
del buckets['ResponseMetadata']
return buckets
except ClientError as e:
return {'error': __utils__['boto3.get_error'](e)}
|
List all buckets owned by the authenticated sender of the request.
Returns list of buckets
CLI Example:
.. code-block:: yaml
Owner: {...}
Buckets:
- {...}
- {...}
|
def list_message_files (package, suffix=".mo"):
for fname in glob.glob("po/*" + suffix):
localename = os.path.splitext(os.path.basename(fname))[0]
domainname = "%s.mo" % package.lower()
yield (fname, os.path.join(
"share", "locale", localename, "LC_MESSAGES", domainname))
|
Return list of all found message files and their installation paths.
|
def enqueue_jobs(self):
self.log.debug('Checking for scheduled jobs')
jobs = self.get_jobs_to_queue()
for job in jobs:
self.enqueue_job(job)
self.connection.expire(self.scheduler_key, int(self._interval) + 10)
return jobs
|
Move scheduled jobs into queues.
|
def weld_str_strip(array):
obj_id, weld_obj = create_weld_object(array)
    weld_template = ...  # Weld IR template string missing from the source
weld_obj.weld_code = weld_template.format(array=obj_id)
return weld_obj
|
Strip whitespace from start and end of elements.
Note it currently only looks for whitespace (ASCII 32), not tabs or EOL.
Parameters
----------
array : numpy.ndarray or WeldObject
Input data.
Returns
-------
WeldObject
Representation of this computation.
|
def get(self, *args, **kwargs):
self.model = self.get_model(kwargs.get('id'))
result = yield self.model.fetch()
if not result:
LOGGER.debug('Not found')
self.not_found()
return
if not self.has_read_permission():
LOGGER.debug('Permission denied')
self.permission_denied()
return
self.add_headers()
self.finish(self.model_json())
|
Handle reading of the model
:param args:
:param kwargs:
|
def add(event, reactors, saltenv='base', test=None):
if isinstance(reactors, string_types):
reactors = [reactors]
sevent = salt.utils.event.get_event(
'master',
__opts__['sock_dir'],
__opts__['transport'],
opts=__opts__,
listen=True)
master_key = salt.utils.master.get_master_key('root', __opts__)
__jid_event__.fire_event({'event': event,
'reactors': reactors,
'key': master_key},
'salt/reactors/manage/add')
res = sevent.get_event(wait=30, tag='salt/reactors/manage/add-complete')
return res['result']
|
Add a new reactor
CLI Example:
.. code-block:: bash
salt-run reactor.add 'salt/cloud/*/destroyed' reactors='/srv/reactor/destroy/*.sls'
|
def _call_connection_lost_and_clean_up(self, exc):
self._state = _State.CLOSED
try:
self._protocol.connection_lost(exc)
finally:
self._rawsock.close()
if self._tls_conn is not None:
self._tls_conn.set_app_data(None)
self._tls_conn = None
self._rawsock = None
self._protocol = None
self._loop = None
|
Clean up all resources and call the protocol's connection_lost method.
|
def reset_parameter(**kwargs):
def _callback(env):
new_parameters = {}
for key, value in kwargs.items():
if key in ['num_class', 'num_classes',
'boosting', 'boost', 'boosting_type',
'metric', 'metrics', 'metric_types']:
raise RuntimeError("cannot reset {} during training".format(repr(key)))
if isinstance(value, list):
if len(value) != env.end_iteration - env.begin_iteration:
raise ValueError("Length of list {} has to equal to 'num_boost_round'."
.format(repr(key)))
new_param = value[env.iteration - env.begin_iteration]
else:
new_param = value(env.iteration - env.begin_iteration)
if new_param != env.params.get(key, None):
new_parameters[key] = new_param
if new_parameters:
env.model.reset_parameter(new_parameters)
env.params.update(new_parameters)
_callback.before_iteration = True
_callback.order = 10
return _callback
|
Create a callback that resets the parameter after the first iteration.
Note
----
The initial parameter still takes effect on the first iteration.
Parameters
----------
**kwargs : value should be list or function
List of parameters for each boosting round
or a customized function that calculates the parameter in terms of
current number of round (e.g. yields learning rate decay).
If list lst, parameter = lst[current_round].
If function func, parameter = func(current_round).
Returns
-------
callback : function
The callback that resets the parameter after the first iteration.
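A typical usage sketch with LightGBM, decaying the learning rate each round:
.. code-block:: python

    import lightgbm as lgb

    callbacks = [lgb.reset_parameter(
        learning_rate=lambda current_round: 0.1 * (0.99 ** current_round))]
    # lgb.train(params, train_set, num_boost_round=100, callbacks=callbacks)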
|
def x_select_cb(self, w, index):
try:
self.x_col = self.cols[index]
except IndexError as e:
self.logger.error(str(e))
else:
self.plot_two_columns(reset_xlimits=True)
|
Callback to set X-axis column.
|
def _get_user_info(self, access_token, id_token):
try:
unverified_header = jwt.get_unverified_header(id_token)
except jwt.JWTError:
raise AuthError('Unable to decode the Id token header')
if 'kid' not in unverified_header:
raise AuthError('Id token header missing RSA key ID')
rsa_key = None
for key in jwks["keys"]:
if key["kid"] == unverified_header["kid"]:
rsa_key = {
"kty": key["kty"],
"kid": key["kid"],
"use": key["use"],
"n": key["n"],
"e": key["e"]
}
break
if not rsa_key:
raise AuthError('Id token using unrecognised RSA key ID')
try:
user_info = jwt.decode(
id_token,
rsa_key,
algorithms=['RS256'],
audience=AUTH0_CLIENTID,
access_token=access_token,
issuer="https://"+AUTH0_DOMAIN+"/"
)
return user_info
except jwt.ExpiredSignatureError:
raise AuthError('Id token is expired')
except jwt.JWTClaimsError:
raise AuthError("Incorrect claims: please check the audience and issuer")
except jwt.JWTError:
raise AuthError("Invalid header: Unable to parse authentication")
|
Extracts the user info payload from the Id Token.
Example return value:
{
"at_hash": "<HASH>",
"aud": "<HASH>",
"email_verified": true,
"email": "[email protected]",
"exp": 1551259495,
"family_name": "Surname",
"given_name": "Firstname",
"https://sso.mozilla.com/claim/groups": [
"all_scm_level_1",
"all_scm_level_2",
"all_scm_level_3",
# ...
],
"iat": 1550654695,
"iss": "https://auth.mozilla.auth0.com/",
"name": "Firstname Surname",
"nickname": "Firstname Surname",
"nonce": "<HASH>",
"picture": "<GRAVATAR_URL>",
"sub": "ad|Mozilla-LDAP|fsurname",
"updated_at": "2019-02-20T09:24:55.449Z",
}
|
def deserialize_duration(attr):
if isinstance(attr, ET.Element):
attr = attr.text
try:
duration = isodate.parse_duration(attr)
except(ValueError, OverflowError, AttributeError) as err:
msg = "Cannot deserialize duration object."
raise_with_traceback(DeserializationError, msg, err)
else:
return duration
|
Deserialize ISO-8601 formatted string into TimeDelta object.
:param str attr: response string to be deserialized.
:rtype: TimeDelta
:raises: DeserializationError if string format invalid.
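For reference, the underlying ``isodate`` call behaves like this:
.. code-block:: python

    import isodate

    isodate.parse_duration('PT1H30M')   # -> datetime.timedelta(seconds=5400)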
|
def init(self, acct: Account, payer_acct: Account, gas_limit: int, gas_price: int) -> str:
func = InvokeFunction('init')
tx_hash = self.__sdk.get_network().send_neo_vm_transaction(self.__hex_contract_address, acct, payer_acct,
gas_limit, gas_price, func)
return tx_hash
|
This interface is used to call the init method of an OEP-4 smart contract,
which initializes the contract's parameters.
:param acct: an Account object used to sign the transaction.
:param payer_acct: an Account object used to pay for the transaction.
:param gas_limit: an int value that indicates the gas limit.
:param gas_price: an int value that indicates the gas price.
:return: the hexadecimal transaction hash value.
|
def _handle_prompt_command(self, buffer):
" When a command-prompt command is accepted. "
text = buffer.text
prompt_command = self.prompt_command
self.pymux.leave_command_mode(append_to_history=True)
self.pymux.handle_command(prompt_command.replace('%%', text))
|
When a command-prompt command is accepted.
|
def get_color(self,callb=None):
response = self.req_with_resp(LightGet, LightState, callb=callb)
return self.color
|
Convenience method to request the colour status from the device
This method will check whether the value has already been retrieved from the device,
if so, it will simply return it. If not, it will request the information from the device
and request that callb be executed when a response is received. The default callback
will simply cache the value.
:param callb: Callable to be used when the response is received. If not set,
self.resp_set_label will be used.
:type callb: callable
:returns: The cached value
:rtype: int
|
def load_average(self):
with io.open(self.load_average_file, 'r') as f:
file_columns = f.readline().strip().split()
return float(file_columns[self._load_average_file_column])
|
Returns the current load average.
|
def get_change(self, change_id):
uri = '/%s/change/%s' % (self.Version, change_id)
response = self.make_request('GET', uri)
body = response.read()
boto.log.debug(body)
if response.status >= 300:
raise exception.DNSServerError(response.status,
response.reason,
body)
e = boto.jsonresponse.Element()
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
return e
|
Get information about a proposed set of changes, as submitted
by the change_rrsets method.
Returns a Python data structure with status information about the
changes.
:type change_id: str
:param change_id: The unique identifier for the set of changes.
This ID is returned in the response to the change_rrsets method.
|
def list_domains(self, service_id, version_number):
content = self._fetch("/service/%s/version/%d/domain" % (service_id, version_number))
return map(lambda x: FastlyDomain(self, x), content)
|
List the domains for a particular service and version.
|
def remove_item(self, item):
for idx, _item in enumerate(self.items):
if item == _item:
del self.items[idx]
return True
return False
|
Remove the specified item from the menu.
Args:
item (MenuItem): the item to be removed.
Returns:
bool: True if the item was removed; False otherwise.
|
def display_cached_string(self, bi):
if not isinstance(bi, SConfBuildInfo):
SCons.Warnings.warn(SConfWarning,
"The stored build information has an unexpected class: %s" % bi.__class__)
else:
self.display("The original builder output was:\n" +
(" |" + str(bi.string)).replace("\n", "\n |"))
|
Logs the original builder messages, given the SConfBuildInfo instance
bi.
|
def persistant_info(request, message, extra_tags='', fail_silently=False, *args, **kwargs):
add_message(request, INFO_PERSISTENT, message, extra_tags=extra_tags,
fail_silently=fail_silently, *args, **kwargs)
|
Adds a persistent message with the ``INFO`` level.
|
def computeAccuracyEnding(predictions, truths, iterations,
resets=None, randoms=None, num=None,
sequenceCounter=None):
accuracy = []
numIteration = []
numSequences = []
for i in xrange(len(predictions) - 1):
if num is not None and i > num:
continue
if truths[i] is None:
continue
if resets is not None or randoms is not None:
if not (resets[i+1] or randoms[i+1]):
continue
correct = truths[i] is None or truths[i] in predictions[i]
accuracy.append(correct)
numSequences.append(sequenceCounter[i])
numIteration.append(iterations[i])
return (accuracy, numIteration, numSequences)
|
Compute accuracy on the sequence ending
|
def getNewestCompleteTime(self):
bldrid = yield self.getBuilderId()
completed = yield self.master.data.get(
('builders', bldrid, 'buildrequests'),
[resultspec.Filter('complete', 'eq', [False])],
order=['-complete_at'], limit=1)
if completed:
return completed[0]['complete_at']
else:
return None
|
Returns the complete_at of the latest completed build request for
this builder, or None if there are no such build requests.
@returns: datetime instance or None, via Deferred
|
def _get_response_body_from_gzipped_content(self, url, response):
try:
gzipper = gzip.GzipFile(fileobj=six.BytesIO(response.text))
LOG.debug(self._("Received compressed response for "
"url %(url)s."), {'url': url})
uncompressed_string = (gzipper.read().decode('UTF-8'))
response_body = json.loads(uncompressed_string)
except Exception as e:
LOG.debug(
self._("Error occurred while decompressing body. "
"Got invalid response '%(response)s' for "
"url %(url)s: %(error)s"),
{'url': url, 'response': response.text, 'error': e})
raise exception.IloError(e)
return response_body
|
Get the response body from gzipped content
Try to decode as gzip (we should check the headers for
Content-Encoding=gzip)
if response.headers['content-encoding'] == "gzip":
...
:param url: the url for which response was sent
:type url: str
:param response: response content object, probably gzipped
:type response: object
:returns: returns response body
:raises IloError: if the content is **not** gzipped
|
def parent_resources(cls):
parent = cls.parent_resource
parents = [parent]
try:
while True:
parent = parent.parent_resource
parents.append(parent)
except AttributeError:
pass
parents.reverse()
return parents
|
Get a list of parent resources, starting from the Document
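A minimal sketch with hypothetical resource classes:
.. code-block:: python

    class Document:
        pass  # no parent_resource: the walk stops here

    class Post:
        parent_resource = Document

    class Comment:
        parent_resource = Post

    # parent_resources on Comment walks Post -> Document, then reverses:
    # -> [Document, Post]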
|
def _update_staticmethod(self, oldsm, newsm):
self._update(None, None, oldsm.__get__(0), newsm.__get__(0))
|
Update a staticmethod.
|
def _prepare(self, serialized_obj):
nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)
encrypted = self.__safe.encrypt(serialized_obj, nonce)
signed = self.__signing_key.sign(encrypted)
return signed
|
Prepare the object to be sent over the untrusted channel.
|
def _id(self):
return (self.__class__, self.number_of_needles, self.needle_positions,
self.left_end_needle)
|
What this object is equal to.
|
def sync_readmes():
print("syncing README")
with open("README.md", 'r') as reader:
file_text = reader.read()
with open("README", 'w') as writer:
writer.write(file_text)
|
Copies README.md into README for PyPI documentation.
|
def andrew(S):
S.sort()
top = []
bot = []
for p in S:
while len(top) >= 2 and not left_turn(p, top[-1], top[-2]):
top.pop()
top.append(p)
while len(bot) >= 2 and not left_turn(bot[-2], bot[-1], p):
bot.pop()
bot.append(p)
return bot[:-1] + top[:0:-1]
|
Convex hull by Andrew's monotone chain algorithm
:param S: list of points as coordinate pairs
:requires: S has at least 2 points
:returns: list of points of the convex hull
:complexity: `O(n log n)`
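Usage sketch, assuming ``left_turn(a, b, c)`` tests a strict counterclockwise turn:
.. code-block:: python

    S = [(0, 0), (1, 1), (2, 0), (1, -1), (1, 0)]
    andrew(S)   # -> [(0, 0), (1, -1), (2, 0), (1, 1)]; (1, 0) is interior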
|
def last_modified():
files = model.FileFingerprint.select().order_by(
orm.desc(model.FileFingerprint.file_mtime))
for file in files:
return file.file_mtime, file.file_path
return None, None
|
Returns the mtime and path of the most recently modified file.
|
def extended_arg_patterns(self):
for arg in self._arg_iterator(self.args):
if isinstance(arg, Pattern):
if arg.mode > self.single:
while True:
yield arg
else:
yield arg
else:
yield arg
|
Iterator over patterns for positional arguments to be matched
This yields the elements of :attr:`args`, extended by their `mode`
value
|
def generators_from_logdir(logdir):
subdirs = io_wrapper.GetLogdirSubdirectories(logdir)
generators = [
itertools.chain(*[
generator_from_event_file(os.path.join(subdir, f))
for f in tf.io.gfile.listdir(subdir)
if io_wrapper.IsTensorFlowEventsFile(os.path.join(subdir, f))
]) for subdir in subdirs
]
return generators
|
Returns a list of event generators for subdirectories with event files.
The number of generators returned should equal the number of directories
within logdir that contain event files. If only logdir contains event files,
returns a list of length one.
Args:
logdir: A log directory that contains event files.
Returns:
List of event generators for each subdirectory with event files.
|
def get_all_articles(self, params=None):
if not params:
params = {}
return self._iterate_through_pages(self.get_articles_per_page, resource=ARTICLES, **{'params': params})
|
Get all articles
This will iterate over all pages until it gets all elements.
So if the rate limit is exceeded, it will throw an exception and you will get nothing.
:param params: search params
:return: list
|
async def run_jog(data):
axis = data.get('axis')
direction = data.get('direction')
step = data.get('step')
if axis not in ('x', 'y', 'z'):
message = '"axis" must be "x", "y", or "z"'
status = 400
elif direction not in (-1, 1):
message = '"direction" must be -1 or 1'
status = 400
elif step is None:
message = '"step" must be specified'
status = 400
else:
position = jog(
axis,
direction,
step,
session.adapter,
session.current_mount,
session.cp)
message = 'Jogged to {}'.format(position)
status = 200
return web.json_response({'message': message}, status=status)
|
Allow the user to jog the selected pipette around the deck map
:param data: Information obtained from a POST request.
The content type is application/json
The correct packet form should be as follows:
{
'token': UUID token from current session start
'command': 'jog'
'axis': The current axis you wish to move
'direction': The direction you wish to move (+ or -)
'step': The increment you wish to move
}
:return: The position you are moving to based on axis, direction, step
given by the user.
|
def resid_dev(self, endog, mu, scale=1.):
return (endog - mu) / np.sqrt(self.variance(mu)) / scale
|
Gaussian deviance residuals
Parameters
-----------
endog : array-like
Endogenous response variable
mu : array-like
Fitted mean response variable
scale : float, optional
An optional argument to divide the residuals by scale. The default
is 1.
Returns
-------
resid_dev : array
Deviance residuals as defined below
|
def create_conda_env(sandbox_dir, env_name, dependencies, options=()):
env_dir = os.path.join(sandbox_dir, env_name)
cmdline = ["conda", "create", "--yes", "--copy", "--quiet", "-p", env_dir] + list(options) + dependencies
log.info("Creating conda environment: ")
log.info(" command line: %s", cmdline)
subprocess.check_call(cmdline, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
log.debug("Environment created")
return env_dir, env_name
|
Create a conda environment inside the current sandbox for the given list of dependencies and options.
Parameters
----------
sandbox_dir : str
env_name : str
dependencies : list
List of conda specs
options
List of additional options to pass to conda. Things like ["-c", "conda-forge"]
Returns
-------
(env_dir, env_name)
|
def get_substituted_contents(contents, substitutions):
result = contents
for sub in substitutions:
result = sub.apply_and_get_result(result)
return result
|
Perform a list of substitutions and return the result.
contents: the starting string on which to begin substitutions
substitutions: list of Substitution objects to call, in order, with the
result of the previous substitution.
|
def tvdb_series_id(token, id_tvdb, lang="en", cache=True):
if lang not in TVDB_LANGUAGE_CODES:
raise MapiProviderException(
"'lang' must be one of %s" % ",".join(TVDB_LANGUAGE_CODES)
)
try:
url = "https://api.thetvdb.com/series/%d" % int(id_tvdb)
except ValueError:
raise MapiProviderException("id_tvdb must be numeric")
headers = {"Accept-Language": lang, "Authorization": "Bearer %s" % token}
status, content = _request_json(url, headers=headers, cache=cache)
if status == 401:
raise MapiProviderException("invalid token")
elif status == 404:
raise MapiNotFoundException
elif status != 200 or not content.get("data"):
raise MapiNetworkException("TVDb down or unavailable?")
return content
|
Returns a series record that contains all information known about a
particular series id.
Online docs: api.thetvdb.com/swagger#!/Series/get_series_id=
|
def from_dynacRepr(cls, pynacRepr):
L = float(pynacRepr[1][0][0])
B = float(pynacRepr[1][0][1])
aperRadius = float(pynacRepr[1][0][2])
return cls(L, B, aperRadius)
|
Construct a ``Quad`` instance from the Pynac lattice element
|
def subscriberSocket(self, host, port, filt=b'', conflate=False):
socket = self._context.socket(zmq.SUB)
if conflate:
socket.setsockopt(zmq.CONFLATE, 1)
socket.connect(self.tcpAddress(host, port))
socket.setsockopt(zmq.SUBSCRIBE, filt)
return socket
|
Create a SUB-style socket for data receivers
|
def items(*args, **kwargs):
if args:
return item(*args)
pillarenv = kwargs.get('pillarenv')
if pillarenv is None:
if __opts__.get('pillarenv_from_saltenv', False):
pillarenv = kwargs.get('saltenv') or __opts__['saltenv']
else:
pillarenv = __opts__['pillarenv']
pillar_override = kwargs.get('pillar')
pillar_enc = kwargs.get('pillar_enc')
if pillar_override and pillar_enc:
try:
pillar_override = salt.utils.crypt.decrypt(
pillar_override,
pillar_enc,
translate_newlines=True,
opts=__opts__,
valid_rend=__opts__['decrypt_pillar_renderers'])
except Exception as exc:
raise CommandExecutionError(
'Failed to decrypt pillar override: {0}'.format(exc)
)
pillar = salt.pillar.get_pillar(
__opts__,
__grains__,
__opts__['id'],
pillar_override=pillar_override,
pillarenv=pillarenv)
return pillar.compile_pillar()
|
Calls the master for a fresh pillar and generates the pillar data on the
fly
Contrast with :py:func:`raw` which returns the pillar data that is
currently loaded into the minion.
pillar
If specified, allows for a dictionary of pillar data to be made
available to pillar and ext_pillar rendering. These pillar variables
will also override any variables of the same name in pillar or
ext_pillar.
.. versionadded:: 2015.5.0
pillar_enc
If specified, the data passed in the ``pillar`` argument will be passed
through this renderer to decrypt it.
.. note::
This will decrypt on the minion side, so the specified renderer
must be set up on the minion for this to work. Alternatively,
pillar data can be decrypted master-side. For more information, see
the :ref:`Pillar Encryption <pillar-encryption>` documentation.
Pillar data that is decrypted master-side, is not decrypted until
the end of pillar compilation though, so minion-side decryption
will be necessary if the encrypted pillar data must be made
available in a decrypted state during pillar/ext_pillar rendering.
.. versionadded:: 2017.7.0
pillarenv
Pass a specific pillar environment from which to compile pillar data.
If not specified, then the minion's :conf_minion:`pillarenv` option is
not used, and if that also is not specified then all configured pillar
environments will be merged into a single pillar dictionary and
returned.
.. versionadded:: 2016.11.2
saltenv
Included only for compatibility with
:conf_minion:`pillarenv_from_saltenv`, and is otherwise ignored.
CLI Example:
.. code-block:: bash
salt '*' pillar.items
|
def from_cap(cls, theta, lwin, clat=None, clon=None, nwin=None,
theta_degrees=True, coord_degrees=True, dj_matrix=None,
weights=None):
if theta_degrees:
tapers, eigenvalues, taper_order = _shtools.SHReturnTapers(
_np.radians(theta), lwin)
else:
tapers, eigenvalues, taper_order = _shtools.SHReturnTapers(
theta, lwin)
return SHWindowCap(theta, tapers, eigenvalues, taper_order,
clat, clon, nwin, theta_degrees, coord_degrees,
dj_matrix, weights, copy=False)
|
Construct spherical cap localization windows.
Usage
-----
x = SHWindow.from_cap(theta, lwin, [clat, clon, nwin, theta_degrees,
coord_degrees, dj_matrix, weights])
Returns
-------
x : SHWindow class instance
Parameters
----------
theta : float
Angular radius of the spherical cap localization domain (default
in degrees).
lwin : int
Spherical harmonic bandwidth of the localization windows.
clat, clon : float, optional, default = None
Latitude and longitude of the center of the rotated spherical cap
localization windows (default in degrees).
nwin : int, optional, default (lwin+1)**2
Number of localization windows.
theta_degrees : bool, optional, default = True
True if theta is in degrees.
coord_degrees : bool, optional, default = True
True if clat and clon are in degrees.
dj_matrix : ndarray, optional, default = None
The djpi2 rotation matrix computed by a call to djpi2.
weights : ndarray, optional, default = None
Taper weights used with the multitaper spectral analyses.
|
def createFile(dataArray=None, outfile=None, header=None):
assert(dataArray is not None), "Please supply a data array for createFiles"
try:
fitsobj = fits.HDUList()
if header is not None:
try:
del(header['NAXIS1'])
del(header['NAXIS2'])
if 'XTENSION' in header:
del(header['XTENSION'])
if 'EXTNAME' in header:
del(header['EXTNAME'])
if 'EXTVER' in header:
del(header['EXTVER'])
except KeyError:
pass
if 'NEXTEND' in header:
header['NEXTEND'] = 0
hdu = fits.PrimaryHDU(data=dataArray, header=header)
try:
del hdu.header['PCOUNT']
del hdu.header['GCOUNT']
except KeyError:
pass
else:
hdu = fits.PrimaryHDU(data=dataArray)
fitsobj.append(hdu)
if outfile is not None:
fitsobj.writeto(outfile)
finally:
fitsobj.close()
if outfile is not None:
del fitsobj
fitsobj = None
return fitsobj
|
Create a simple fits file for the given data array and header.
Returns either the FITS object in-memory when outfile==None or
None when the FITS file was written out to a file.
|
def _indicator_table():
from xclim import temperature, precip
import inspect
inds = _get_indicators([temperature, precip])
table = []
for ind in inds:
args = {name: p.default for (name, p) in ind._sig.parameters.items() if p.default != inspect._empty}
table.append(ind.json(args))
return table
|
Return a sequence of dicts storing metadata about all available indices.
|
def removeDataFrameRows(self, rows):
if not self.editable:
return False
if rows:
position = min(rows)
count = len(rows)
self.beginRemoveRows(QtCore.QModelIndex(), position, position + count - 1)
removedAny = False
for idx, line in self._dataFrame.iterrows():
if idx in rows:
removedAny = True
self._dataFrame.drop(idx, inplace=True)
if not removedAny:
return False
self._dataFrame.reset_index(inplace=True, drop=True)
self.endRemoveRows()
return True
return False
|
Removes rows from the dataframe.
:param rows: (list)
of row indexes to remove.
:return: (bool)
True on success, False on failure.
|
def is_quota_exceeded(self) -> bool:
    if self.quota and self._url_table is not None:
        return self.size >= self.quota and \
            self._url_table.get_root_url_todo_count() == 0
    return False
|
Return whether the quota is exceeded.
|
def mine_items(self, identifiers, params=None, callback=None):
        params = params if params else {'dontcache': 1}
requests = metadata_requests(identifiers, params, callback, self)
yield from self.mine(requests)
|
Mine metadata from Archive.org items.
:param identifiers: Archive.org identifiers to be mined.
:type identifiers: iterable
:param params: URL parameters to send with each metadata
request.
:type params: dict
:param callback: A callback function to be called on each
:py:class:`aiohttp.client.ClientResponse`.
:type callback: func
|
def sha1_digest(instr):
if six.PY3:
b = salt.utils.stringutils.to_bytes(instr)
return hashlib.sha1(b).hexdigest()
return hashlib.sha1(instr).hexdigest()
|
Generate a SHA1 hash of a given string.
|
def summary(self) -> str:
if not self.translations:
self.update()
return summary.metar(self.translations)
|
Condensed report summary created from translations
|
def put_summary(self, summary):
if isinstance(summary, six.binary_type):
summary = tf.Summary.FromString(summary)
assert isinstance(summary, tf.Summary), type(summary)
for val in summary.value:
if val.WhichOneof('value') == 'simple_value':
val.tag = re.sub('tower[0-9]+/', '', val.tag)
suffix = '-summary'
if val.tag.endswith(suffix):
val.tag = val.tag[:-len(suffix)]
self._dispatch(lambda m: m.process_scalar(val.tag, val.simple_value))
self._dispatch(lambda m: m.process_summary(summary))
|
Put a `tf.Summary`.
|
def wait_for_healthy(
raiden: 'RaidenService',
node_address: Address,
retry_timeout: float,
) -> None:
network_statuses = views.get_networkstatuses(
views.state_from_raiden(raiden),
)
while network_statuses.get(node_address) != NODE_NETWORK_REACHABLE:
gevent.sleep(retry_timeout)
network_statuses = views.get_networkstatuses(
views.state_from_raiden(raiden),
)
|
Wait until `node_address` becomes healthy.
Note:
This does not time out, use gevent.Timeout.
|
def create_url(url_protocol, host, api, url_params):
is_batch = url_params.pop("batch", None)
apis = url_params.pop("apis", None)
version = url_params.pop("version", None) or url_params.pop("v", None)
method = url_params.pop('method', None)
host_url_seg = url_protocol + "://%s" % host
api_url_seg = "/%s" % api
batch_url_seg = "/batch" if is_batch else ""
method_url_seg = "/%s" % method if method else ""
params = {}
if apis:
params["apis"] = ",".join(apis)
if version:
params["version"] = version
url = host_url_seg + api_url_seg + batch_url_seg + method_url_seg
if params:
url += "?" + urlencode(params)
return url
|
Generate the proper url for sending off data for analysis
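An illustrative call with hypothetical host and api values:
.. code-block:: python

    create_url('https', 'api.example.com', 'analyze',
               {'batch': True, 'version': '2', 'method': 'submit'})
    # -> 'https://api.example.com/analyze/batch/submit?version=2'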
|
def get_creator_by_name(name):
return {'docker(container)': Container.creator,
'shell': Bash.creator, 'docker(image)': Image.creator,
'python': Script.creator, 'packer': Packer.creator,
'ansible(simple)': Ansible.creator}[name]
|
Get creator function by name.
Args:
name (str): name of the creator function.
Returns:
function: creator function.
|
def get_utc_date(entry):
if entry['numeric_date_stamp'] == '0':
entry['numeric_date_stamp_utc'] = '0'
return entry
else:
if '.' in entry['numeric_date_stamp']:
t = datetime.strptime(entry['numeric_date_stamp'],
'%Y%m%d%H%M%S.%f')
else:
t = datetime.strptime(entry['numeric_date_stamp'],
'%Y%m%d%H%M%S')
tdelta = timedelta(hours = int(entry['tzone'][1:3]),
minutes = int(entry['tzone'][3:5]))
if entry['tzone'][0] == '-':
ut = t + tdelta
else:
ut = t - tdelta
entry['numeric_date_stamp_utc'] = ut.strftime('%Y%m%d%H%M%S.%f')
return entry
|
Return datestamp converted to UTC
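A worked example: a ``-0500`` zone means local time trails UTC by five hours, so the delta is added:
.. code-block:: python

    entry = {'numeric_date_stamp': '20190101120000', 'tzone': '-0500'}
    get_utc_date(entry)
    # entry['numeric_date_stamp_utc'] -> '20190101170000.000000'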
|
def _retrieve_indices(cols):
if isinstance(cols, int):
return [cols]
elif isinstance(cols, slice):
start = cols.start if cols.start else 0
stop = cols.stop
step = cols.step if cols.step else 1
return list(range(start, stop, step))
elif isinstance(cols, list) and cols:
if isinstance(cols[0], bool):
return np.flatnonzero(np.asarray(cols))
elif isinstance(cols[0], int):
return cols
else:
raise TypeError('No valid column specifier. Only a scalar, list or slice of all'
'integers or a boolean mask are allowed.')
|
Retrieve a list of indices corresponding to the provided column specification.
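Behavior sketch for each accepted specifier:
.. code-block:: python

    _retrieve_indices(3)                     # -> [3]
    _retrieve_indices(slice(1, 7, 2))        # -> [1, 3, 5]
    _retrieve_indices([True, False, True])   # -> array([0, 2])
    _retrieve_indices([0, 4])                # -> [0, 4]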
|
def save_form(self, form):
force = self.get_force_instance_values()
if force:
for k, v in force.items():
setattr(form.instance, k, v)
should_add = False
if self.parent_object:
m2ms = [f.name for f in form.instance._meta.many_to_many]
m2ms.extend(
[f.field.rel.related_name for f in
[
f for f in form.instance._meta.get_fields(include_hidden=True)
if f.many_to_many and f.auto_created
]
]
)
if self.parent_field in m2ms:
should_add = True
else:
try:
form.instance._meta.get_field(self.parent_field)
setattr(form.instance, self.parent_field,
self.parent_object)
except FieldDoesNotExist:
pass
obj = form.save()
if should_add:
getattr(obj, self.parent_field).add(self.parent_object)
return obj
|
Save a valid form. If there is a parent attribute,
this will make sure that the parent object is added
to the saved object. Either as a relationship before
saving or in the case of many to many relations after
saving. Any forced instance values are set as well.
Returns the saved object.
|
def get_db(db, ip='localhost', port=27017, user=None, password=None):
if platform.system().lower() == 'darwin':
connect = False
else:
connect = True
if user and password:
import urllib
pwd = urllib.quote_plus(password)
uri = 'mongodb://{}:{}@{}:{}'.format(user, pwd, ip, port)
conn = MongoClient(uri, connect=connect)
else:
conn = MongoClient(ip, port, connect=connect)
return conn[db]
|
Returns a pymongo Database object.
.. note:
Both ``user`` and ``password`` are required when connecting to a MongoDB
database that has authentication enabled.
Arguments:
db (str): Name of the MongoDB database. Required.
ip (str): IP address of the MongoDB server. Default is ``localhost``.
port (int): Port of the MongoDB server. Default is ``27017``.
user (str): Username, if authentication is enabled on the MongoDB database.
Default is ``None``, which results in requesting the connection
without authentication.
password (str): Password, if authentication is enabled on the MongoDB database.
Default is ``None``, which results in requesting the connection
without authentication.
|
def remove(self, *labelvalues):
if not self._labelnames:
raise ValueError('No label names were set when constructing %s' % self)
if len(labelvalues) != len(self._labelnames):
raise ValueError('Incorrect label count (expected %d, got %s)' % (len(self._labelnames), labelvalues))
labelvalues = tuple(unicode(l) for l in labelvalues)
with self._lock:
del self._metrics[labelvalues]
|
Remove the given labelset from the metric.
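Usage sketch with ``prometheus_client``:
.. code-block:: python

    from prometheus_client import Counter

    c = Counter('requests_total', 'Total requests', ['method'])
    c.labels('GET').inc()
    c.remove('GET')   # drops the method="GET" child from the metric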
|
def add_action(self, action, add_to_toolbar=True, add_to_legend=False):
self.actions.append(action)
self.iface.addPluginToMenu(self.tr('InaSAFE'), action)
if add_to_toolbar:
self.toolbar.addAction(action)
if add_to_legend:
self.iface.addCustomActionForLayerType(
action,
self.tr('InaSAFE'),
QgsMapLayer.VectorLayer,
True)
self.iface.addCustomActionForLayerType(
action,
self.tr('InaSAFE'),
QgsMapLayer.RasterLayer,
True)
|
Add a toolbar icon to the InaSAFE toolbar.
:param action: The action that should be added to the toolbar.
:type action: QAction
:param add_to_toolbar: Flag indicating whether the action should also
be added to the InaSAFE toolbar. Defaults to True.
:type add_to_toolbar: bool
:param add_to_legend: Flag indicating whether the action should also
be added to the layer legend menu. Default to False.
:type add_to_legend: bool
|
def RegisterKey(cls, key,
getter=None, setter=None, deleter=None, lister=None):
key = key.lower()
if getter is not None:
cls.Get[key] = getter
if setter is not None:
cls.Set[key] = setter
if deleter is not None:
cls.Delete[key] = deleter
if lister is not None:
cls.List[key] = lister
|
Register a new key mapping.
A key mapping is four functions, a getter, setter, deleter,
and lister. The key may be either a string or a glob pattern.
The getter, deleter, and lister receive an MP4Tags instance
and the requested key name. The setter also receives the
desired value, which will be a list of strings.
The getter, setter, and deleter are used to implement __getitem__,
__setitem__, and __delitem__.
The lister is used to implement keys(). It should return a
list of keys that are actually in the MP4 instance, provided
by its associated getter.
|
def disarm(self, wait=True, timeout=None):
self.armed = False
if wait:
self.wait_for(lambda: not self.armed, timeout=timeout,
errmsg='failed to disarm vehicle')
|
Disarm the vehicle.
If wait is True, wait for the disarm operation to complete before
returning. If timeout is nonzero, raise a TimeoutError if the
vehicle has not disarmed after timeout seconds.
|
def get_projects(osa_repo_dir, commit):
repo = Repo(osa_repo_dir)
checkout(repo, commit)
yaml_files = glob.glob(
'{0}/playbooks/defaults/repo_packages/*.yml'.format(osa_repo_dir)
)
yaml_parsed = []
for yaml_file in yaml_files:
with open(yaml_file, 'r') as f:
yaml_parsed.append(yaml.load(f))
merged_dicts = {k: v for d in yaml_parsed for k, v in d.items()}
return normalize_yaml(merged_dicts)
|
Get all projects from multiple YAML files.
|
def __is_function_action(self, action_function):
is_function_action = True
if not hasattr(action_function, '__call__'):
return False
try:
for end_string, context in action_function():
if not isinstance(end_string, basestring):
self.log_error("Action function must return end of filename as a string as first argument")
if not isinstance(context, dict):
self.log_error("Action function must return context as a dict as second argument")
break
except Exception:
is_function_action = False
return is_function_action
|
Detect if given function is really an action function.
Args:
action_function: Function to test.
Note:
We don't care whether the variable refers to a function but rather whether it is callable or not.
|
def links(self):
if not self._responses:
return None
if 'Link' in self._responses[-1].headers:
links = []
for l in headers.parse_link(self._responses[-1].headers['Link']):
link = {'target': l.target}
link.update({k: v for (k, v) in l.parameters})
links.append(link)
return links
|
Return the parsed link header if it was set, returning a list of
the links as dicts.
:rtype: list(dict()) or None
|
def kscale(matrix, k=7, dists=None):
dists = (kdists(matrix, k=k) if dists is None else dists)
scale = dists.dot(dists.T)
return scale
|
Returns the local scale based on the k-th nearest neighbour
|
def get_iex_dividends(start=None, **kwargs):
import warnings
warnings.warn(WNG_MSG % ("get_iex_dividends", "refdata.get_iex_dividends"))
return Dividends(start=start, **kwargs).fetch()
|
MOVED to iexfinance.refdata.get_iex_dividends
|
def set_verbosity(cls, verbosity):
if verbosity > 0:
logger = KittyObject.get_logger()
levels = [logging.DEBUG]
verbosity = min(verbosity, len(levels)) - 1
logger.setLevel(levels[verbosity])
|
Set verbosity of logger
:param verbosity: verbosity level. Currently, only 1 (logging.DEBUG) is supported.
|
def _mark_received(self, tsn):
if uint32_gte(self._last_received_tsn, tsn) or tsn in self._sack_misordered:
self._sack_duplicates.append(tsn)
return True
self._sack_misordered.add(tsn)
for tsn in sorted(self._sack_misordered):
if tsn == tsn_plus_one(self._last_received_tsn):
self._last_received_tsn = tsn
else:
break
def is_obsolete(x):
return uint32_gt(x, self._last_received_tsn)
self._sack_duplicates = list(filter(is_obsolete, self._sack_duplicates))
self._sack_misordered = set(filter(is_obsolete, self._sack_misordered))
|
Mark an incoming data TSN as received.
|
def GetRpcServer(options):
rpc_server_class = HttpRpcServer
def GetUserCredentials():
global global_status
st = global_status
global_status = None
email = options.email
if email is None:
email = GetEmail("Email (login for uploading to %s)" % options.server)
password = getpass.getpass("Password for %s: " % email)
global_status = st
return (email, password)
host = (options.host or options.server).lower()
if host == "localhost" or host.startswith("localhost:"):
email = options.email
if email is None:
email = "[email protected]"
logging.info("Using debug user %s. Override with --email" % email)
server = rpc_server_class(
options.server,
lambda: (email, "password"),
host_override=options.host,
extra_headers={"Cookie": 'dev_appserver_login="%s:False"' % email},
save_cookies=options.save_cookies)
server.authenticated = True
return server
return rpc_server_class(options.server, GetUserCredentials,
host_override=options.host, save_cookies=options.save_cookies)
|
Returns an instance of an AbstractRpcServer.
Returns:
A new AbstractRpcServer, on which RPC calls can be made.
|
def get_playcount(self):
return _number(
_extract(
self._request(self.ws_prefix + ".getInfo", cacheable=True), "playcount"
)
)
|
Returns the number of plays on the network
|
def get_default_value(self):
default = self.default_value
if isinstance(default, collections.Callable):
default = default()
return default
|
Return the default value.
|
def list(cls, name, parent=None, interleave=None):
node = cls.leaf_list(name, parent, interleave=interleave)
node.keys = []
node.keymap = {}
return node
|
Create _list_ node for a list.
|
def setSignalHeader(self, edfsignal, channel_info):
if edfsignal < 0 or edfsignal > self.n_channels:
raise ChannelDoesNotExist(edfsignal)
self.channels[edfsignal] = channel_info
self.update_header()
|
Sets the parameter for signal edfsignal.
channel_info should be a dict with
these values:
'label' : channel label (string, <= 16 characters, must be unique)
'dimension' : physical dimension (e.g., mV) (string, <= 8 characters)
'sample_rate' : sample frequency in hertz (int)
'physical_max' : maximum physical value (float)
'physical_min' : minimum physical value (float)
'digital_max' : maximum digital value (int, -2**15 <= x < 2**15)
'digital_min' : minimum digital value (int, -2**15 <= x < 2**15)
|
def remove_element_attributes(elem_to_parse, *args):
element = get_element(elem_to_parse)
if element is None:
return element
if len(args):
attribs = element.attrib
return {key: attribs.pop(key) for key in args if key in attribs}
return {}
|
Removes the specified keys from the element's attributes, and
returns a dict containing the attributes that have been removed.
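A usage sketch with a plain ElementTree element (assuming ``get_element`` passes an Element through):
.. code-block:: python

    import xml.etree.ElementTree as ET

    elem = ET.fromstring('<node a="1" b="2" c="3"/>')
    remove_element_attributes(elem, 'a', 'c')   # -> {'a': '1', 'c': '3'}
    elem.attrib                                 # -> {'b': '2'}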
|
def _get_call_names_helper(node):
if isinstance(node, ast.Name):
if node.id not in BLACK_LISTED_CALL_NAMES:
yield node.id
elif isinstance(node, ast.Subscript):
yield from _get_call_names_helper(node.value)
elif isinstance(node, ast.Str):
yield node.s
elif isinstance(node, ast.Attribute):
yield node.attr
yield from _get_call_names_helper(node.value)
|
Recursively finds all function names.
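A sketch of the traversal on an attribute chain (assuming none of the names are blacklisted):
.. code-block:: python

    import ast

    func = ast.parse('db.session.query(x)').body[0].value.func
    list(_get_call_names_helper(func))   # -> ['query', 'session', 'db']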
|