Unnamed: 0 | code | docstring
---|---|---|
27,500 |
def openSourceWindow(self, current_class, method=None):
log.debug("openSourceWindow for %s" % current_class)
sourcewin = self.getMeOpenedWindowIfExists(current_class.current_title + "(S)")
if not sourcewin:
current_filename = self.session.get_filename_by_class(current_class)
current_digest = self.session.get_digest_by_class(current_class)
sourcewin = SourceWindow(win=self,
current_class=current_class,
current_title=current_class.current_title + "(S)",
current_filename=current_filename,
current_digest=current_digest,
session=self.session)
sourcewin.reload_java_sources()
self.central.addTab(sourcewin, sourcewin.title)
self.central.setTabToolTip(self.central.indexOf(sourcewin),
sourcewin.title)
if method:
sourcewin.browse_to_method(method)
self.central.setCurrentWidget(sourcewin)
|
Main function to open a decompiled source window.
It checks whether the window is already opened and, if so, switches to that tab;
otherwise, it initializes a new window.
|
27,501 |
def _CalculateYLines(self, dists):
tot_dist = sum(dists)
if tot_dist > 0:
pixel_dist = [float(d * (self._gheight-20))/tot_dist for d in dists]
pixel_grid = [0]+[int(pd + sum(pixel_dist[0:i])) for i,pd in
enumerate(pixel_dist)]
else:
pixel_grid = []
return pixel_grid
|
Builds a list with y-coordinates for the horizontal lines in the graph.
Args:
# One integer for each pair of stations
# indicating the approximate distance
dists: [0,33,140, ... ,X]
Returns:
# One integer y-coordinate for each station normalized between
# 0 and X, where X is the height of the graph in pixels
[0, 33, 140, ... , X]
|
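The normalization in `_CalculateYLines` is easy to check by hand. Below is a self-contained sketch of the same computation (the `gheight` argument stands in for the instance's `self._gheight`; the 20 px margin matches the code above):

```python
def calculate_y_lines(dists, gheight=300):
    """Standalone restatement of _CalculateYLines for experimentation."""
    tot_dist = sum(dists)
    if tot_dist <= 0:
        return []
    # Scale each inter-station distance to the drawable height (gheight - 20).
    pixel_dist = [float(d * (gheight - 20)) / tot_dist for d in dists]
    # Cumulative sums turn segment lengths into one y-coordinate per station.
    return [0] + [int(pd + sum(pixel_dist[0:i])) for i, pd in enumerate(pixel_dist)]

print(calculate_y_lines([0, 33, 140, 107], gheight=300))  # [0, 0, 33, 173, 280]
```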
27,502 |
def node_path_transforms(self, node):
a, b = self.node_path(node)
return ([n.transform for n in a[:-1]] +
[n.transform.inverse for n in b])[::-1]
|
Return the list of transforms along the path to another node.
The transforms are listed in reverse order, such that the last
transform should be applied first when mapping from this node to
the other.
Parameters
----------
node : instance of Node
The other node.
Returns
-------
transforms : list
A list of Transform instances.
|
27,503 |
def create(self, build_sid):
    # string literals below restored from the twilio-python sources
    data = values.of({'BuildSid': build_sid, })
    payload = self._version.create(
        'POST',
        self._uri,
        data=data,
    )
    return DeploymentInstance(
        self._version,
        payload,
        service_sid=self._solution['service_sid'],
        environment_sid=self._solution['environment_sid'],
    )
|
Create a new DeploymentInstance
:param unicode build_sid: The build_sid
:returns: Newly created DeploymentInstance
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentInstance
|
27,504 |
def Adapter(self, **kw):
    # NOTE: most of this function's body (including its docstring, which is
    # reproduced in the docstring column) was garbled during extraction;
    # only the recoverable tail survives. `ret` is the adapter being built.
    if isinstance(ret.codec, basestring):
        ret.codec = codec.Codec.factory(ret.codec)
    if ret.devID is not None:
        peers = ret.getKnownPeers()
        if len(peers) == 1 and peers[0].url is not None:
            ret._peer = peers[0]
    return ret
|
.. TODO:: move this documentation into model/adapter.py?...
The Adapter constructor supports the following parameters:
:param devID:
sets the local adapter's device identifier. For servers, this
should be the externally accessible URL that launches the SyncML
transaction, and for clients this should be a unique ID, such as
the IMEI number (for mobile phones). If not specified, it will
be defaulted to the `devID` of the `devinfo` object. If it
cannot be loaded from the database or from the `devinfo`, then
it must be provided before any synchronization can begin.
:param name:
sets the local adapter's device name - usually a human-friendly
description of this SyncML's function.
:param devinfo:
sets the local adapter :class:`pysyncml.devinfo.DeviceInfo`. If
not specified, it will be auto-loaded from the database. If it
cannot be loaded from the database, then it must be provided
before any synchronization can begin.
:param peer:
TODO: document...
:param maxGuidSize:
TODO: document...
:param maxMsgSize:
TODO: document...
:param maxObjSize:
TODO: document...
:param conflictPolicy:
sets the default conflict handling policy for this adapter,
and can be overridden on a per-store basis (applies only when
operating as the server role).
|
27,505 |
def num_lines(self):
if self.from_stdin:
return None
if not self._num_lines:
self._iterate_lines()
return self._num_lines
|
Lazy evaluation of the number of lines.
Returns None for stdin input currently.
|
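The `num_lines` accessor above is a lazy-evaluation cache: the count is computed on first access and memoized. A minimal sketch of the same pattern (the class and file handling here are hypothetical):

```python
class LineCounter:
    """Minimal sketch of the lazy counting pattern used by num_lines."""

    def __init__(self, path):
        self._path = path
        self._num_lines = None  # not computed yet

    @property
    def num_lines(self):
        if self._num_lines is None:
            # One expensive pass over the file, done at most once.
            with open(self._path) as f:
                self._num_lines = sum(1 for _ in f)
        return self._num_lines
```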
27,506 |
def info(self, headers=None):
    return self.transport.forward_request(
        method='GET', path='/', headers=headers)
|
Retrieves information of the node being connected to via the
root endpoint ``'/'``.
Args:
headers (dict): Optional headers to pass to the request.
Returns:
dict: Details of the node that this instance is connected
to. Some information that may be interesting:
* the server version and
* an overview of all the endpoints
Note:
Currently limited to one node, and will be expanded to
return information for each node that this instance is
connected to.
|
27,507 |
def _writeFile(cls, filePath, content, encoding = None):
filePath = os.path.realpath(filePath)
log.debug(_("Real file path to write: %s" % filePath))
if encoding is None:
encoding = File.DEFAULT_ENCODING
try:
encodedContent = ''.join(content).encode(encoding)
except LookupError as msg:
raise SubFileError(_("Unknown encoding name: '%s'.") % encoding)
except UnicodeEncodeError:
raise SubFileError(
_("There are some characters in '%(file)s' that cannot be encoded to '%(enc)s'.")
% {"file": filePath, "enc": encoding})
tmpFilePath = "%s.tmp" % filePath
bakFilePath = "%s.bak" % filePath
with open(tmpFilePath, 'wb') as f:
f.write(encodedContent)
f.flush()
try:
os.rename(filePath, bakFilePath)
except FileNotFoundError:
pass
os.rename(tmpFilePath, filePath)
try:
os.unlink(bakFilePath)
except FileNotFoundError:
pass
|
Safe file writing. The most common mistakes are checked for and reported before the
write operation. After that, if anything unexpected happens, the user won't be left
without data or with corrupted data, as this method writes to a temporary file and
then simply renames it (which should be an atomic operation according to POSIX, but
who knows how Ext4 really works. @see: http://lwn.net/Articles/322823/).
|
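A standalone sketch of the temp-file-plus-rename strategy the docstring describes, assuming POSIX rename semantics and skipping the encoding checks:

```python
import os

def atomic_write(path, data: bytes):
    """Minimal sketch of the tmp/bak dance used by _writeFile."""
    tmp_path = "%s.tmp" % path
    bak_path = "%s.bak" % path
    with open(tmp_path, "wb") as f:
        f.write(data)
        f.flush()
        os.fsync(f.fileno())  # make sure bytes hit the disk before renaming
    try:
        os.rename(path, bak_path)   # keep the old file as a backup
    except FileNotFoundError:
        pass                        # first write: nothing to back up
    os.rename(tmp_path, path)       # atomic on POSIX filesystems
    try:
        os.unlink(bak_path)
    except FileNotFoundError:
        pass
```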
27,508 |
def _find_package(c):
configured_value = c.get("packaging", {}).get("package", None)
if configured_value:
return configured_value
packages = [
path
for path in os.listdir(".")
if (
os.path.isdir(path)
and os.path.exists(os.path.join(path, "__init__.py"))
and path not in ("tests", "integration", "sites", "vendor")
)
]
if not packages:
sys.exit("Unable to find a local Python package!")
if len(packages) > 1:
sys.exit("Found multiple Python packages: {0!r}".format(packages))
return packages[0]
|
Try to find 'the' One True Package for this project.
Mostly for obtaining the ``_version`` file within it.
Uses the ``packaging.package`` config setting if defined. If not defined,
fallback is to look for a single top-level Python package (directory
containing ``__init__.py``). (This search ignores a small blacklist of
directories like ``tests/``, ``vendor/`` etc.)
|
27,509 |
def normalize(self) -> 'State':
tensor = self.tensor / bk.ccast(bk.sqrt(self.norm()))
return State(tensor, self.qubits, self._memory)
|
Normalize the state
|
27,510 |
def inject_print_functions(module_name=None, module_prefix=,
DEBUG=False, module=None):
module = _get_module(module_name, module)
if SILENT:
def print(*args):
pass
def printDBG(*args):
pass
def print_(*args):
pass
else:
if DEBUG_PRINT:
def print(*args):
from utool._internal.meta_util_dbg import get_caller_name
calltag = .join((, get_caller_name(N=DEBUG_PRINT_N), ))
util_logging._utool_print()(calltag, *args)
else:
def print(*args):
util_logging._utool_print()(*args)
if __AGGROFLUSH__:
def print_(*args):
util_logging._utool_write()(*args)
util_logging._utool_flush()()
else:
def print_(*args):
util_logging._utool_write()(*args)
dotpos = module.__name__.rfind('.')
if dotpos == -1:
module_name = module.__name__
else:
module_name = module.__name__[dotpos + 1:]
def _replchars(str_):
return str_.replace(, ).replace(, ).replace(, )
flag1 = % _replchars(module_name)
flag2 = % _replchars(module_prefix)
DEBUG_FLAG = any([flag in sys.argv for flag in [flag1, flag2]])
for curflag in ARGV_DEBUG_FLAGS:
if curflag in module_prefix:
DEBUG_FLAG = True
if __DEBUG_ALL__ or DEBUG or DEBUG_FLAG:
print( % (module_name, module_prefix))
def printDBG(*args):
msg = .join(map(str, args))
util_logging.__UTOOL_PRINTDBG__(module_prefix + + msg)
else:
def printDBG(*args):
pass
print_funcs = (print, print_, printDBG)
return print_funcs
|
Makes the print functions to be injected into the module.
|
27,511 |
def _recenter_epsf(self, epsf_data, epsf, centroid_func=centroid_com,
box_size=5, maxiters=20, center_accuracy=1.0e-4):
epsf = EPSFModel(data=epsf_data, origin=epsf.origin, normalize=False,
oversampling=epsf.oversampling)
epsf.fill_value = 0.0
xcenter, ycenter = epsf.origin
dx_total = 0
dy_total = 0
y, x = np.indices(epsf_data.shape, dtype=np.float)
iter_num = 0
center_accuracy_sq = center_accuracy ** 2
center_dist_sq = center_accuracy_sq + 1.e6
center_dist_sq_prev = center_dist_sq + 1
while (iter_num < maxiters and
center_dist_sq >= center_accuracy_sq):
iter_num += 1
slices_large, slices_small = overlap_slices(epsf_data.shape,
box_size,
(ycenter, xcenter))
epsf_cutout = epsf_data[slices_large]
mask = ~np.isfinite(epsf_cutout)
xcenter_new, ycenter_new = centroid_func(epsf_cutout, mask=mask)
xcenter_new += slices_large[1].start
ycenter_new += slices_large[0].start
dx = xcenter - xcenter_new
dy = ycenter - ycenter_new
center_dist_sq = dx**2 + dy**2
if center_dist_sq >= center_dist_sq_prev:
break
center_dist_sq_prev = center_dist_sq
dx_total += dx
dy_total += dy
epsf_data = epsf.evaluate(x=x, y=y, flux=1.0,
x_0=xcenter + dx_total,
y_0=ycenter + dy_total,
use_oversampling=False)
return epsf_data
|
Calculate the center of the ePSF data and shift the data so the
ePSF center is at the center of the ePSF data array.
Parameters
----------
epsf_data : 2D `~numpy.ndarray`
A 2D array containing the ePSF image.
epsf : `EPSFModel` object
The ePSF model.
centroid_func : callable, optional
A callable object (e.g. function or class) that is used to
calculate the centroid of a 2D array. The callable must
accept a 2D `~numpy.ndarray`, have a ``mask`` keyword and
optionally an ``error`` keyword. The callable object must
return a tuple of two 1D `~numpy.ndarray`\\s, representing
the x and y centroids. The default is
`~photutils.centroids.centroid_com`.
box_size : float or tuple of two floats, optional
The size (in pixels) of the box used to calculate the
centroid of the ePSF during each build iteration. If a
single integer number is provided, then a square box will be
used. If two values are provided, then they should be in
``(ny, nx)`` order. The default is 5.
maxiters : int, optional
The maximum number of recentering iterations to perform.
The default is 20.
center_accuracy : float, optional
The desired accuracy for the centers of stars. The building
iterations will stop if the center of the ePSF changes by
less than ``center_accuracy`` pixels between iterations.
The default is 1.0e-4.
Returns
-------
result : 2D `~numpy.ndarray`
The recentered ePSF data.
|
27,512 |
def argmin_list(seq, fn):
best_score, best = fn(seq[0]), []
for x in seq:
x_score = fn(x)
if x_score < best_score:
best, best_score = [x], x_score
elif x_score == best_score:
best.append(x)
return best
|
Return a list of elements of seq[i] with the lowest fn(seq[i]) scores.
>>> argmin_list(['one', 'to', 'three', 'or'], len)
['to', 'or']
|
27,513 |
def hotplugRegisterCallback(
self, callback,
events=HOTPLUG_EVENT_DEVICE_ARRIVED | HOTPLUG_EVENT_DEVICE_LEFT,
flags=HOTPLUG_ENUMERATE,
vendor_id=HOTPLUG_MATCH_ANY,
product_id=HOTPLUG_MATCH_ANY,
dev_class=HOTPLUG_MATCH_ANY,
):
def wrapped_callback(context_p, device_p, event, _):
assert addressof(context_p.contents) == addressof(
self.__context_p.contents), (context_p, self.__context_p)
device = USBDevice(
self,
device_p,
event != HOTPLUG_EVENT_DEVICE_LEFT,
)
self.__close_set.add(device)
unregister = bool(callback(
self,
device,
event,
))
if unregister:
del self.__hotplug_callback_dict[handle]
return unregister
handle = c_int()
callback_p = libusb1.libusb_hotplug_callback_fn_p(wrapped_callback)
mayRaiseUSBError(libusb1.libusb_hotplug_register_callback(
self.__context_p, events, flags, vendor_id, product_id, dev_class,
callback_p, None, byref(handle),
))
handle = handle.value
assert handle not in self.__hotplug_callback_dict, (
handle,
self.__hotplug_callback_dict,
)
self.__hotplug_callback_dict[handle] = (callback_p, wrapped_callback)
return handle
|
Registers a hotplug callback.
On success, returns an opaque value which can be passed to
hotplugDeregisterCallback.
Callback must accept the following positional arguments:
- this USBContext instance
- a USBDevice instance
If the device has left, its configuration descriptors may not be
available, but its device descriptor will be.
- event type, one of:
HOTPLUG_EVENT_DEVICE_ARRIVED
HOTPLUG_EVENT_DEVICE_LEFT
Callback must return whether it must be unregistered (any true value
to be unregistered, any false value to be kept registered).
Note: given callback will be invoked during event handling, meaning
it cannot call any synchronous libusb function.
|
27,514 |
def resize(self, size, interp='nearest'):
    # default interpolation and PIL mode reconstructed; nearest-neighbor
    # keeps segment labels intact when resizing
    resized_data = sm.imresize(self.data, size, interp=interp, mode='L')
    return SegmentationImage(resized_data, self._frame)
|
Resize the image.
Parameters
----------
size : int, float, or tuple
* int - Percentage of current size.
* float - Fraction of current size.
* tuple - Size of the output image.
interp : :obj:`str`, optional
Interpolation to use for re-sizing ('nearest', 'lanczos', 'bilinear',
'bicubic', or 'cubic')
|
27,515 |
def getPredictionResults(network, clRegionName):
classifierRegion = network.regions[clRegionName]
actualValues = classifierRegion.getOutputData("actualValues")
probabilities = classifierRegion.getOutputData("probabilities")
steps = classifierRegion.getSelf().stepsList
N = classifierRegion.getSelf().maxCategoryCount
results = {step: {} for step in steps}
for i in range(len(steps)):
stepProbabilities = probabilities[i * N:(i + 1) * N - 1]
mostLikelyCategoryIdx = stepProbabilities.argmax()
predictedValue = actualValues[mostLikelyCategoryIdx]
predictionConfidence = stepProbabilities[mostLikelyCategoryIdx]
results[steps[i]]["predictedValue"] = predictedValue
results[steps[i]]["predictionConfidence"] = predictionConfidence
return results
|
Get prediction results for all prediction steps.
|
27,516 |
def transform_from_rot_trans(R, t):
R = R.reshape(3, 3)
t = t.reshape(3, 1)
return np.vstack((np.hstack([R, t]), [0, 0, 0, 1]))
|
Transformation matrix from rotation matrix and translation vector.
|
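A worked numpy example of the homogeneous matrix this builds: the rotation block occupies the top-left 3x3, the translation the last column, and `[0, 0, 0, 1]` the bottom row:

```python
import numpy as np

# 90-degree rotation about z and a translation of (1, 2, 3).
R = np.array([[0, -1, 0],
              [1,  0, 0],
              [0,  0, 1]], dtype=float)
t = np.array([1.0, 2.0, 3.0])

T = np.vstack((np.hstack([R.reshape(3, 3), t.reshape(3, 1)]), [0, 0, 0, 1]))
# T @ [x, y, z, 1] rotates the point, then translates it:
p = T @ np.array([1.0, 0.0, 0.0, 1.0])
print(p[:3])  # [1. 3. 3.]  (rotation maps (1,0,0) -> (0,1,0), plus (1,2,3))
```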
27,517 |
def discard_incoming_messages(self):
self.inbox.clear()
previous = self._discard_incoming_messages
self._discard_incoming_messages = True
try:
yield
finally:
self._discard_incoming_messages = previous
|
Discard all incoming messages for the time of the context manager.
|
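`discard_incoming_messages` is a generator-based context manager that saves a flag, sets it, and restores it in `finally`, so the previous value survives even if the block raises. The same pattern in a self-contained sketch (the `Mailbox` class here is hypothetical):

```python
from contextlib import contextmanager

class Mailbox:
    def __init__(self):
        self.inbox = []
        self._discard_incoming_messages = False

    @contextmanager
    def discard_incoming_messages(self):
        """Discard all incoming messages for the duration of the block."""
        self.inbox.clear()
        previous = self._discard_incoming_messages
        self._discard_incoming_messages = True
        try:
            yield
        finally:
            # Restore the previous value even if the block raised.
            self._discard_incoming_messages = previous

box = Mailbox()
with box.discard_incoming_messages():
    assert box._discard_incoming_messages
assert not box._discard_incoming_messages
```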
27,518 |
def minimize(self, loss_fn, x, optim_state):
grads = self._compute_gradients(loss_fn, x, optim_state)
return self._apply_gradients(grads, x, optim_state)
|
Analogous to tf.Optimizer.minimize
:param loss_fn: tf Tensor, representing the loss to minimize
:param x: list of Tensor, analogous to tf.Optimizer's var_list
:param optim_state: A possibly nested dict, containing any optimizer state.
Returns:
new_x: list of Tensor, updated version of `x`
new_optim_state: dict, updated version of `optim_state`
|
27,519 |
def create_context_menu(self, extended):
menu = Gtk.Menu()
self._menu(menu, extended)
return menu
|
Create the context menu.
|
27,520 |
def choice_info(self):
info = ReadSBo(self.sbo_url).info(self.name, ".info")
fill = self.fill_pager(info)
self.pager(info + fill)
|
View .info file
|
27,521 |
async def sendmail(
self, sender, recipients, message, mail_options=None, rcpt_options=None
):
if isinstance(recipients, str):
recipients = [recipients]
if mail_options is None:
mail_options = []
if rcpt_options is None:
rcpt_options = []
await self.ehlo_or_helo_if_needed()
if self.supports_esmtp:
if "size" in self.esmtp_extensions:
mail_options.append("size={}".format(len(message)))
await self.mail(sender, mail_options)
errors = []
for recipient in recipients:
try:
await self.rcpt(recipient, rcpt_options)
except SMTPCommandFailedError as e:
errors.append(e)
if len(recipients) == len(errors):
raise SMTPNoRecipientError(errors)
await self.data(message)
return errors
|
Performs an entire e-mail transaction.
Example:
>>> try:
>>> with SMTP() as client:
>>> try:
>>> r = client.sendmail(sender, recipients, message)
>>> except SMTPException:
>>> print("Error while sending message.")
>>> else:
>>> print("Result: {}.".format(r))
>>> except ConnectionError as e:
>>> print(e)
Result: {}.
Args:
sender (str): E-mail address of the sender.
recipients (list of str or str): E-mail(s) address(es) of the
recipient(s).
message (str or bytes): Message body.
mail_options (list of str): ESMTP options (such as *8BITMIME*) to
send along the *MAIL* command.
rcpt_options (list of str): ESMTP options (such as *DSN*) to
send along all the *RCPT* commands.
Raises:
ConnectionResetError: If the connection with the server is
unexpectedly lost.
SMTPCommandFailedError: If the server refuses our EHLO/HELO
greeting.
SMTPCommandFailedError: If the server refuses our MAIL command.
SMTPCommandFailedError: If the server refuses our DATA command.
SMTPNoRecipientError: If the server refuses all given
recipients.
Returns:
dict: A dict containing an entry for each recipient that was
refused. Each entry is associated with a (code, message)
2-tuple containing the error code and message, as returned by
the server.
When everything runs smoothly, the returned dict is empty.
.. note:: The connection remains open after. It's your responsibility
to close it. A good practice is to use the asynchronous context
manager instead. See :meth:`SMTP.__aenter__` for further details.
|
27,522 |
def univariate_envelope_plot(x, mean, std, ax=None, base_alpha=0.375, envelopes=[1, 3], lb=None, ub=None, expansion=10, **kwargs):
if ax is None:
f = plt.figure()
ax = f.add_subplot(1, 1, 1)
elif ax == 'gca':
ax = plt.gca()
mean = scipy.asarray(mean, dtype=float).copy()
std = scipy.asarray(std, dtype=float).copy()
# Reconstructed: draw the mean curve and reuse its color for the envelopes
# (the original lines defining `l` and `color` were lost in extraction).
l = ax.plot(x, mean, **kwargs)
color = plt.getp(l[0], 'color')
e = []
for i in envelopes:
lower = mean - i * std
upper = mean + i * std
if ub is not None:
lower[lower > ub] = ub
upper[upper > ub] = ub
if lb is not None:
lower[lower < lb] = lb
upper[upper < lb] = lb
e.append(ax.fill_between(x, lower, upper, facecolor=color, alpha=base_alpha / i))
return (l, e)
|
Make a plot of a mean curve with uncertainty envelopes.
|
27,523 |
def GetUser(self, sid=None, uid=None, username=None):
if sid:
for user in self.users:
if user.sid == sid:
return user
return None
if uid:
for user in self.users:
if user.uid == uid:
return user
if username:
for user in self.users:
if user.username == username:
if uid and user.uid and user.uid != uid:
return None
else:
return user
|
Retrieve a User based on sid, uid or username.
On windows we first get a SID and use it to find the username. We want to
avoid combining users with name collisions, which occur when local users
have the same username as domain users (something like Admin is particularly
common). So if a SID is provided, don't also try to match by username.
On linux we first get a username, then use this to find the UID, so we want
to combine these records or we end up with multiple partially-filled user
records.
TODO(user): this won't work at all well with a query for uid=0 because
that is also the default for User objects that don't have uid
set.
Args:
sid: Windows user sid
uid: Linux/Darwin user id
username: string
Returns:
rdf_client.User or None
|
27,524 |
def _withholdOffbids(self):
limits = self.limits
if not limits.has_key("maxOffer"):
limits["maxOffer"] = self.priceCap
if not self.limits.has_key("minBid"):
self.limits["minBid"] = self.priceCap
for offer in self.offers:
if round(offer.quantity, 4) <= 0.0:
logger.info("Withholding non-posistive quantity [%.2f] "
"offer." % offer.quantity)
offer.withheld = True
for bid in self.bids:
if round(bid.quantity, 4) <= 0.0:
logger.info("Withholding non-posistive quantity [%.2f] "
"bid." % bid.quantity)
bid.withheld = True
if limits.has_key("maxOffer"):
for offer in self.offers:
if offer.price > limits["maxOffer"]:
logger.info("Offer price [%.2f] above limit [%.3f], "
"withholding." % (offer.price, limits["maxOffer"]))
offer.withheld = True
if limits.has_key("minBid"):
for bid in self.bids:
if bid.price < limits["minBid"]:
logger.info("Bid price [%.2f] below limit [%.2f], "
"withholding." % (bid.price, limits["minBid"]))
bid.withheld = True
|
Withholds offers/bids with invalid (<= 0.0) quantities or prices
outwith the set limits.
|
27,525 |
def p_union(self, p):
"""union : UNION IDENTIFIER '{' field_seq '}' annotations"""
p[0] = ast.Union(
name=p[2], fields=p[4], annotations=p[6], lineno=p.lineno(2)
)
|
union : UNION IDENTIFIER '{' field_seq '}' annotations
|
27,526 |
def get(self, term):
if self._items[term].used is False:
cont = self._items[term]
self._grammar.nonterminals.add(cont.nonterminal)
self._grammar.rules.add(cont.rule)
cont.used = True
return self._items[term].nonterminal
|
Get nonterminal rewritable to term.
If the rule is not in the grammar, a nonterminal and a rule rewritable to the terminal are added into the grammar.
:param term: Term for which get the nonterminal.
:return: ChomskyTermNonterminal class for terminal.
|
27,527 |
def _compute_ambig_ll(self):
for Dll in self.ll:
for L in one2two.keys():
Dll[L] = max(Dll[one2two[L][0]], Dll[one2two[L][1]] )
Dll[] = 0.0
Dll[] = 0.0
|
m._compute_ambig_ll() -- Extend log-likelihood matrix to include ambiguity codes
e.g. What is the score of an 'S'? Here we use the max of C and G.
|
27,528 |
def _from_center_cartesian(
self, x: float, y: float, z: float) -> Point:
center = self.center()
if self._shape is WellShape.RECTANGULAR:
x_size = self._length
y_size = self._width
else:
x_size = self._diameter
y_size = self._diameter
z_size = self._depth
return Point(
x=center.point.x + (x * (x_size / 2.0)),
y=center.point.y + (y * (y_size / 2.0)),
z=center.point.z + (z * (z_size / 2.0)))
|
Specifies an arbitrary point relative to the center of the well based
on percentages of the radius in each axis. For example, to specify the
back-right corner of a well at 1/4 of the well depth from the bottom,
the call would be `_from_center_cartesian(1, 1, -0.5)`.
No checks are performed to ensure that the resulting position will be
inside of the well.
:param x: a float in the range [-1.0, 1.0] for a percentage of half of
the radius/length in the X axis
:param y: a float in the range [-1.0, 1.0] for a percentage of half of
the radius/width in the Y axis
:param z: a float in the range [-1.0, 1.0] for a percentage of half of
the height above/below the center
:return: a Point representing the specified location in absolute deck
coordinates
|
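A worked example of the fractional-coordinate arithmetic, assuming a circular well centred at (10, 10, 30) with diameter 6 and depth 20; the call from the docstring, `(1, 1, -0.5)`, then lands a quarter of the depth above the bottom:

```python
# Standalone restatement of the arithmetic in _from_center_cartesian.
center = (10.0, 10.0, 30.0)
x_size = y_size = 6.0   # diameter (circular well)
z_size = 20.0           # depth

def from_center(x, y, z):
    return (center[0] + x * (x_size / 2.0),
            center[1] + y * (y_size / 2.0),
            center[2] + z * (z_size / 2.0))

# Back-right corner, 1/4 of the depth above the bottom (bottom is z=20):
print(from_center(1, 1, -0.5))  # (13.0, 13.0, 25.0)
```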
27,529 |
def get_mol(chebi_id):
chebi_id_regexp = + str(chebi_id) +
mol_file_end_regexp =
this_structure = []
filename = get_file()
with io.open(filename, , encoding=) as textfile:
in_chebi_id = False
next(textfile)
for line in textfile:
if in_chebi_id or line[0].isdigit():
if re.match(chebi_id_regexp, line) \
and int(line.split()[0]) \
in __get_default_structure_ids():
tokens = line.strip().split()
in_chebi_id = True
this_structure = []
this_structure.append(.join(tokens[2:])
.replace(, ))
this_structure.append()
elif in_chebi_id:
if re.match(mol_file_end_regexp, line):
tokens = line.strip().split()
this_structure.append(tokens[0].replace(, ))
return Structure(.join(this_structure),
Structure.mol,
int(tokens[2][0]))
else:
this_structure.append(line)
return None
|
Returns mol
|
27,530 |
def _find_file(self, load):
    # string literals restored from the salt fileserver conventions
    path = load.get('path')
    if not path:
        return {'path': '',
                'rel': ''}
    tgt_env = load.get('saltenv', 'base')
    return self.find_file(path, tgt_env)
|
Convenience function for calls made using the RemoteClient
|
27,531 |
def read_file(file_path, mode='r'):
    contents = ''
    with open(file_path, mode) as f:
        contents = f.read()
    return contents
|
Read the contents of a file
:param file_path: Path of the file to be read
:return: Contents of the file
|
27,532 |
def resample_image_to_target(image, target, interp_type='linear', imagetype=0, verbose=False, **kwargs):
    fixed = target
    moving = image
    compose = None
    transformlist = 'identity'
    interpolator = interp_type
    interpolator_oldoptions = ("linear", "nearestNeighbor", "gaussian", "cosineWindowedSinc", "bSpline")
    if isinstance(interp_type, int):
        interpolator = interpolator_oldoptions[interp_type]
    accepted_interpolators = {"linear", "nearestNeighbor", "multiLabel", "gaussian",
                              "bSpline", "cosineWindowedSinc", "welchWindowedSinc",
                              "hammingWindowedSinc", "lanczosWindowedSinc", "genericLabel"}
    if interpolator not in accepted_interpolators:
        raise ValueError('interpolator not supported - see %s' % accepted_interpolators)
    args = [fixed, moving, transformlist, interpolator]
    if not isinstance(fixed, str):
        if isinstance(fixed, iio.ANTsImage) and isinstance(moving, iio.ANTsImage):
            inpixeltype = fixed.pixeltype
            warpedmovout = moving.clone()
            f = fixed
            m = moving
            if (moving.dimension == 4) and (fixed.dimension == 3) and (imagetype == 0):
                raise ValueError('Set imagetype 3 to transform time series images.')
            wmo = warpedmovout
            mytx = ['-t', 'identity']
            if compose is None:
                args = ['-d', fixed.dimension, '-i', m, '-o', wmo, '-r', f, '-n', interpolator] + mytx
            tfn = '%scomptx.nii.gz' % compose if compose is not None else 'NA'
            if compose is not None:
                mycompo = '[%s,1]' % tfn
                args = ['-d', fixed.dimension, '-i', m, '-o', mycompo, '-r', f, '-n', interpolator] + mytx
            myargs = utils._int_antsProcessArguments(args)
            # NOTE: the lines building `processed_args` and resolving `libfn`
            # were lost in extraction; string literals above are reconstructed
            # from the ANTsPy sources.
            libfn(processed_args)
|
Resample image by using another image as target reference.
This function uses ants.apply_transform with an identity matrix
to achieve proper resampling.
ANTsR function: `resampleImageToTarget`
Arguments
---------
image : ANTsImage
image to resample
target : ANTsImage
image of reference, the output will be in this space
interp_type : string
Choice of interpolator. Supports partial matching.
linear
nearestNeighbor
multiLabel for label images but genericlabel is preferred
gaussian
bSpline
cosineWindowedSinc
welchWindowedSinc
hammingWindowedSinc
lanczosWindowedSinc
genericLabel use this for label images
imagetype : integer
choose 0/1/2/3 mapping to scalar/vector/tensor/time-series
verbose : boolean
print command and run verbose application of transform.
kwargs : keyword arguments
additional arguments passed to the antsApplyTransforms C code
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> fi = ants.image_read(ants.get_ants_data('r16'))
>>> fi2mm = ants.resample_image(fi, (2,2), use_voxels=0, interp_type='linear')
>>> resampled = ants.resample_image_to_target(fi2mm, fi, verbose=True)
|
27,533 |
def parse(self, s, tokenize=True, tags=True, chunks=True, relations=False, lemmata=False, encoding="utf-8", **kwargs):
if tokenize is True:
s = self.find_tokens(s, **kwargs)
if isinstance(s, (list, tuple)):
s = [isinstance(s, basestring) and s.split(" ") or s for s in s]
if isinstance(s, basestring):
s = [s.split(" ") for s in s.split("\n")]
for i in range(len(s)):
for j in range(len(s[i])):
if isinstance(s[i][j], str):
s[i][j] = decode_string(s[i][j], encoding)
if tags or chunks or relations or lemmata:
s[i] = self.find_tags(s[i], **kwargs)
else:
s[i] = [[w] for w in s[i]]
if chunks or relations:
s[i] = self.find_chunks(s[i], **kwargs)
if relations:
s[i] = self.find_labels(s[i], **kwargs)
if lemmata:
s[i] = self.find_lemmata(s[i], **kwargs)
if not kwargs.get("collapse", True) \
or kwargs.get("split", False):
return s
format = ["word"]
if tags:
format.append("part-of-speech")
if chunks:
format.extend(("chunk", "preposition"))
if relations:
format.append("relation")
if lemmata:
format.append("lemma")
for i in range(len(s)):
for j in range(len(s[i])):
s[i][j][0] = s[i][j][0].replace("/", "&slash;")
s[i][j] = "/".join(s[i][j])
s[i] = " ".join(s[i])
s = "\n".join(s)
s = TaggedString(s, format, language=kwargs.get("language", self.language))
return s
|
Takes a string (sentences) and returns a tagged Unicode string (TaggedString).
Sentences in the output are separated by newlines.
With tokenize=True, punctuation is split from words and sentences are separated by \n.
With tags=True, part-of-speech tags are parsed (NN, VB, IN, ...).
With chunks=True, phrase chunk tags are parsed (NP, VP, PP, PNP, ...).
With relations=True, semantic role labels are parsed (SBJ, OBJ).
With lemmata=True, word lemmata are parsed.
Optional parameters are passed to
the tokenizer, tagger, chunker, labeler and lemmatizer.
|
27,534 |
def create_connection(self, session=None):
connection = Connection(conn_id=self.db_conn_id)
uri = self._generate_connection_uri()
self.log.info("Creating connection %s", self.db_conn_id)
connection.parse_from_uri(uri)
session.add(connection)
session.commit()
|
Create connection in the Connection table, according to whether it uses
proxy, TCP, UNIX sockets, SSL. Connection ID will be randomly generated.
:param session: Session of the SQL Alchemy ORM (automatically generated with
decorator).
|
27,535 |
def solarcalcs(self):
self.dir = self.forc.dir
self.dif = self.forc.dif
if self.dir + self.dif > 0.:
self.logger.debug("{} Solar radiation > 0".format(__name__))
self.solarangles()
self.horSol = max(math.cos(self.zenith)*self.dir, 0.0)
self.Kw_term = min(abs(1./self.UCM.canAspect*(0.5-self.critOrient/math.pi) \
+ 1/math.pi*self.tanzen*(1-math.cos(self.critOrient))),1.)
self.Kr_term = min(abs(2.*self.critOrient/math.pi \
- (2/math.pi*self.UCM.canAspect*self.tanzen)*(1-math.cos(self.critOrient))), 1-2*self.UCM.canAspect*self.Kw_term)
self.bldSol = self.horSol*self.Kw_term + self.UCM.wallConf*self.dif
self.roadSol = self.horSol*self.Kr_term + self.UCM.roadConf*self.dif
if self.simTime.month < self.parameter.vegStart or self.simTime.month > self.parameter.vegEnd:
alb_road = self.UCM.road.albedo
else:
alb_road = self.UCM.road.albedo*(1.-self.UCM.road.vegCoverage) + self.parameter.vegAlbedo*self.UCM.road.vegCoverage
rr = alb_road * self.roadSol
rw = self.UCM.alb_wall * self.bldSol
fr = (1. - (1. - 2.*self.UCM.wallConf) * self.UCM.alb_wall + (1. - self.UCM.roadConf) \
* self.UCM.wallConf * alb_road * self.UCM.alb_wall)
self.mr = (rr + (1.0-self.UCM.roadConf) * alb_road * (rw + self.UCM.wallConf * self.UCM.alb_wall * rr)) / fr
self.mw = (rw + self.UCM.wallConf * self.UCM.alb_wall * rr) / fr
self.UCM.road.solRec = self.roadSol + (1 - self.UCM.roadConf)*self.mw
for j in range(len(self.BEM)):
self.BEM[j].roof.solRec = self.horSol + self.dif
self.BEM[j].wall.solRec = self.bldSol + (1 - 2*self.UCM.wallConf) * self.mw + self.UCM.wallConf * self.mr
self.rural.solRec = self.horSol + self.dif
self.UCM.SolRecRoof = self.horSol + self.dif
self.UCM.SolRecRoad = self.UCM.road.solRec
self.UCM.SolRecWall = self.bldSol+(1-2*self.UCM.wallConf)*self.UCM.road.albedo*self.roadSol
self.UCM.treeSensHeat = (1-self.parameter.vegAlbedo)*(1-self.parameter.treeFLat)*self.UCM.SolRecRoad
self.UCM.treeLatHeat = (1-self.parameter.vegAlbedo)*self.parameter.treeFLat*self.UCM.SolRecRoad
else:
self.logger.debug("{} Solar radiation = 0".format(__name__))
self.UCM.road.solRec = 0.
self.rural.solRec = 0.
for j in range(len(self.BEM)):
self.BEM[j].roof.solRec = 0.
self.BEM[j].wall.solRec = 0.
self.UCM.SolRecRoad = 0.
self.UCM.SolRecRoof = 0.
self.UCM.SolRecWall = 0.
self.UCM.treeSensHeat = 0.
self.UCM.treeLatHeat = 0.
return self.rural, self.UCM, self.BEM
|
Solar Calculation
Mutates RSM, BEM, and UCM objects based on following parameters:
UCM # Urban Canopy - Building Energy Model object
BEM # Building Energy Model object
simTime # Simulation time object
RSM # Rural Site & Vertical Diffusion Model Object
forc # Forcing object
parameter # Geo Param Object
rural # Rural road Element object
Properties
self.dir # Direct sunlight
self.dif # Diffuse sunlight
self.tanzen
self.critOrient
self.horSol
self.Kw_term
self.Kr_term
self.mr
self.mw
|
27,536 |
def status_messages(self):
messages = IStatusMessage(self.request)
m = messages.show()
for item in m:
item.id = idnormalizer.normalize(item.message)
return m
|
Returns status messages if any
|
27,537 |
def store(self, name=None):
if name is None:
name = self.get_default_driver()
self._stores[name] = self._get(name)
return self._stores[name]
|
Get a cache store instance by name.
:param name: The cache store name
:type name: str
:rtype: Repository
|
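The `store` accessor resolves a driver by name and caches the result in `self._stores`. A stripped-down sketch of the pattern (all names hypothetical; `_resolve` stands in for building a real Repository):

```python
class CacheManager:
    """Minimal sketch of the store-by-name resolution pattern."""

    def __init__(self, default="memory"):
        self._default = default
        self._stores = {}

    def get_default_driver(self):
        return self._default

    def _get(self, name):
        # Reuse a previously resolved store when one exists.
        return self._stores.get(name) or self._resolve(name)

    def _resolve(self, name):
        return {"name": name}  # stand-in for building a real Repository

    def store(self, name=None):
        if name is None:
            name = self.get_default_driver()
        self._stores[name] = self._get(name)
        return self._stores[name]

mgr = CacheManager()
assert mgr.store() is mgr.store("memory")  # same instance both times
```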
27,538 |
def __params_descriptor(self, message_type, request_kind, path, method_id):
path_parameter_dict = self.__get_path_parameters(path)
if not isinstance(message_type, resource_container.ResourceContainer):
if path_parameter_dict:
_logger.warning(
,
method_id, type(message_type))
return self.__params_descriptor_without_container(
message_type, request_kind, path)
message_type = message_type.parameters_message_class()
params = {}
param_order = []
for field_name, matched_path_parameters in path_parameter_dict.iteritems():
field = message_type.field_by_name(field_name)
self.__validate_path_parameters(field, matched_path_parameters)
for field in sorted(message_type.all_fields(), key=lambda f: f.number):
matched_path_parameters = path_parameter_dict.get(field.name, [])
self.__add_parameters_from_field(field, matched_path_parameters,
params, param_order)
return params, param_order
|
Describe the parameters of a method.
If the message_type is not a ResourceContainer, will fall back to
__params_descriptor_without_container (which will eventually be deprecated).
If the message type is a ResourceContainer, then all path/query parameters
will come from the ResourceContainer. This method will also make sure all
path parameters are covered by the message fields.
Args:
message_type: messages.Message or ResourceContainer class, Message with
parameters to describe.
request_kind: The type of request being made.
path: string, HTTP path to method.
method_id: string, Unique method identifier (e.g. 'myapi.items.method')
Returns:
A tuple (dict, list of string): Descriptor of the parameters, Order of the
parameters.
|
27,539 |
def get_ldap_user_membership(self, user_dn):
membership_filter = self.conf_LDAP_SYNC_GROUP_MEMBERSHIP_FILTER.replace('{user_dn}', user_dn.replace('(', "\(").replace(')', "\)"))  # '{user_dn}' placeholder assumed; parenthesis escaping recovered from the replacement values
try:
uri, groups = self.ldap_search(membership_filter, self.conf_LDAP_SYNC_GROUP_ATTRIBUTES.keys(), False, membership_filter)
except Exception as e:
logger.error("Error reading membership: Filter %s, Keys %s" % (membership_filter, str(self.conf_LDAP_SYNC_GROUP_ATTRIBUTES.keys())))
return None
return (uri, groups)
|
Retrieve user membership from LDAP server.
|
27,540 |
def tempo_account_add_account(self, data=None):
url = 'rest/tempo-accounts/1/account/'  # path reconstructed
if data is None:
return
return self.post(url, data=data)
|
Creates an Account; adding a new Account requires the Manage Accounts permission.
:param data: String; it will be converted to JSON.
:return:
|
27,541 |
def save(self):
self.get_writer().flush()
mpc_filename = self.get_writer().get_filename()
self.get_writer().close()
self._writer = None
return mpc_filename
|
Update the SourceReading information for the currently recorded observations and then flush those to a file.
@return: mpc_filename of the resulting save.
|
27,542 |
def log_print_request(method, url, query_params=None, headers=None, body=None):
    # NOTE: the literal format strings below were lost in extraction and are
    # reconstructed to match the "\t> " style of the surviving fallback message.
    log_msg = '\n>>>>>>>>>>>>>>>>>>>>> Request >>>>>>>>>>>>>>>>>>>>>>>>>\n'
    log_msg += '\t> Method: %s\n' % method
    log_msg += '\t> Url: %s\n' % url
    if query_params is not None:
        log_msg += '\t> Query params: {}\n'.format(str(query_params))
    if headers is not None:
        log_msg += '\t> Headers:\n{}\n'.format(json.dumps(dict(headers), sort_keys=True, indent=4))
    if body is not None:
        try:
            log_msg += '\t> Payload sent:\n{}\n'.format(_get_pretty_body(headers, body))
        except:
            log_msg += "\t> Payload couldn't be formatted"
    logger.debug(log_msg)
|
Log an HTTP request data in a user-friendly representation.
:param method: HTTP method
:param url: URL
:param query_params: Query parameters in the URL
:param headers: Headers (dict)
:param body: Body (raw body, string)
:return: None
|
27,543 |
def file_verify( sender_blockchain_id, sender_key_id, input_path, sig, config_path=CONFIG_PATH, wallet_keys=None ):
    # dict keys restored from the docstring; error-message strings
    # reconstructed from the adjacent log calls
    config_dir = os.path.dirname(config_path)
    old_key = False
    old_key_index = 0
    sender_old_key_index = 0
    sender_key_info = file_key_lookup( sender_blockchain_id, None, None, key_id=sender_key_id, config_path=config_path, wallet_keys=wallet_keys )
    if 'error' in sender_key_info:
        log.error("Failed to look up sender key: %s" % sender_key_info['error'])
        return {'error': 'Failed to look up sender key'}
    if 'old_key_index' in sender_key_info.keys():
        old_key = True
        sender_old_key_index = sender_key_info['old_key_index']
    res = blockstack_gpg.gpg_verify( input_path, sig, sender_key_info, config_dir=config_dir )
    if 'error' in res:
        log.error("Failed to verify from %s.%s" % (sender_blockchain_id, sender_key_id))
        return {'error': 'Failed to verify'}
    return {'status': True}
|
Verify that a file was signed with the given blockchain ID
@config_path should be for the *client*, not blockstack-file
Return {'status': True} on succes
Return {'error': ...} on error
|
27,544 |
def create_bar_chart(self, x_labels, y_values, y_label):
self.setup(0.25)
ax1 = self.get_ax()
ax1.set_xticks(list(range(len(x_labels))))
ax1.set_xticklabels([x_labels[i] for i in range(len(x_labels))],
rotation=90)
plt.ylabel(y_label)
x_pos = range(len(x_labels))
plt.bar(x_pos, y_values, align="center")
return ax1
|
Creates bar chart
:param x_labels: Names for each variable
:param y_values: Values of x labels
:param y_label: Label of y axis
:return: Bar chart
|
27,545 |
def get(tgt, fun, tgt_type='glob'):
    ret = salt.utils.minions.mine_get(tgt, fun, tgt_type, __opts__)
    return ret
|
Gathers the data from the specified minions' mine, pass in the target,
function to look up and the target type
CLI Example:
.. code-block:: bash
salt-run mine.get '*' network.interfaces
|
27,546 |
def get_health_monitor(self, loadbalancer):
uri = "/loadbalancers/%s/healthmonitor" % utils.get_id(loadbalancer)
resp, body = self.api.method_get(uri)
return body.get("healthMonitor", {})
|
Returns a dict representing the health monitor for the load
balancer. If no monitor has been configured, returns an
empty dict.
|
27,547 |
def _to_pywintypes(row):
def _pywintype(x):
if isinstance(x, dt.date):
return dt.datetime(x.year, x.month, x.day, tzinfo=dt.timezone.utc)
elif isinstance(x, (dt.datetime, pa.Timestamp)):
if x.tzinfo is None:
return x.replace(tzinfo=dt.timezone.utc)
elif isinstance(x, str):
if re.match(r"^\d{4}-\d{2}-\d{2}$", x):
return "'" + x
return x
elif isinstance(x, np.integer):
return int(x)
elif isinstance(x, np.floating):
return float(x)
elif x is not None and not isinstance(x, (str, int, float, bool)):
return str(x)
return x
return [_pywintype(x) for x in row]
|
convert values in a row to types accepted by excel
|
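A runnable sketch of the per-cell conversions `_pywintype` performs for a few common cases (dates become UTC datetimes, ISO-date-looking strings get a leading apostrophe so Excel keeps them as text, numpy scalars become native Python numbers):

```python
import datetime as dt
import re
import numpy as np

def to_excel_value(x):
    """Sketch mirroring _pywintype for a few common cases."""
    if isinstance(x, dt.date) and not isinstance(x, dt.datetime):
        # naive dates become tz-aware datetimes
        return dt.datetime(x.year, x.month, x.day, tzinfo=dt.timezone.utc)
    if isinstance(x, str) and re.match(r"^\d{4}-\d{2}-\d{2}$", x):
        return "'" + x  # leading apostrophe keeps Excel from parsing it as a date
    if isinstance(x, np.integer):
        return int(x)
    if isinstance(x, np.floating):
        return float(x)
    return x

print([to_excel_value(v) for v in [dt.date(2024, 1, 31), "2024-01-31", np.int64(7)]])
# [datetime.datetime(2024, 1, 31, 0, 0, tzinfo=...utc), "'2024-01-31", 7]
```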
27,548 |
def save_colormap(self, name=None):
    if name is None: name = self.get_name()
    if name == "" or not type(name) == str: return "Error: invalid name."
    # 'colormaps' directory name inferred from the variable it is stored in
    colormaps = _os.path.join(_settings.path_home, 'colormaps')
    _settings.MakeDir(colormaps)
    path = _os.path.join(_settings.path_home, 'colormaps', name + ".cmap")
    f = open(path, 'w')
    f.write(str(self._colorpoint_list))
    f.close()
    return self
|
Saves the colormap with the specified name. None means use internal
name. (See get_name())
|
27,549 |
def cmd_asterix(self, args):
usage = "usage: asterix <set|start|stop|restart|status>"
if len(args) == 0:
print(usage)
return
if args[0] == "set":
self.asterix_settings.command(args[1:])
elif args[0] == "start":
self.start_listener()
elif args[0] == "stop":
self.stop_listener()
elif args[0] == "restart":
self.stop_listener()
self.start_listener()
elif args[0] == "status":
self.print_status()
else:
print(usage)
|
asterix command parser
|
27,550 |
def matchesTripleExprRef(cntxt: Context, T: RDFGraph, expr: ShExJ.tripleExprLabel, _: DebugContext) -> bool:
expr = cntxt.tripleExprFor(expr)
if expr is None:
cntxt.fail_reason = "{expr}: Reference not found"
return False
return matchesExpr(cntxt, T, expr)
|
expr is a tripleExprRef and satisfies(value, tripleExprWithId(tripleExprRef), G, m).
The tripleExprWithId function is defined in Triple Expression Reference Requirement below.
|
27,551 |
def update_flags(self, idlist, flags):
return self.update_bugs(idlist, self.build_update(flags=flags))
|
A thin back compat wrapper around build_update(flags=X)
|
27,552 |
def train(self, X_feat, X_seq, y,
X_feat_valid=None, X_seq_valid=None, y_valid=None,
n_cores=3):
if X_feat_valid is None and X_seq_valid is None and y_valid is None:
X_feat_valid = X_feat
X_seq_valid = X_seq
y_valid = y
print("Using training samples also for validation ")
X_seq = np.expand_dims(X_seq, axis=1)
X_seq_valid = np.expand_dims(X_seq_valid, axis=1)
if self.is_trained() is True:
print("Model already fitted. Re-training feature not implemented yet")
return
assert X_seq.shape[0] == X_feat.shape[0] == y.shape[0]
assert y.shape == (X_feat.shape[0], self._num_tasks)
self._param["seq_length"] = X_seq.shape[2]
self._param["n_add_features"] = X_feat.shape[1]
if not self._param["seq_length"] == X_seq_valid.shape[2]:
raise Exception("sequence lengths dont use padding
X_spline, S, _ = splines.get_gam_splines(start=0,
end=self._param["seq_length"] - padd_loss - 1,
n_bases=self._param["n_splines"],
spline_order=3,
add_intercept=False)
self._splines = {"X_spline": X_spline,
"S": S
}
self._graph = tf.Graph()
self._var = self._get_var_initialization(self._graph, X_feat_train=X_feat, y_train=y)
self._other_var = self._build_graph(self._graph, self._var)
var_res_init = self._get_var_res(self._graph, self._var, self._other_var)
self.init_weights = self._var_res_to_weights(var_res=var_res_init)
if self._param["optimizer"] == "adam":
_train = self._train_adam
elif self._param["optimizer"] == "lbfgs":
_train = self._train_lbfgs
else:
raise Exception("Optimizer {} not implemented".format(self._param["optimizer"]))
self._var_res = _train(X_feat, X_seq, y,
X_feat_valid, X_seq_valid, y_valid,
graph=self._graph, var=self._var, other_var=self._other_var,
early_stop_patience=self._param["early_stop_patience"],
n_cores=n_cores)
self._model_fitted = True
return True
|
Train the CONCISE model
:py:attr:`X_feat`, :py:attr:`X_seq`, :py:attr:`y` are preferably returned by the :py:func:`concise.prepare_data` function.
Args:
X_feat: Numpy (float) array of shape :code:`(N, D)`. Feature design matrix storing :code:`N` training samples and :code:`D` features
X_seq: Numpy (float) array of shape :code:`(N, 1, N_seq, 4)`. It represents 1-hot encoding of the DNA/RNA sequence.(:code:`N`-seqeuences of length :code:`N_seq`)
y: Numpy (float) array of shape :code:`(N, 1)`. Response variable.
X_feat_valid: :py:attr:`X_feat` used for model validation.
X_seq_valid: :py:attr:`X_seq` used for model validation.
y_valid: :py:attr:`y` used for model validation.
n_cores (int): Number of CPU cores used for training. If available, GPU is used for training and this argument is ignored.
|
27,553 |
def ignore_event(self, event_id):
for ignore in self.ignore:
if (ignore == event_id or
(ignore.endswith('*') and event_id.startswith(ignore[:-1]))):
return True
return False
|
Verify event id against list of events to ignore.
>>> e = EventHandling(ignore=[
... 'test1_00',
... 'test2_*',
... ])
>>> e.ignore_event('test1_00')
True
>>> e.ignore_event('test2_00')
True
>>> e.ignore_event('test3_00')
False
|
27,554 |
async def async_request(session, url, **kwargs):
_LOGGER.debug("Sending %s to %s", kwargs, url)
try:
res = await session(url, **kwargs)
if res.content_type != 'application/json':
raise ResponseError(
"Invalid content type: {}".format(res.content_type))
response = await res.json()
_LOGGER.debug("HTTP request response: %s", response)
_raise_on_error(response)
return response
except aiohttp.client_exceptions.ClientError as err:
raise RequestError(
"Error requesting data from {}: {}".format(url, err)
) from None
|
Do a web request and manage response.
|
27,555 |
def _get_billing_cycle_number(self, billing_cycle):
begins_before_initial_date = billing_cycle.date_range.lower < self.initial_billing_cycle.date_range.lower
if begins_before_initial_date:
raise ProvidedBillingCycleBeginsBeforeInitialBillingCycle(
'Billing cycle {} begins before the initial billing cycle {}'.format(billing_cycle, self.initial_billing_cycle)  # message reconstructed from the exception name
)
billing_cycle_number = BillingCycle.objects.filter(
date_range__contained_by=DateRange(
self.initial_billing_cycle.date_range.lower,
billing_cycle.date_range.upper,
bounds='[]',  # bounds string reconstructed
),
).count()
return billing_cycle_number
|
Gets the 1-indexed number of the billing cycle relative to the provided billing cycle
|
27,556 |
def secretbox_encrypt(data, **kwargs):
data = salt.utils.stringutils.to_bytes(data)
sk = _get_sk(**kwargs)
b = libnacl.secret.SecretBox(sk)
return base64.b64encode(b.encrypt(data))
|
Encrypt data using a secret key generated from `nacl.keygen`.
The same secret key can be used to decrypt the data using `nacl.secretbox_decrypt`.
CLI Examples:
.. code-block:: bash
salt-run nacl.secretbox_encrypt datatoenc
salt-call --local nacl.secretbox_encrypt datatoenc sk_file=/etc/salt/pki/master/nacl
salt-call --local nacl.secretbox_encrypt datatoenc sk='YmFkcGFzcwo='
|
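A minimal `libnacl` round trip showing what the function above does, with the `_get_sk` key lookup replaced by an explicitly constructed SecretBox:

```python
import base64
import libnacl.secret

box = libnacl.secret.SecretBox()          # generates a random secret key
ctxt = base64.b64encode(box.encrypt(b"datatoenc"))

# Decryption reverses both layers, as nacl.secretbox_decrypt would:
plain = box.decrypt(base64.b64decode(ctxt))
assert plain == b"datatoenc"
```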
27,557 |
def drop_index(self, raw):
    # field names recovered from the op document in the docstring
    dbname = raw['ns'].split('.', 1)[0]
    collname = raw['o']['dropIndexes']
    self.dest[dbname][collname].drop_index(raw['o']['index'])
|
Executes a drop index command.
{ "op" : "c",
"ns" : "testdb.$cmd",
"o" : { "dropIndexes" : "testcoll",
"index" : "nuie_1" } }
|
27,558 |
def create_role(self, name, policies=None, ttl=None, max_ttl=None, period=None, bound_service_principal_ids=None,
bound_group_ids=None, bound_location=None, bound_subscription_ids=None,
bound_resource_group_names=None, bound_scale_sets=None, mount_point=DEFAULT_MOUNT_POINT):
if policies is None:
policies = []
if not isinstance(policies, list) or not all([isinstance(p, str) for p in policies]):
error_msg = 'unsupported policies argument provided "{arg}" ({arg_type}), required type: List[str]'
raise exceptions.ParamValidationError(error_msg.format(
arg=policies,
arg_type=type(policies),
))
params = {
'policies': policies,
'ttl': ttl,
'max_ttl': max_ttl,
'period': period,
'bound_service_principal_ids': bound_service_principal_ids,
'bound_group_ids': bound_group_ids,
'bound_location': bound_location,
'bound_subscription_ids': bound_subscription_ids,
'bound_resource_group_names': bound_resource_group_names,
'bound_scale_sets': bound_scale_sets,
}
api_path = '/v1/auth/{mount_point}/role/{name}'.format(mount_point=mount_point, name=name)
return self._adapter.post(
url=api_path,
json=params,
)
|
Create a role in the method.
Role types have specific entities that can perform login operations against this endpoint. Constraints specific
to the role type must be set on the role. These are applied to the authenticated entities attempting to login.
Supported methods:
POST: /auth/{mount_point}/role/{name}. Produces: 204 (empty body)
:param name: Name of the role.
:type name: str | unicode
:param policies: Policies to be set on tokens issued using this role.
:type policies: list
:param ttl: The TTL period of tokens issued using this role in seconds.
:type ttl: str | unicode
:param max_ttl: The maximum allowed lifetime of tokens issued in seconds using this role.
:type max_ttl: str | unicode
:param period: If set, indicates that the token generated using this role should never expire. The token should
be renewed within the duration specified by this value. At each renewal, the token's TTL will be set to the
value of this parameter.
:type period: str | unicode
:param bound_service_principal_ids: The list of Service Principal IDs that login is restricted to.
:type bound_service_principal_ids: list
:param bound_group_ids: The list of group ids that login is restricted to.
:type bound_group_ids: list
:param bound_location: The list of locations that login is restricted to.
:type bound_location: list
:param bound_subscription_ids: The list of subscription IDs that login is restricted to.
:type bound_subscription_ids: list
:param bound_resource_group_names: The list of resource groups that login is restricted to.
:type bound_resource_group_names: list
:param bound_scale_sets: The list of scale set names that the login is restricted to.
:type bound_scale_sets: list
:param mount_point: The "path" the azure auth method was mounted on.
:type mount_point: str | unicode
:return: The response of the request.
:rtype: requests.Response
|
27,559 |
def element_id_by_label(browser, label):
    # XPath literal reconstructed from the function's purpose
    label = XPathSelector(browser,
                          unicode('//label[contains(., "%s")]' % label))
    if not label:
        return False
    return label.get_attribute('for')
|
Return the id of a label's for attribute
|
27,560 |
def refine_peaks_spectrum(sx, ixpeaks, nwinwidth, method=None,
geometry=None, debugplot=0):
nmed = nwinwidth//2
xfpeaks = np.zeros(len(ixpeaks))
sfpeaks = np.zeros(len(ixpeaks))
for iline in range(len(ixpeaks)):
jmax = ixpeaks[iline]
x_fit = np.arange(-nmed, nmed+1, dtype=np.float)
j1 = jmax - nmed
j2 = jmax + nmed + 1
if j1 < 0:
j1 = 0
j2 = 2 * nmed + 1
if j2 >= len(sx):
raise ValueError("Unexpected j2=" + str(j2) +
" value when len(sx)=" + str(len(sx)))
if j2 >= len(sx):
j2 = len(sx)
j1 = j2 - (2 * nmed + 1)
if j1 < 0:
raise ValueError("Unexpected j1=" + str(j1) +
" value when len(sx)=" + str(len(sx)))
y_fit = np.copy(sx[j1:j2])
sx_peak_flux = y_fit.max()
if sx_peak_flux != 0:
y_fit /= sx_peak_flux
if method == "gaussian":
if y_fit.min() <= 0:
if debugplot >= 10:
print("WARNING: negative or null value encountered" +
" in refine_peaks_spectrum with gaussian.")
print(" Using poly2 method instead.")
final_method = "poly2"
else:
final_method = "gaussian"
else:
final_method = method
if final_method == "poly2":
poly_funct = Polynomial.fit(x_fit, y_fit, 2)
poly_funct = Polynomial.cast(poly_funct)
coef = poly_funct.coef
if len(coef) == 3:
if coef[2] != 0:
refined_peak = -coef[1]/(2.0*coef[2]) + jmax
else:
refined_peak = 0.0 + jmax
else:
refined_peak = 0.0 + jmax
elif final_method == "gaussian":
poly_funct = Polynomial.fit(x_fit, np.log(y_fit), 2)
poly_funct = Polynomial.cast(poly_funct)
coef = poly_funct.coef
if len(coef) == 3:
if coef[2] != 0:
refined_peak = -coef[1]/(2.0*coef[2]) + jmax
else:
refined_peak = 0.0 + jmax
if coef[2] >= 0:
sfpeaks[iline] = None
else:
sfpeaks[iline] = np.sqrt(-1 / (2.0 * coef[2]))
else:
refined_peak = 0.0 + jmax
sfpeaks[iline] = None
else:
raise ValueError("Invalid method=" + str(final_method) + " value")
xfpeaks[iline] = refined_peak
if debugplot % 10 != 0:
from numina.array.display.matplotlib_qt import plt
fig = plt.figure()
set_window_geometry(geometry)
ax = fig.add_subplot(111)
xmin = x_fit.min()-1
xmax = x_fit.max()+1
ymin = 0
ymax = y_fit.max()*1.10
ax.set_xlim(xmin, xmax)
ax.set_ylim(ymin, ymax)
ax.set_xlabel('index around initial peak')  # label reconstructed
ax.set_ylabel('normalized number of counts')  # label reconstructed
ax.set_title("Fit to line at array index " + str(jmax) +
"\n(method=" + final_method + ")")
plt.plot(x_fit, y_fit, "bo")
x_plot = np.linspace(start=-nmed, stop=nmed, num=1000,
dtype=np.float)
if final_method == "poly2":
y_plot = poly_funct(x_plot)
elif final_method == "gaussian":
amp = np.exp(coef[0] - coef[1] * coef[1] / (4 * coef[2]))
x0 = -coef[1] / (2.0 * coef[2])
sigma = np.sqrt(-1 / (2.0 * coef[2]))
y_plot = amp * np.exp(-(x_plot - x0)**2 / (2 * sigma**2))
else:
raise ValueError("Invalid method=" + str(final_method) +
" value")
ax.plot(x_plot, y_plot, color="red")
print('>>> refined_peak:', refined_peak)  # message reconstructed
plt.show(block=False)
plt.pause(0.001)
pause_debugplot(debugplot)
return xfpeaks, sfpeaks
|
Refine line peaks in spectrum.
Parameters
----------
sx : 1d numpy array, floats
Input array.
ixpeaks : 1d numpy array, int
Initial peak locations, in array coordinates (integers).
These values can be the output from the function
find_peaks_spectrum().
nwinwidth : int
Width of the window where each peak must be refined.
method : string
"poly2" : fit to a 2nd order polynomial
"gaussian" : fit to a Gaussian
geometry : tuple (4 integers) or None
x, y, dx, dy values employed to set the window geometry.
debugplot : int
Determines whether intermediate computations and/or plots
are displayed:
00 : no debug, no plots
01 : no debug, plots without pauses
02 : no debug, plots with pauses
10 : debug, no plots
11 : debug, plots without pauses
12 : debug, plots with pauses
Returns
-------
fxpeaks : 1d numpy array, float
Refined peak locations, in array coordinates.
sxpeaks : 1d numpy array, float
When fitting Gaussians, this array stores the fitted line
widths (sigma). Otherwise, this array returns zeros.
|
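The "gaussian" branch exploits the fact that the log of a Gaussian is a parabola: fitting `log(y)` with `c0 + c1*x + c2*x**2` puts the peak at `x0 = -c1/(2*c2)` with width `sigma = sqrt(-1/(2*c2))`, exactly the expressions used above. A compact numpy check:

```python
import numpy as np
from numpy.polynomial import Polynomial

# Sample a Gaussian with known center and width on an integer grid.
x0_true, sigma_true = 0.3, 1.7
x = np.arange(-5, 6, dtype=float)
y = np.exp(-(x - x0_true) ** 2 / (2 * sigma_true ** 2))

# Fit log(y) with a 2nd-order polynomial, as the "gaussian" method does.
coef = Polynomial.cast(Polynomial.fit(x, np.log(y), 2)).coef
x0 = -coef[1] / (2.0 * coef[2])
sigma = np.sqrt(-1 / (2.0 * coef[2]))
print(x0, sigma)  # ~0.3 and ~1.7, recovered up to rounding error
```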
27,561 |
def _import_module(self, module_path):
LOGGER.debug('Importing %s', module_path)  # message reconstructed
try:
return __import__(module_path)
except ImportError as error:
LOGGER.critical('Could not import %s: %s', module_path, error)  # message reconstructed
return None
|
Dynamically import a module returning a handle to it.
:param str module_path: The module path
:rtype: module
|
27,562 |
def run_supernova_keyring(ctx, action, environment, parameter):
if action == 'get':
result = credentials.get_user_password(env=environment,
param=parameter)
if not result:
click.echo("\nUnable to find a credential matching the data "
"provided.")
ctx.exit(1)
else:
click.echo("\nFound credential for {0}: {1}".format(*result))
ctx.exit()
elif action == 'set':
msg = 'Enter a credential to store for {0}:{1}'.format(environment, parameter)  # prompt text reconstructed
credential = click.prompt(text=msg, hide_input=True)
result = credentials.set_user_password(environment=environment,
parameter=parameter,
password=credential)
if result:
click.echo("\nSuccessfully stored.")
ctx.exit()
else:
click.echo("\nUnable to store your credential.")
ctx.exit(1)
else:
click.secho("ERROR: must specify --get or --set", bold=True)
click.echo(ctx.get_help())
ctx.exit()
|
Sets or retrieves credentials stored in your system's keyring using the
python-keyring module.
Global credentials can be shared between multiple configuration sections:
\b
[prod]
OS_PASSWORD=USE_KEYRING['sso_password']
...
\b
[staging]
OS_PASSWORD=USE_KEYRING['my sso_password']
...
You could retrieve or set the global credential using these commands:
\b
supernova -g global sso_password <= get the credential
supernova -s global sso_password <= set the credential
Local credentials are intended for use with only one configuration section:
\b
[prod]
OS_PASSWORD=USE_KEYRING
...
\b
[staging]
OS_PASSWORD=USE_KEYRING
...
You could retrieve or set the local credential using these commands:
\b
supernova -g prod OS_PASSWORD <= get the credential for prod
supernova -s prod OS_PASSWORD <= set the credential for prod
\b
supernova -g staging OS_PASSWORD <= get the credential for staging
supernova -s staging OS_PASSWORD <= set the credential for staging
Full documentation:
\b
http://supernova.readthedocs.org/en/latest/configuring/
|
27,563 |
async def _flush(self, request: 'Request', stacks: List[Stack]):  # 'Request' annotation assumed (lost in extraction)
for stack in stacks:
await self.platform.send(request, stack)
|
Perform the actual sending to platform. This is separated from
`flush()` since it needs to be inside a middleware call.
|
27,564 |
async def set_focus(self, set_focus_request):
response = hangouts_pb2.SetFocusResponse()
await self._pb_request('conversations/setfocus',
set_focus_request, response)
return response
|
Set focus to a conversation.
|
27,565 |
def inputcooker(self):
try:
while True:
c = self._inputcooker_getc()
if not self.iacseq:
if c == IAC:
self.iacseq += c
continue
elif c == chr(13) and not(self.sb):
c2 = self._inputcooker_getc(block=False)
if c2 == theNULL or c2 == '':
c = chr(10)
elif c2 == chr(10):
c = c2
else:
self._inputcooker_ungetc(c2)
c = chr(10)
elif c in [x[0] for x in self.ESCSEQ.keys()]:
codes = c
for keyseq in self.ESCSEQ.keys():
if len(keyseq) == 0:
continue
while codes == keyseq[:len(codes)] and len(codes) <= len(keyseq):
if codes == keyseq:
c = self.ESCSEQ[keyseq]
break
codes = codes + self._inputcooker_getc()
if codes == keyseq:
break
self._inputcooker_ungetc(codes[1:])
codes = codes[0]
self._inputcooker_store(c)
elif len(self.iacseq) == 1:
if c in (DO, DONT, WILL, WONT):
self.iacseq += c
continue
self.iacseq = ''
if c == IAC:
self._inputcooker_store(c)
else:
if c == SB:
self.sb = 1
self.sbdataq = ''
elif c == SE:
self.sb = 0
self.options_handler(self.sock, c, NOOPT)
elif len(self.iacseq) == 2:
cmd = self.iacseq[1]
self.iacseq = ''
if cmd in (DO, DONT, WILL, WONT):
self.options_handler(self.sock, cmd, c)
except (EOFError, socket.error):
pass
|
Input Cooker - Transfer from raw queue to cooked queue.
Set self.eof when connection is closed. Don't block unless in
the midst of an IAC sequence.
|
27,566 |
def _IOC(cls, dir, op, structure=None):
control = cls(dir, op, structure)
def do(dev, **args):
return control(dev, **args)
return do
|
Encode an ioctl id.
|
27,567 |
def user_structure(user, site):
    full_name = user.get_full_name().split()
    first_name = full_name[0]
    try:
        last_name = full_name[1]
    except IndexError:
        last_name = ''
    # dict keys reconstructed following the metaWeblog user struct convention
    return {'userid': user.pk,
            'email': user.email,
            'nickname': user.get_username(),
            'lastname': last_name,
            'firstname': first_name,
            'url': '%s://%s%s' % (
                PROTOCOL, site.domain,
                user.get_absolute_url())}
|
A user structure.
|
27,568 |
def set_wv_parameters(filter_name, grism_name):
    if filter_name not in EMIR_VALID_FILTERS:
        raise ValueError('Unexpected filter_name:', filter_name)
    if grism_name not in EMIR_VALID_GRISMS:
        raise ValueError('Unexpected grism_name:', grism_name)
    wv_parameters = {}
    wv_parameters['crpix1_enlarged'] = 1.0
    if grism_name == "J" and filter_name == "J":
        wv_parameters['islitlet_min'] = 2
        wv_parameters['islitlet_max'] = 54
        wv_parameters['nbrightlines'] = [18]
        wv_parameters['poly_crval1_linear'] = np.polynomial.Polynomial([
            1.25122231e+04,
            -4.83412417e+00,
            5.31657015e-04
        ])
        wv_parameters['poly_cdelt1_linear'] = np.polynomial.Polynomial([
            7.73411692e-01,
            -3.28055653e-05,
            -1.20813896e-08
        ])
        wv_parameters['crval1_enlarged'] = 11200.0000
        wv_parameters['cdelt1_enlarged'] = 0.77
        wv_parameters['naxis1_enlarged'] = EMIR_NAXIS1_ENLARGED
        wv_parameters['wvmin_expected'] = None
        wv_parameters['wvmax_expected'] = None
        wv_parameters['wvmin_useful'] = None
        wv_parameters['wvmax_useful'] = None
    elif grism_name == "H" and filter_name == "H":
        wv_parameters['islitlet_min'] = 2
        wv_parameters['islitlet_max'] = 54
        wv_parameters['nbrightlines'] = [12]
        wv_parameters['poly_crval1_linear'] = np.polynomial.Polynomial([
            1.65536274e+04,
            -7.63517173e+00,
            7.74790265e-04
        ])
        wv_parameters['poly_cdelt1_linear'] = np.polynomial.Polynomial([
            1.21327515e+00,
            1.42140078e-05,
            -1.27489119e-07
        ])
        wv_parameters['crval1_enlarged'] = 14500.0000
        wv_parameters['cdelt1_enlarged'] = 1.2200
        wv_parameters['naxis1_enlarged'] = EMIR_NAXIS1_ENLARGED
        wv_parameters['wvmin_expected'] = None
        wv_parameters['wvmax_expected'] = None
        wv_parameters['wvmin_useful'] = None
        wv_parameters['wvmax_useful'] = None
    elif grism_name == "K" and filter_name == "Ksp":
        wv_parameters['islitlet_min'] = 2
        wv_parameters['islitlet_max'] = 54
        wv_parameters['nbrightlines'] = [12]
        wv_parameters['poly_crval1_linear'] = np.polynomial.Polynomial([
            2.21044741e+04,
            -1.08737529e+01,
            9.05081653e-04
        ])
        wv_parameters['poly_cdelt1_linear'] = np.polynomial.Polynomial([
            1.72696857e+00,
            2.35009351e-05,
            -1.02164228e-07
        ])
        wv_parameters['crval1_enlarged'] = 19100.0000
        wv_parameters['cdelt1_enlarged'] = 1.7300
        wv_parameters['naxis1_enlarged'] = EMIR_NAXIS1_ENLARGED
        wv_parameters['wvmin_expected'] = None
        wv_parameters['wvmax_expected'] = None
        wv_parameters['wvmin_useful'] = None
        wv_parameters['wvmax_useful'] = None
    elif grism_name == "LR" and filter_name == "YJ":
        wv_parameters['islitlet_min'] = 4
        wv_parameters['islitlet_max'] = 55
        wv_parameters['nbrightlines'] = [20]
        wv_parameters['poly_crval1_linear'] = np.polynomial.Polynomial([
            1.04272465e+04,
            -2.33176855e+01,
            6.55101267e-03
        ])
        wv_parameters['poly_cdelt1_linear'] = np.polynomial.Polynomial([
            3.49037727e+00,
            1.26008332e-03,
            -4.66149678e-06
        ])
        wv_parameters['crval1_enlarged'] = 8900.0000
        wv_parameters['cdelt1_enlarged'] = 3.5600
        wv_parameters['naxis1_enlarged'] = 1270
        wv_parameters['wvmin_expected'] = 4000
        wv_parameters['wvmax_expected'] = 15000
        wv_parameters['wvmin_useful'] = 8900
        wv_parameters['wvmax_useful'] = 13400
    elif grism_name == "LR" and filter_name == "HK":
        wv_parameters['islitlet_min'] = 4
        wv_parameters['islitlet_max'] = 55
        wv_parameters['nbrightlines'] = [25]
        wv_parameters['poly_crval1_linear'] = np.polynomial.Polynomial([
            2.00704978e+04,
            -4.07702886e+01,
            -5.95247468e-03
        ])
        wv_parameters['poly_cdelt1_linear'] = np.polynomial.Polynomial([
            6.54247758e+00,
            2.09061196e-03,
            -2.48206609e-06
        ])
        wv_parameters['crval1_enlarged'] = 14500.0000
        wv_parameters['cdelt1_enlarged'] = 6.83
        wv_parameters['naxis1_enlarged'] = 1435
        wv_parameters['wvmin_expected'] = 8000
        wv_parameters['wvmax_expected'] = 30000
        wv_parameters['wvmin_useful'] = 14500
        wv_parameters['wvmax_useful'] = 24300
    else:
        print("filter_name..:", filter_name)
        print("grism_name...:", grism_name)
        raise ValueError("invalid filter_name and grism_name combination")
    return wv_parameters
|
Set wavelength calibration parameters for rectified images.
Parameters
----------
filter_name : str
Filter name.
grism_name : str
Grism name.
Returns
-------
wv_parameters : dictionary
Python dictionary containing relevant wavelength calibration
parameters:
- crpix1_enlarged : float
- crval1_enlarged : float
- cdelt1_enlarged : float
- naxis1_enlarged: int
- islitlet_min : int
Minimum slitlet number.
- islitlet_max : int
Maximum slitlet number.
- nbrightlines : python list
List of integers containing the number of brightlines to be
used in the initial wavelength calibration.
- poly_crval1_linear : numpy.polynomial.Polynomial instance
Polynomial providing the value of CRVAL1_linear as a function
of csu_bar_slit_center.
- poly_cdelt1_linear : numpy.polynomial.Polynomial instance
Polynomial providing the value of CDELT1_linear as a function
of csu_bar_slit_center.
|
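A short usage sketch (not from the source): evaluating the linear wavelength
solution for an illustrative csu_bar_slit_center value, assuming numpy and the
EMIR constants used above are importable.
import numpy as np

wv = set_wv_parameters("J", "J")
csu_bar_slit_center = 170.0  # illustrative value only
crval1 = wv['poly_crval1_linear'](csu_bar_slit_center)
cdelt1 = wv['poly_cdelt1_linear'](csu_bar_slit_center)
pixels = np.arange(1, wv['naxis1_enlarged'] + 1)
wavelengths = crval1 + (pixels - wv['crpix1_enlarged']) * cdelt1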
27,569 |
def is_android_raw(raw):
val = None
if raw[0:2] == b"PK" and b in raw:
val = "APK"
elif raw[0:3] == b"dex":
val = "DEX"
elif raw[0:3] == b"dey":
val = "DEY"
elif raw[0:4] == b"\x03\x00\x08\x00" or raw[0:4] == b"\x00\x00\x08\x00":
val = "AXML"
elif raw[0:4] == b"\x02\x00\x0C\x00":
val = "ARSC"
return val
|
Returns a string that describes the type of file, for common Android
specific formats
|
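A minimal usage sketch; the file names are hypothetical, and the function only
inspects the leading magic bytes (plus the manifest check for APKs).
for path in ("app.apk", "classes.dex", "layout.axml"):  # hypothetical files
    with open(path, "rb") as fh:
        print(path, "->", is_android_raw(fh.read()))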
27,570 |
def strip(value):
if not value or (ord(value[0]) > 32 and ord(value[-1]) > 32):
return value
s = 0
e = len(value)
while s < e:
if ord(value[s]) > 32:
break
s += 1
else:
return ""
for i in reversed(range(s, e)):
if ord(value[i]) > 32:
return value[s:i + 1]
return ""
|
REMOVE WHITESPACE (INCLUDING CONTROL CHARACTERS)
|
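A worked example of the behaviour described above: every character with
ordinal 32 or below counts as whitespace, so control characters are trimmed.
assert strip("\x00\t  hello world \x01") == "hello world"
assert strip("   ") == ""         # all-whitespace collapses to empty
assert strip("clean") == "clean"  # fast path: returned unchanged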
27,571 |
def octagonal_tiling_graph(M: int, N: int) -> nx.Graph:
grp = nx.Graph()
octogon = [[(1, 0), (0, 1)],
[(0, 1), (0, 2)],
[(0, 2), (1, 3)],
[(1, 3), (2, 3)],
[(2, 3), (3, 2)],
[(3, 2), (3, 1)],
[(3, 1), (2, 0)],
[(2, 0), (1, 0)]]
left = [[(1, 0), (1, -1)], [(2, 0), (2, -1)]]
up = [[(0, 1), (-1, 1)], [(0, 2), (-1, 2)]]
for m in range(M):
for n in range(N):
edges = octogon
if n != 0:
edges = edges + left
if m != 0:
edges = edges + up
for (x0, y0), (x1, y1) in edges:
grp.add_edge((m*4+x0, n*4+y0), (m*4+x1, n*4+y1))
positions = {node: node for node in grp}
    nx.set_node_attributes(grp, positions, 'positions')
return grp
|
Return the octagonal tiling graph (4.8.8, truncated square tiling,
truncated quadrille) of MxNx8 nodes
The 'positions' node attribute gives node coordinates for the octagonal
tiling. (Nodes are located on a square lattice, and edge lengths are
uneven)
|
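A usage sketch: per the docstring, an MxN tiling has M*N*8 nodes, and the
stored positions can feed networkx drawing directly.
import networkx as nx

g = octagonal_tiling_graph(2, 3)
assert g.number_of_nodes() == 2 * 3 * 8
pos = nx.get_node_attributes(g, 'positions')
# nx.draw(g, pos=pos)  # requires matplotlib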
27,572 |
def getAsGdalRaster(self, rasterFieldName, tableName, rasterIdFieldName, rasterId, gdalFormat, newSRID=None, **kwargs):
if not (gdalFormat in RasterConverter.supportedGdalRasterFormats(self._session)):
        raise ValueError('{0} is not a supported GDAL raster format. Use '
                         'supportedGdalRasterFormats() to list valid '
                         'formats.'.format(gdalFormat))
    if newSRID:
        srid = ', {0}'.format(newSRID)
    else:
        srid = ''
    if kwargs:
        optionsList = []
        for key, value in kwargs.iteritems():
            kwargString = "'{0}={1}'".format(key, value)
            optionsList.append(kwargString)
        optionsString = ', '.join(optionsList)
        options = ', ARRAY[{0}]'.format(optionsString)
    else:
        options = ''
    # Inferred SQL template for ST_AsGDALRaster (options array, optional SRID).
    statement = '''
        SELECT ST_AsGDALRaster({0}, '{1}'{5}{6})
        FROM {2}
        WHERE {3} = {4};
        '''.format(rasterFieldName, gdalFormat, tableName,
                   rasterIdFieldName, rasterId, options, srid)
result = self._session.execute(statement).scalar()
return bytes(result).decode()
|
Returns a string/buffer representation of the raster in the specified format. Wrapper for
ST_AsGDALRaster function in the database.
|
27,573 |
def open_resource(source):
try:
        return open(source, mode='rb')
except (IOError, OSError) as err:
try:
resource = urlopen(source)
except ValueError:
pass
else:
resource.name = resource.url
            if hasattr(resource, '__enter__'):
return resource
else:
return closing(resource)
raise err
except TypeError:
        if hasattr(source, 'read') and hasattr(source, 'readinto'):
return source
raise
|
Opens a resource in binary reading mode. Wraps the resource with a
context manager when it doesn't have one.
:param source: a filepath or an URL.
|
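A usage sketch (hypothetical paths); both the local and the remote branch
yield an object usable in a with-statement.
with open_resource("schema.xsd") as fh:                      # local file
    head = fh.read(64)
with open_resource("https://example.com/schema.xsd") as fh:  # remote URL
    head = fh.read(64)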
27,574 |
def update_main_table(self):
data = (json.dumps(self.settings),)
    # Inferred SQL; the table and column names here are assumptions.
    self.cursor.execute('CREATE TABLE IF NOT EXISTS main (settings TEXT)')
    self.cursor.execute('SELECT settings FROM main')
    if self.cursor.fetchall() == []:
        self.cursor.execute('INSERT INTO main (settings) VALUES (?)', data)
    else:
        self.cursor.execute('UPDATE main SET settings = (?)', data)
|
Write generator settings to database.
|
27,575 |
def run(self):
self.log.debug("Entered main loop")
while not self.shutdown:
if not self._pipe_service:
time.sleep(0.3)
self.update_status()
if self._pipe_service and self._pipe_service.poll(1):
try:
message = self._pipe_service.recv()
if isinstance(message, dict) and "band" in message:
try:
handler = getattr(self, "parse_band_" + message["band"])
except AttributeError:
handler = None
self.log.warning("Unknown band %s", str(message["band"]))
if handler:
handler(message)
else:
self.log.warning("Invalid message received %s", str(message))
except EOFError:
error_message = False
if self._service_status == CommonService.SERVICE_STATUS_END:
self.log.info("Service terminated")
elif self._service_status == CommonService.SERVICE_STATUS_ERROR:
error_message = "Service terminated with error code"
elif self._service_status in (
CommonService.SERVICE_STATUS_NONE,
CommonService.SERVICE_STATUS_NEW,
CommonService.SERVICE_STATUS_STARTING,
):
error_message = (
"Service may have died unexpectedly in "
+ "initialization (last known status: %s)"
% CommonService.human_readable_state.get(
self._service_status, self._service_status
)
)
else:
error_message = (
"Service may have died unexpectedly"
" (last known status: %s)"
% CommonService.human_readable_state.get(
self._service_status, self._service_status
)
)
if error_message:
self.log.error(error_message)
self._terminate_service()
if self.restart_service:
self.exponential_backoff()
else:
self.shutdown = True
if error_message:
raise workflows.Error(error_message)
with self.__lock:
if (
self._service is None
and self.restart_service
and self._service_factory
):
self.update_status(status_code=CommonService.SERVICE_STATUS_NEW)
self.switch_service()
if not self._transport.is_connected():
self._terminate_service()
raise workflows.Error("Lost transport layer connection")
self.log.debug("Left main loop")
self.update_status(status_code=CommonService.SERVICE_STATUS_TEARDOWN)
self._terminate_service()
self.log.debug("Terminating.")
|
The main loop of the frontend. Here incoming messages from the service
are processed and forwarded to the corresponding callback methods.
|
27,576 |
def signature_parser(func):
args, trail, kwargs, defaults = inspect.getargspec(func)
if not args:
args = []
if not defaults:
defaults = []
    if kwargs:
        raise Exception("Can't wrap a function with **kwargs")
    # Arguments without defaults are compulsory positionals; the rest
    # become optional --flags.
    needed = args[0:len(args) - len(defaults)]
    params = args[len(args) - len(defaults):]
    # Help texts, metavars and short options would normally be parsed
    # from the function's docstring; they start empty here.
    helps, metavars, shorts = {}, {}, {}
    parser = argparse.ArgumentParser(description=func.__doc__)
    # Flags added to every generated parser.
    special_flags = ['debug', 'version']
    defaults = list(defaults) + [False, False]
    helps['debug'] = 'set logging level to DEBUG'
    helps['version'] = 'version number and exit'
    params += special_flags
used_shorts = set()
for param, default in zip(params, defaults):
args = ["--%s" % param.replace("_", "-")]
short = None
if param in shorts:
short = shorts[param]
else:
if param not in special_flags and len(param) > 1:
first_char = param[0]
if first_char not in used_shorts:
used_shorts.add(first_char)
                    short = '-' + first_char
        if short and short != '-h':
args = [short] + args
        kwargs = {'default': default, 'dest': param.replace("-", "_")}
        if param == 'version':
            kwargs['action'] = 'version'
            kwargs['version'] = module_version(func)
        elif default is True:
            kwargs['action'] = 'store_false'
        elif default is False:
            kwargs['action'] = 'store_true'
        elif isinstance(default, list):
            kwargs['action'] = 'append'
            kwargs['type'] = unidecode
        else:
            kwargs['action'] = 'store'
            if type(default) in [type(None), unicode]:
                kwargs['type'] = unidecode
            else:
                kwargs['type'] = type(default)
        if param in helps:
            kwargs['help'] = helps[param]
        if param in metavars:
            kwargs['metavar'] = metavars[param]
parser.add_argument(*args, **kwargs)
for need in needed:
        kwargs = {'action': 'store', 'type': unidecode}
        if need in helps:
            kwargs['help'] = helps[need]
        if need in shorts:
            args = [shorts[need]]
        else:
            args = [need]
        parser.add_argument(*args, **kwargs)
    if trail:
        kwargs = {'action': 'store', 'type': unidecode, 'nargs': "*"}
        if trail in helps:
            kwargs['help'] = helps[trail]
        if trail in shorts:
            kwargs['metavar'] = shorts[trail]
        else:
            kwargs['metavar'] = trail
        parser.add_argument(trail, **kwargs)
return parser
|
Creates an argparse.ArgumentParser from the function's signature.
Arguments with no default are compulsory positional arguments,
Arguments with defaults are optional --flags.
If the default is True or False, the action of the flag will
toggle the argument and the flag takes no parameters.
If the default is None or a unicode string, the flag takes a
string argument that passed to the function as a unicode string
decoded using entrypoint.ENCODING
If the default is a string, then the argument is passed as a binary
string (be careful!), an int and a float cause parsing of those too.
If you want the arguments to be a file, consider using the
@withfile decorator.
Documentation can be read out of the function's docstring, which should
be of the basic form:
'''
A short introduction to your program.
arg: Help for positional parameter.
frm/from: Help for a positional parameter
with a reserved public name
(i.e. this displays to the user as "from"
but sets the "frm" variable)
--opt: Help for optional parameter.
-f --flag: An optional parameter that has a short version.
--mode=MODE: An optional parameter that takes a MODE
-t --type: A combination of both of the above, and one
which requires continuing of the documentation
on an indented line
An epilog explaining anything you feel needs further clarity.
----
Any documentation for the function itself that isn't covered by the
public documentation above the line.
'''
All sections, and indeed the presence of a docstring, are not required.
NOTE: for this to work, the function's signature must be intact;
some decorators (like @acceptargv for example) destroy or
mutilate the signature.
|
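A hypothetical example of the mapping described above (assuming the module's
Python-2-era helpers such as unidecode and module_version are available):
positionals become required arguments, defaulted parameters become flags.
def frobnicate(path, mode="fast", verbose=False):
    """A short introduction to your program."""

parser = signature_parser(frobnicate)
ns = parser.parse_args(["input.txt", "--mode", "slow", "--verbose"])
# ns.path == "input.txt"; ns.mode == "slow"; ns.verbose is True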
27,577 |
def encode16Int(value):
value = int(value)
encoded = bytearray(2)
encoded[0] = value >> 8
encoded[1] = value & 0xFF
return encoded
|
Encodes a 16 bit unsigned integer into MQTT format.
Returns a bytearray
|
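A worked example: 1024 is 0x0400, so its big-endian two-byte encoding is
0x04 0x00.
assert encode16Int(1024) == bytearray(b'\x04\x00')
assert encode16Int(0xFFFF) == bytearray(b'\xff\xff')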
27,578 |
def transaction_read(self, items):
try:
return self.dynamodb_client.transact_get_items(TransactItems=items)
except botocore.exceptions.ClientError as error:
if error.response["Error"]["Code"] == "TransactionCanceledException":
raise TransactionCanceled from error
raise BloopException("Unexpected error during transaction read.") from error
|
Wraps :func:`boto3.DynamoDB.Client.transact_get_items`.
:param items: Unpacked into "TransactItems" for :func:`boto3.DynamoDB.Client.transact_get_items`
:raises bloop.exceptions.TransactionCanceled: if the transaction was canceled.
:return: Dict with "Records" list
|
27,579 |
def _resolve_serializer(self, serializer):
if isinstance(serializer, Serializer):
return serializer
if serializer in self._serializers:
return self._serializers[serializer]
    raise RuntimeError('Unsupported serializer: {}'.format(serializer))
|
Resolve the given serializer.
:param serializer: The serializer to resolve
:type serializer: str or Serializer
:rtype: Serializer
|
27,580 |
def all_fields(self):
return [field
for container in FieldsContainer.class_container.values()
for field in getattr(self, container)]
|
A list with all the fields contained in this object.
|
27,581 |
def python_portable_string(string, encoding='utf-8'):
if isinstance(string, six.string_types):
return string
if six.PY3:
return string.decode(encoding)
    raise ValueError('Unsupported string type: %s' % str(type(string)))
|
Converts bytes into a string type.
Valid string types are returned without modification. So in Python 2, type str
and unicode are not converted.
In Python 3, type bytes is converted to type str (unicode)
|
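A quick sketch of both branches on Python 3 (with the default utf-8 encoding
assumed above):
assert python_portable_string("already text") == "already text"
assert python_portable_string(b"caf\xc3\xa9") == "café"  # bytes -> str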
27,582 |
def good_sequences_to_track(flow, motion_threshold=1.0):
endpoints = []
in_low = False
for i, val in enumerate(flow):
if val < motion_threshold:
if not in_low:
endpoints.append(i)
in_low = True
else:
if in_low:
endpoints.append(i-1)
in_low = False
def mean_score_func(m):
mu = 15
sigma = 8
top_val = normpdf(mu, mu, sigma)
return normpdf(m, mu, sigma) / top_val
def max_score_func(m):
mu = 40
sigma = 8
if m <= mu:
return 1.
else:
top_val = normpdf(mu, mu, sigma)
return normpdf(m, mu, sigma) / top_val
def length_score_func(l):
mu = 30
sigma = 10
top_val = normpdf(mu, mu, sigma)
return normpdf(l, mu, sigma) / top_val
min_length = 5
sequences = []
for k, i in enumerate(endpoints[:-1]):
for j in endpoints[k+1:]:
length = j - i
if length < min_length:
continue
seq = flow[i:j+1]
m_score = mean_score_func(np.mean(seq))
mx_score = max_score_func(np.max(seq))
l_score = length_score_func(length)
logger.debug("%d, %d scores: (mean=%.5f, max=%.5f, length=%.5f)" % (i,j,m_score, mx_score, l_score))
if min(m_score, mx_score, l_score) < 0.2:
continue
score = m_score + mx_score + l_score
sequences.append((i, j, score))
return sorted(sequences, key=lambda x: x[2], reverse=True)
|
Get list of good frames to do tracking in.
Looking at the optical flow, this function chooses a span of frames
that fulfill certain criteria.
These include
* not being too short or too long
* not too low or too high mean flow magnitude
* a low max value (avoids motion blur)
Currently, the cost function for a sequence is hard coded. Sorry about that.
Parameters
-------------
flow : ndarray
The optical flow magnitude
motion_threshold : float
The maximum amount of motion to consider for sequence endpoints.
Returns
------------
sequences : list
Sorted list of (a, b, score) elements (highest score first) of sequences
where a sequence is frames with frame indices in the span [a, b].
|
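A usage sketch with synthetic data, assuming the module-level normpdf and
logger used by the function are available: quiet spans around a burst of
moderate motion yield at least one scored sequence.
import numpy as np

flow = np.concatenate([np.full(10, 0.2),   # still -> candidate endpoints
                       np.full(30, 15.0),  # moderate motion, good to track
                       np.full(10, 0.2)])
for a, b, score in good_sequences_to_track(flow, motion_threshold=1.0):
    print("track frames %d-%d (score %.2f)" % (a, b, score))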
27,583 |
def _get_struct_gradient(self, shape_number):
obj = _make_object("Gradient")
bc = BitConsumer(self._src)
obj.SpreadMode = bc.u_get(2)
obj.InterpolationMode = bc.u_get(2)
obj.NumGradients = bc.u_get(4)
obj.GradientRecords = gradient_records = []
for _ in range(obj.NumGradients):
record = _make_object("GradRecord")
gradient_records.append(record)
record.Ratio = unpack_ui8(self._src)
if shape_number <= 2:
record.Color = self._get_struct_rgb()
else:
record.Color = self._get_struct_rgba()
return obj
|
Get the values for the GRADIENT record.
|
27,584 |
def export(self, nidm_version, export_dir, prepend_path):
if self.path is not None:
if export_dir is not None:
new_file = os.path.join(export_dir, self.filename)
if not self.path == new_file:
                if prepend_path.endswith('.zip'):
with zipfile.ZipFile(prepend_path) as z:
extracted = z.extract(str(self.path), export_dir)
shutil.move(extracted, new_file)
else:
if prepend_path:
file_copied = os.path.join(prepend_path, self.path)
else:
file_copied = self.path
shutil.copy(file_copied, new_file)
if self.temporary:
os.remove(self.path)
else:
new_file = self.path
        if nidm_version['num'] in ["1.0.0", "1.1.0"]:
loc = Identifier("file://./" + self.filename)
else:
loc = Identifier(self.filename)
        self.add_attributes([(NFO['fileName'], self.filename)])
if export_dir:
            self.add_attributes([(PROV['atLocation'], loc)])
        if nidm_version['num'] in ("1.0.0", "1.1.0"):
path, org_filename = os.path.split(self.path)
            if (org_filename != self.filename) \
and (not self.temporary):
self.add_attributes([(NFO[], org_filename)])
if self.is_nifti():
if self.sha is None:
self.sha = self.get_sha_sum(new_file)
if self.fmt is None:
self.fmt = "image/nifti"
self.add_attributes([
                (CRYPTO['sha512'], self.sha),
                (DCT['format'], self.fmt)
])
|
Copy file over to export_dir and create corresponding triples
|
27,585 |
def save(self, *args, **kwargs):
if not self.summary:
self.summary = truncatewords(self.body, 50)
self.body_formatted = sanetize_text(self.body)
    super(Article, self).save(*args, **kwargs)
|
Store summary if none was given
and created formatted version of body text.
|
27,586 |
def sum(self, phi1, inplace=True):
phi = self if inplace else self.copy()
if isinstance(phi1, (int, float)):
phi.values += phi1
else:
phi1 = phi1.copy()
extra_vars = set(phi1.variables) - set(phi.variables)
if extra_vars:
slice_ = [slice(None)] * len(phi.variables)
slice_.extend([np.newaxis] * len(extra_vars))
phi.values = phi.values[tuple(slice_)]
phi.variables.extend(extra_vars)
new_var_card = phi1.get_cardinality(extra_vars)
phi.cardinality = np.append(phi.cardinality, [new_var_card[var] for var in extra_vars])
extra_vars = set(phi.variables) - set(phi1.variables)
if extra_vars:
slice_ = [slice(None)] * len(phi1.variables)
slice_.extend([np.newaxis] * len(extra_vars))
phi1.values = phi1.values[tuple(slice_)]
phi1.variables.extend(extra_vars)
for axis in range(phi.values.ndim):
exchange_index = phi1.variables.index(phi.variables[axis])
phi1.variables[axis], phi1.variables[exchange_index] = phi1.variables[exchange_index], \
phi1.variables[axis]
phi1.values = phi1.values.swapaxes(axis, exchange_index)
phi.values = phi.values + phi1.values
if not inplace:
return phi
|
DiscreteFactor sum with `phi1`.
Parameters
----------
phi1: `DiscreteFactor` instance.
DiscreteFactor to be added.
inplace: boolean
If inplace=True it will modify the factor itself, else would return
a new factor.
Returns
-------
DiscreteFactor or None: if inplace=True (default) returns None
if inplace=False returns a new `DiscreteFactor` instance.
Example
-------
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> phi1 = DiscreteFactor(['x1', 'x2', 'x3'], [2, 3, 2], range(12))
>>> phi2 = DiscreteFactor(['x3', 'x4', 'x1'], [2, 2, 2], range(8))
>>> phi1.sum(phi2, inplace=True)
>>> phi1.variables
['x1', 'x2', 'x3', 'x4']
>>> phi1.cardinality
array([2, 3, 2, 2])
>>> phi1.values
array([[[[ 0, 0],
[ 4, 6]],
[[ 0, 4],
[12, 18]],
[[ 0, 8],
[20, 30]]],
[[[ 6, 18],
[35, 49]],
[[ 8, 24],
[45, 63]],
[[10, 30],
[55, 77]]]])
|
27,587 |
def read_json(path):
if path.startswith(("http://", "https://")):
try:
return json.loads(urlopen(path).read().decode())
except HTTPError:
raise FileNotFoundError("%s not found", path)
elif path.startswith("s3://"):
bucket = get_boto3_bucket(path.split("/")[2])
key = "/".join(path.split("/")[3:])
for obj in bucket.objects.filter(Prefix=key):
if obj.key == key:
                return json.loads(obj.get()['Body'].read().decode())
        raise FileNotFoundError("%s not found" % path)
else:
try:
with open(path, "r") as src:
return json.loads(src.read())
except:
raise FileNotFoundError("%s not found", path)
|
Read local or remote.
|
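A usage sketch; the paths are hypothetical, and the s3:// branch needs boto3
credentials configured.
config = read_json("config.json")                         # local file
# config = read_json("https://example.com/config.json")   # HTTP(S)
# config = read_json("s3://my-bucket/config.json")        # S3 object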
27,588 |
def unencrypt_file(inputfile, filename, passphrase=None):
import gnupg
def get_passphrase(passphrase=passphrase):
return passphrase or getpass() or None
temp_dir = tempfile.mkdtemp(dir=settings.TMP_DIR)
try:
        new_basename = os.path.basename(filename).replace('.gpg', '')
temp_filename = os.path.join(temp_dir, new_basename)
try:
inputfile.seek(0)
g = gnupg.GPG()
result = g.decrypt_file(file=inputfile, passphrase=get_passphrase(),
output=temp_filename)
if not result:
                raise DecryptionError('Decryption failed, status: %s' % result.status)
outputfile = create_spooled_temporary_file(temp_filename)
finally:
if os.path.exists(temp_filename):
os.remove(temp_filename)
finally:
os.rmdir(temp_dir)
return outputfile, new_basename
|
Unencrypt input file using GPG and remove the .gpg extension from its name.
:param inputfile: File to decrypt
:type inputfile: ``file`` like object
:param filename: File's name
:type filename: ``str``
:param passphrase: Passphrase of GPG key, if equivalent to False, it will
be asked to user. If user answer an empty pass, no
passphrase will be used.
:type passphrase: ``str`` or ``None``
:returns: Tuple with file and new file's name
:rtype: :class:`tempfile.SpooledTemporaryFile`, ``str``
|
27,589 |
def decoratornames(self):
result = set()
decoratornodes = []
if self.decorators is not None:
decoratornodes += self.decorators.nodes
decoratornodes += self.extra_decorators
for decnode in decoratornodes:
try:
for infnode in decnode.infer():
result.add(infnode.qname())
except exceptions.InferenceError:
continue
return result
|
Get the qualified names of each of the decorators on this function.
:returns: The names of the decorators.
:rtype: set(str)
|
27,590 |
def refreshTitles(self):
for index in range(self.count()):
widget = self.widget(index)
self.setTabText(index, widget.windowTitle())
|
Refreshes the titles for each view within this tab panel.
|
27,591 |
def bed(args):
from jcvi.formats.bed import sort
p = OptionParser(bed.__doc__)
p.add_option("--blockonly", default=False, action="store_true",
help="Only print out large blocks, not fragments [default: %default]")
p.add_option("--point", default=False, action="store_true",
help="Print accesssion as single point instead of interval")
p.add_option("--scale", type="float",
help="Scale the OM distance by factor")
p.add_option("--switch", default=False, action="store_true",
help="Switch reference and aligned map elements [default: %default]")
p.add_option("--nosort", default=False, action="store_true",
help="Do not sort bed [default: %default]")
opts, args = p.parse_args(args)
if len(args) != 1:
sys.exit(not p.print_help())
xmlfile, = args
bedfile = xmlfile.rsplit(".", 1)[0] + ".bed"
om = OpticalMap(xmlfile)
om.write_bed(bedfile, point=opts.point, scale=opts.scale,
blockonly=opts.blockonly, switch=opts.switch)
if not opts.nosort:
sort([bedfile, "--inplace"])
|
%prog bed xmlfile
Print summary of optical map alignment in BED format.
|
27,592 |
def dipole(src, rec, depth, res, freqtime, aniso=None, eperm=None, mperm=None,
verb=2):
printstartfinish(verb, t0)
return PTM, PTE
|
r"""Return the electromagnetic field due to a dipole source.
This is a modified version of ``empymod.model.dipole()``. It returns the
separated contributions of TM--, TM-+, TM+-, TM++, TMdirect, TE--, TE-+,
TE+-, TE++, and TEdirect.
Parameters
----------
src, rec : list of floats or arrays
Source and receiver coordinates (m): [x, y, z].
The x- and y-coordinates can be arrays, z is a single value.
The x- and y-coordinates must have the same dimension.
Sources or receivers placed on a layer interface are considered in the
upper layer.
Sources and receivers must be in the same layer.
depth : list
Absolute layer interfaces z (m); #depth = #res - 1
(excluding +/- infinity).
res : array_like
Horizontal resistivities rho_h (Ohm.m); #res = #depth + 1.
freqtime : float
Frequency f (Hz). (The name ``freqtime`` is kept for consistency with
``empymod.model.dipole()``; only one frequency at a time.)
aniso : array_like, optional
Anisotropies lambda = sqrt(rho_v/rho_h) (-); #aniso = #res.
Defaults to ones.
eperm : array_like, optional
Relative electric permittivities epsilon (-);
#eperm = #res. Default is ones.
mperm : array_like, optional
Relative magnetic permeabilities mu (-);
#mperm = #res. Default is ones.
verb : {0, 1, 2, 3, 4}, optional
Level of verbosity, default is 2:
- 0: Print nothing.
- 1: Print warnings.
- 2: Print additional runtime and kernel calls
- 3: Print additional start/stop, condensed parameter information.
- 4: Print additional full parameter information
Returns
-------
TM, TE : list of ndarrays, (nfreq, nrec, nsrc)
Frequency-domain EM field [V/m], separated into
TM = [TM--, TM-+, TM+-, TM++, TMdirect]
and
TE = [TE--, TE-+, TE+-, TE++, TEdirect].
However, source and receiver are normalised. So the source strength is
1 A and its length is 1 m. Therefore the electric field could also be
written as [V/(A.m2)].
The shape of EM is (nfreq, nrec, nsrc). However, single dimensions
are removed.
|
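A hypothetical call sketch for a three-layer model (air, water, sediment); all
coordinates and resistivities are illustrative only.
TM, TE = dipole(src=[0, 0, 250],
                rec=[2000, 0, 300],
                depth=[0, 300],        # two interfaces -> three layers
                res=[2e14, 0.3, 1.0],  # air, water, sediment (Ohm.m)
                freqtime=1.0,          # one frequency (Hz)
                verb=1)
total_tm = sum(TM)  # TM--, TM-+, TM+-, TM++ plus TMdirect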
27,593 |
def update_scheduled_time(self, when):
tiger = self.tiger
ts = get_timestamp(when)
assert ts
pipeline = tiger.connection.pipeline()
key = tiger._key(SCHEDULED, self.queue)
    tiger.scripts.zadd(key, ts, self.id, mode='xx', client=pipeline)
pipeline.zscore(key, self.id)
_, score = pipeline.execute()
if not score:
        raise TaskNotFound('Task {} not found in queue "{}" in state "{}".'.format(
            self.id, self.queue, SCHEDULED
        ))
self._ts = ts
|
Updates a scheduled task's date to the given date. If the task is not
scheduled, a TaskNotFound exception is raised.
|
27,594 |
def _check_box_toggled(self, widget, data=None):
active = widget.get_active()
arg_name = data
    if 'entry' in self.args[arg_name]:
        self.args[arg_name]['entry'].set_sensitive(active)
    if 'browse_btn' in self.args[arg_name]:
        self.args[arg_name]['browse_btn'].set_sensitive(active)
self.path_window.show_all()
|
Function manipulates with entries and buttons.
|
27,595 |
def _to_cwlfile_with_indexes(val, get_retriever):
val["indexes"] = _index_blacklist(val["indexes"])
tval = {"base": _remove_remote_prefix(val["base"]),
"indexes": [_remove_remote_prefix(f) for f in val["indexes"]]}
cp_dir, cp_base = os.path.split(os.path.commonprefix([tval["base"]] + tval["indexes"]))
if (cp_base and cp_dir == os.path.dirname(tval["base"]) and
not ("/snpeff/" in cp_dir or "/hisat2" in cp_dir)):
return _item_to_cwldata(val["base"], get_retriever, val["indexes"])
else:
dirname = os.path.dirname(tval["base"])
assert all([x.startswith(dirname) for x in tval["indexes"]])
return {"class": "File", "path": directory_tarball(dirname)}
|
Convert reads with ready to go indexes into the right CWL object.
Identifies the top level directory and creates a tarball, avoiding
trying to handle complex secondary setups which are not cross platform.
Skips doing this for reference files and standard setups like bwa, which
take up too much time and space to unpack multiple times.
|
27,596 |
def get_or_create(session: Session,
model: DeclarativeMeta,
defaults: Dict[str, Any] = None,
**kwargs: Any) -> Tuple[Any, bool]:
instance = session.query(model).filter_by(**kwargs).first()
if instance:
return instance, False
else:
params = dict((k, v) for k, v in kwargs.items()
if not isinstance(v, ClauseElement))
params.update(defaults or {})
instance = model(**params)
session.add(instance)
return instance, True
|
Fetches an ORM object from the database, or creates one if none existed.
Args:
session: an SQLAlchemy :class:`Session`
model: an SQLAlchemy ORM class
defaults: default initialization arguments (in addition to relevant
filter criteria) if we have to create a new instance
kwargs: optional filter criteria
Returns:
a tuple ``(instance, newly_created)``
See http://stackoverflow.com/questions/2546207 (this function is a
composite of several suggestions).
|
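A usage sketch with a hypothetical User model and an open session:
user, created = get_or_create(session, User,
                              defaults={"is_active": True},
                              email="ada@example.com")
if created:
    session.commit()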
27,597 |
def update_lang(self,
lang: Optional[Text],
data: List[Tuple[Text, Text]],
flags: Flags):
sd = SortingDict()
for item in (self.parse_item(x[0], x[1], flags) for x in data):
if item:
sd.append(item)
if lang not in self.dict:
self.dict[lang] = {}
d = self.dict[lang]
for k, v in sd.extract().items():
if k not in d:
d[k] = SentenceGroup()
d[k].update(v, flags)
|
Update translations for one specific lang
|
27,598 |
def match_url_regex(rules, url, callback):
parts = urlparse(url)
path = unquote(parts.path)
for rule in rules:
        if parts.scheme not in rule['schemes']:
continue
        if parts.netloc not in rule['netlocs']:
continue
        for regex in rule['path_regexes']:
m = re.search(regex, path)
if m is None:
continue
result = callback(m)
if result is not None:
return result
|
Given rules and a callback, find the rule that matches the url.
Rules look like::
(
{
'schemes': ['https', 'ssh'],
'netlocs': ['hg.mozilla.org'],
'path_regexes': [
"^(?P<path>/mozilla-(central|unified))(/|$)",
]
},
...
)
Args:
rules (list): a list of dictionaries specifying lists of ``schemes``,
``netlocs``, and ``path_regexes``.
url (str): the url to test
callback (function): a callback that takes an ``re.MatchObject``.
If it returns None, continue searching. Otherwise, return the
value from the callback.
Returns:
value: the value from the callback, or None if no match.
|
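A usage sketch following the rule shape shown in the docstring:
rules = [{
    'schemes': ['https', 'ssh'],
    'netlocs': ['hg.mozilla.org'],
    'path_regexes': [r"^(?P<path>/mozilla-(central|unified))(/|$)"],
}]
repo = match_url_regex(rules, "https://hg.mozilla.org/mozilla-central/",
                       lambda m: m.group('path'))
assert repo == "/mozilla-central"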
27,599 |
def persistent_object_context_changed(self):
super().persistent_object_context_changed()
def detach():
for listener in self.__interval_mutated_listeners:
listener.close()
self.__interval_mutated_listeners = list()
def reattach():
detach()
interval_descriptors = list()
if self.__source:
for region in self.__source.graphics:
if isinstance(region, Graphics.IntervalGraphic):
interval_descriptor = {"interval": region.interval, "color": "
interval_descriptors.append(interval_descriptor)
self.__interval_mutated_listeners.append(region.property_changed_event.listen(lambda k: reattach()))
if self.__target:
self.__target.interval_descriptors = interval_descriptors
def item_inserted(key, value, before_index):
if key == "graphics" and self.__target:
reattach()
def item_removed(key, value, index):
if key == "graphics" and self.__target:
reattach()
def source_registered(source):
self.__source = source
self.__item_inserted_event_listener = self.__source.item_inserted_event.listen(item_inserted)
self.__item_removed_event_listener = self.__source.item_removed_event.listen(item_removed)
reattach()
def target_registered(target):
self.__target = target
reattach()
def unregistered(source=None):
if self.__item_inserted_event_listener:
self.__item_inserted_event_listener.close()
self.__item_inserted_event_listener = None
if self.__item_removed_event_listener:
self.__item_removed_event_listener.close()
self.__item_removed_event_listener = None
if self.persistent_object_context:
self.persistent_object_context.subscribe(self.source_uuid, source_registered, unregistered)
self.persistent_object_context.subscribe(self.target_uuid, target_registered, unregistered)
else:
unregistered()
|
Override from PersistentObject.
|