Unnamed: 0 (int64, 0–389k) | code (string, lengths 26–79.6k) | docstring (string, lengths 1–46.9k)
---|---|---
377,900 | def cli_main():
    # NOTE: the original string literals were stripped during extraction;
    # the quoted values below are plausible reconstructions.
    import argparse
    import os
    import sys
    def is_file_or_pipe(arg):
        if not os.path.exists(arg) or os.path.isdir(arg):
            parser.error('The file {0} does not exist!'.format(arg))
        else:
            return arg
    def is_dir(arg):
        if not os.path.isdir(arg):
            parser.error('The directory {0} does not exist!'.format(arg))
        else:
            return arg
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-v', '--version', action='version',
                        version=version)
    parser.add_argument('template', help='The mustache file',
                        type=is_file_or_pipe)
    parser.add_argument('-d', '--data', dest='data',
                        help='The json data file',
                        type=is_file_or_pipe, default={})
    parser.add_argument('-p', '--path', dest='partials_path',
                        help='The directory where your partials reside',
                        type=is_dir, default='.')
    parser.add_argument('-e', '--ext', dest='partials_ext',
                        help='The extension for your mustache partials, \'mustache\' by default',
                        default='mustache')
    parser.add_argument('-l', '--left-delimiter', dest='def_ldel',
                        help='The default left delimiter, "{{" by default',
                        default='{{')
    parser.add_argument('-r', '--right-delimiter', dest='def_rdel',
                        help='The default right delimiter, "}}" by default',
                        default='}}')
    args = vars(parser.parse_args())
    try:
        sys.stdout.write(main(**args))
        sys.stdout.flush()
    except SyntaxError as e:
        print('Syntax error:')
        print('    ' + '\n    '.join(e.args[0].split('\n')))
        exit(1) | Render mustache templates using json files |
377,901 | def checkin_boardingpass(self, code, passenger_name, seat_class,
                          etkt_bnr, seat='', gate='', boarding_time=None,
                          is_cancel=False, qrcode_data=None, card_id=None):
    # NOTE: dict keys and the endpoint were stripped during extraction; they
    # are reconstructed from the parameter names, and the endpoint is a
    # plausible guess rather than verified source text.
    data = {
        'code': code,
        'passenger_name': passenger_name,
        'class': seat_class,
        'etkt_bnr': etkt_bnr,
        'seat': seat,
        'gate': gate,
        'is_cancel': is_cancel
    }
    if boarding_time:
        data['boarding_time'] = boarding_time
    if qrcode_data:
        data['qrcode_data'] = qrcode_data
    if card_id:
        data['card_id'] = card_id
    return self._post(
        'card/boardingpass/checkin',
        data=data
    ) | Flight boarding pass interface. |
377,902 | def _import_bin(filename):
    fid = open(filename, 'rb')  # binary Syscal file
def fget(fid, fmt, tsize):
buffer = fid.read(tsize)
result_raw = struct.unpack(fmt, buffer)
if len(result_raw) == 1:
return result_raw[0]
else:
return result_raw
fid.seek(0, 2)
total_size = fid.tell()
fid.seek(0)
    # NOTE: struct format strings and dict keys below were stripped during
    # extraction; they are inferred from byte counts and variable names.
    buffer = fid.read(4)
    version = struct.unpack('I', buffer)
    buffer = fid.read(1)
    typedesyscal = struct.unpack('c', buffer)[0]
    syscal_type = int.from_bytes(typedesyscal, 'big')
    buffer = fid.read(1024)
    comment_raw = struct.iter_unpack('c', buffer)
    comment = ''.join([x[0].decode() for x in comment_raw])
    metadata = {
        'version': version,
        'type': syscal_type,
        'comment': comment,
    }
measurements = []
counter = 0
    while fid.tell() < total_size:
        # struct formats reconstructed from the byte counts (hypothetical)
        buffer = fid.read(2)
        array_type = struct.unpack('h', buffer)  # read but unused
        moretmeasure = fget(fid, 'h', 2)  # read but unused
        mtime = fget(fid, 'f', 4)
        mdelay = fget(fid, 'f', 4)
        TypeCpXyz = fget(fid, 'h', 2)
        assert TypeCpXyz == 1
        fget(fid, 'h', 2)  # skip two filler bytes
        xpos = fget(fid, '4f', 16)
        ypos = fget(fid, '4f', 16)
        zpos = fget(fid, '4f', 16)
        sp = fget(fid, 'f', 4)
        vp = fget(fid, 'f', 4)
        Iab = fget(fid, 'f', 4)
        rho = fget(fid, 'f', 4)
        m = fget(fid, 'f', 4)
        q = fget(fid, 'f', 4)
        Tm = fget(fid, '20f', 20 * 4)
        Tm = np.array(Tm)
        Mx = fget(fid, '20f', 20 * 4)
        Mx = np.array(Mx)
        buffer = fid.read(1)
        buffer_bin = bin(ord(buffer))[2:].rjust(8, '0')
        channel = int(buffer_bin[4:], 2)
        channelnb = int(buffer_bin[0:4], 2)
        buffer = fid.read(1)
        buffer_bin = bin(ord(buffer))[2:].rjust(8, '0')
        overload = bool(int(buffer_bin[4]))
        channel_valid = bool(int(buffer_bin[5]))
        channel_sync = bool(int(buffer_bin[6]))
        gap_filler = bool(int(buffer_bin[7]))
        measurement_num = fget(fid, 'H', 2)
        filename = fget(fid, '12s', 12)
        latitude = fget(fid, 'f', 4)
        longitude = fget(fid, 'f', 4)
        NbCren = fget(fid, 'f', 4)
        RsChk = fget(fid, 'f', 4)
        vab = fget(fid, 'f', 4)
        batTX = fget(fid, 'f', 4)
        batRX = fget(fid, 'f', 4)
        temperature = fget(fid, 'f', 4)
        b = struct.unpack('2f', fid.read(2 * 4))  # read but unused
        measurements.append({
            # NOTE: the dict keys were stripped during extraction; the keys
            # below are reconstructed from the value names (hypothetical).
            # The original also listed measurement_num twice; the duplicate
            # key is dropped here.
            'version': version,
            'mtime': mtime,
            'x_a': xpos[0],
            'x_b': xpos[1],
            'x_m': xpos[2],
            'x_n': xpos[3],
            'y_a': ypos[0],
            'y_b': ypos[1],
            'y_m': ypos[2],
            'y_n': ypos[3],
            'z_a': zpos[0],
            'z_b': zpos[1],
            'z_m': zpos[2],
            'z_n': zpos[3],
            'mdelay': mdelay,
            'vp': vp,
            'q': q,
            'overload': overload,
            'channel_valid': channel_valid,
            'channel_sync': channel_sync,
            'gap_filler': gap_filler,
            'NbCren': NbCren,
            'm': m,
            'Tm': Tm,
            'Mx': Mx,
            'measurement_num': measurement_num,
            'vab': vab,
            'channel': channel,
            'sp': sp,
            'Iab': Iab,
            'rho': rho,
            'latitude': latitude,
            'longitude': longitude,
            'channelnb': channelnb,
            'RsChk': RsChk,
            'batTX': batTX,
            'batRX': batRX,
            'temperature': temperature,
        })
counter += 1
df = pd.DataFrame(
measurements
).reset_index()
return metadata, df | Read a .bin file generated by the IRIS Instruments Syscal Pro System
Parameters
----------
filename : string
Path to input filename
Returns
-------
metadata : dict
General information on the measurement
df : :py:class:`pandas.DataFrame`
dataframe containing all measurement data |
377,903 | def refresh_db(root=None):
    # NOTE: string literals reconstructed (they were stripped during
    # extraction); wording follows zypper's refresh output.
    salt.utils.pkg.clear_rtag(__opts__)
    ret = {}
    out = __zypper__(root=root).refreshable.call('refresh', '--force')
    for line in out.splitlines():
        if not line:
            continue
        if line.strip().startswith('Repository') and '\'' in line:
            try:
                key = line.split('\'')[1].strip()
                if 'is up to date' in line:
                    ret[key] = False
            except IndexError:
                continue
        elif line.strip().startswith('Building') and '\'' in line:
            key = line.split('\'')[1].strip()
            if 'done' in line:
                ret[key] = True
    return ret | Force a repository refresh by calling ``zypper refresh --force``, return a dict::
{'<database name>': Bool}
root
operate on a different root directory.
CLI Example:
.. code-block:: bash
salt '*' pkg.refresh_db |
377,904 | def create_db_schema(cls, cur, schema_name):
create_schema_script = "CREATE SCHEMA {0} ;\n".format(schema_name)
cur.execute(create_schema_script) | Create Postgres schema script and execute it on cursor |
377,905 | def hello_user(api_client):
    # NOTE: string literals reconstructed (originals stripped); the profile
    # keys follow the variable names, the printed message and separators are
    # illustrative placeholders.
    try:
        response = api_client.get_user_profile()
    except (ClientError, ServerError) as error:
        fail_print(error)
        return
    else:
        profile = response.json
        first_name = profile.get('first_name')
        last_name = profile.get('last_name')
        email = profile.get('email')
        message = 'Hello, {} {}. Successfully fetched the profile for {}.'
        message = message.format(first_name, last_name, email)
        success_print(message)
        success_print(profile)
        success_print('---')
        response = api_client.get_home_address()
        address = response.json
        success_print(address)
        success_print('---')
        response = api_client.get_user_activity()
        history = response.json
        success_print(history) | Use an authorized client to fetch and print profile information.
Parameters
api_client (UberRidesClient)
An UberRidesClient with OAuth 2.0 credentials. |
377,906 | def rc4_encrypt(key, data):
    if len(key) < 5 or len(key) > 16:
        raise ValueError(pretty_message(
            # message reconstructed; the original literal was stripped
            'key must be 5 to 16 bytes (40 to 128 bits) long - is %s',
            len(key)
        ))
    return _encrypt(Security.kSecAttrKeyTypeRC4, key, data, None, None) | Encrypts plaintext using RC4 with a 40-128 bit key
:param key:
The encryption key - a byte string 5-16 bytes long
:param data:
The plaintext - a byte string
:raises:
ValueError - when any of the parameters contain an invalid value
TypeError - when any of the parameters are of the wrong type
OSError - when an error is returned by the OS crypto library
:return:
A byte string of the ciphertext |
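A brief usage sketch for the row above (hedged: names as defined in the row; the import path is not shown in the source). With the reconstructed error message, any key outside 5-16 bytes raises ValueError:
    >>> ct = rc4_encrypt(b'0123456789abcdef', b'attack at dawn')   # doctest: +SKIP
    >>> rc4_encrypt(b'abc', b'plaintext')                          # doctest: +SKIP
    Traceback (most recent call last):
    ...
    ValueError: key must be 5 to 16 bytes (40 to 128 bits) long - is 3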
377,907 | def read_char(self, c: str) -> bool:
if self.read_eof():
return False
self._stream.save_context()
if c == self._stream.peek_char:
self._stream.incpos()
return self._stream.validate_context()
return self._stream.restore_context() | Consume the head byte if it matches ``c``, increment the current index and return True;
otherwise return False. It uses ``peek_char`` and is the equivalent of a literal character match in BNF. |
377,908 | def get_absolute(self, points):
    is_list = isinstance(points, list)
    points = ensure_numeric(points, num.float)
    if len(points.shape) == 1:
        # error messages reconstructed; the original literals were stripped
        msg = 'Single point must have two elements'
        if not len(points) == 2:
            raise ValueError(msg)
    msg = 'Input must be an N x 2 array or list of (x, y) values. '
    msg += 'I got an array with shape %s' % str(points.shape)
    if not points.shape[1] == 2:
        raise ValueError(msg)
    if not self.is_absolute():
        points = copy.copy(points)
        points[:, 0] += self.xllcorner
        points[:, 1] += self.yllcorner
    if is_list:
        points = points.tolist()
    return points | Given a set of points geo referenced to this instance,
return the points as absolute values. |
377,909 | def FMErrorByNum(num):
    # membership test made idiomatic; Python 2 raise syntax modernized
    if num not in FMErrorNum:
        raise FMServerError(num, FMErrorNum[-1])
    elif num == 102:
        raise FMFieldError(num, FMErrorNum[num])
    else:
        raise FMServerError(num, FMErrorNum[num]) | This function raises an error based on the specified error code. |
377,910 | async def send_media_group(self, chat_id: typing.Union[base.Integer, base.String],
media: typing.Union[types.MediaGroup, typing.List],
disable_notification: typing.Union[base.Boolean, None] = None,
reply_to_message_id: typing.Union[base.Integer,
None] = None) -> typing.List[types.Message]:
if isinstance(media, list):
media = types.MediaGroup(media)
files = dict(media.get_files())
media = prepare_arg(media)
payload = generate_payload(**locals(), exclude=['files'])  # excluded key reconstructed; the original literal was stripped
result = await self.request(api.Methods.SEND_MEDIA_GROUP, payload, files)
return [types.Message(**message) for message in result] | Use this method to send a group of photos or videos as an album.
Source: https://core.telegram.org/bots/api#sendmediagroup
:param chat_id: Unique identifier for the target chat or username of the target channel
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param media: A JSON-serialized array describing photos and videos to be sent
:type media: :obj:`typing.Union[types.MediaGroup, typing.List]`
:param disable_notification: Sends the message silently. Users will receive a notification with no sound
:type disable_notification: :obj:`typing.Union[base.Boolean, None]`
:param reply_to_message_id: If the message is a reply, ID of the original message
:type reply_to_message_id: :obj:`typing.Union[base.Integer, None]`
:return: On success, an array of the sent Messages is returned
:rtype: typing.List[types.Message] |
377,911 | def diff_config(jaide, second_host, mode):
    # NOTE: the color() message strings and color names were stripped during
    # extraction; the messages below are reconstructions in the same spirit,
    # not the original wording.
    try:
        output = ''.join([diff for diff in
                          jaide.diff_config(second_host, mode.lower())])
    except errors.SSHError:
        output = color('SSH error on port %s connecting to device: %s\n' %
                       (str(jaide.port), second_host), 'red')
    except errors.AuthenticationError:
        output = color('Authentication failed for device: %s\n' %
                       second_host, 'red')
    except AuthenticationException:
        output = color('Authentication failed for device: %s\n' %
                       second_host, 'red')
    except SSHException as e:
        output = color('Error connecting to device: %s\nError: %s\n' %
                       (second_host, str(e)), 'red')
    except socket.timeout:
        output = color('Timeout exceeded connecting to device: %s\n' %
                       second_host, 'red')
    except socket.gaierror:
        output = color('No route to host, or invalid hostname: %s\n' %
                       second_host, 'red')
    except socket.error:
        output = color('The device refused the connection on port %s\n'
                       % jaide.port, 'red')
    if output.strip() == '':
        output = color("There were no config differences between %s and %s\n" %
                       (jaide.host, second_host), 'yellow')
    else:
        return color_diffs(output)
    return output | Perform a show | compare with some set commands.
@param jaide: The jaide connection to the device.
@type jaide: jaide.Jaide object
@param second_host: The device IP or hostname of the second host to
| compare with.
@type second_host: str
@param mode: How to compare the configuration, either in 'set' mode or
| 'stanza' mode.
@type mode: str
@returns: The comparison between the two devices.
@rtype str |
377,912 | def rerun(client, revision, roots, siblings, inputs, paths):
    graph = Graph(client)
    outputs = graph.build(paths=paths, revision=revision)
    outputs = siblings(graph, outputs)
    output_paths = {node.path for node in outputs}
    roots = {graph.normalize_path(root) for root in roots}
    # assertion message and literals reconstructed (originals stripped)
    assert not roots & output_paths, 'roots must not overlap with outputs'
    workflow = inputs(
        client,
        graph.ascwl(
            input_paths=roots,
            output_paths=output_paths,
            outputs=outputs,
        )
    )
    client.pull_paths_from_storage(
        *(path for _, path in workflow.iter_input_files(client.workflow_path))
    )
    import yaml
    output_file = client.workflow_path / '{0}.cwl'.format(uuid.uuid4().hex)
    with output_file.open('w') as f:
        f.write(
            yaml.dump(
                ascwl(
                    workflow,
                    filter=lambda _, x: x is not None,
                    basedir=client.workflow_path,
                ),
                default_flow_style=False
            )
        )
    from ._cwl import execute
    execute(
        client,
        output_file,
        output_paths=output_paths,
    ) | Recreate files generated by a sequence of ``run`` commands. |
377,913 | def load_keypair(keypair_file):
    from Crypto.PublicKey import RSA
    with open(keypair_file, 'rb') as filey:  # mode reconstructed; read bytes for RSA.import_key
        key = RSA.import_key(filey.read())
    return quote_plus(key.publickey().exportKey().decode()) | load a keypair from a keypair file. We add attributes key (the raw key)
and public_key (the url prepared public key) to the client.
Parameters
==========
keypair_file: the pem file to load. |
377,914 | def delete_group(self, group_id):
    # NOTE: the route key, HTTP method, location id and API version were
    # stripped during extraction; 'groupId' and 'DELETE' are reconstructions,
    # while the GUID and version are unrecoverable placeholders.
    route_values = {}
    if group_id is not None:
        route_values['groupId'] = self._serialize.url('group_id', group_id, 'str')
    self._send(http_method='DELETE',
               location_id='<location-guid>',  # unrecoverable placeholder
               version='<api-version>',        # unrecoverable placeholder
               route_values=route_values) | DeleteGroup.
:param str group_id: |
377,915 | def content():
    # NOTE: all user-facing strings (image path, body text, tips) were
    # stripped during extraction and cannot be recovered; placeholders mark
    # where they belonged.
    message = m.Message()
    paragraph = m.Paragraph(
        m.Image('file://%s/<stripped-image-path>' % resources_path()),
        style_class='text-center'  # reconstructed, hypothetical
    )
    message.add(paragraph)
    body = tr(
        '<stripped help text mentioning shakemap>'
    )
    message.add(body)
    tips = m.BulletedList()
    tips.add(tr('<stripped tip>'))
    tips.add(tr('<stripped tip>'))
    tips.add(tr('<stripped tip>'))
    tips.add(tr('<stripped tip>'))
    message.add(tips)
    return message | Helper method that returns just the content.
This method was added so that the text could be reused in the
dock_help module.
.. versionadded:: 3.2.2
:returns: A message object without brand element.
:rtype: safe.messaging.message.Message |
377,916 | def compare(self, origin, pattern):
if origin is None or pattern is None:
return False
return re.match(pattern, origin) is not None | Args:
origin (:obj:`str`): original string
pattern (:obj:`str`): Regexp pattern string
Returns:
bool: True if matches otherwise False. |
377,917 | def compute_pscale(self,cd11,cd21):
return N.sqrt(N.power(cd11,2)+N.power(cd21,2)) * 3600. | Compute the pixel scale based on active WCS values. |
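As a worked example of the row above, the pixel scale is the norm of the first CD-matrix column converted from degrees to arcseconds; with cd11 = -2.7e-5 and cd21 = 0 degrees/pixel (illustrative values) it comes out near 0.0972 arcsec/pixel:
    >>> import numpy as N
    >>> round(float(N.sqrt(N.power(-2.7e-5, 2) + N.power(0.0, 2)) * 3600.), 4)
    0.0972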
377,918 | def _convert_to_folder(self, packages):
    # NOTE: several string literals (the URL, the marker text, and the regex
    # pattern) were stripped during extraction and are unrecoverable; the
    # dict keys and the '/' split are reconstructed from usage.
    url = '<stripped>'  # original URL literal unrecoverable
    with ThreadPoolExecutor(max_workers=12) as executor:
        futures = []
        for package in packages:
            future = executor.submit(self._get, url, package)
            futures.append({
                'future': future,
                'package': package
            })
        folders = []
        for i, future in enumerate(futures, start=1):
            r = future['future'].result()
            package = future['package']
            if '<stripped>' not in r.text:  # marker literal unrecoverable
                folder_name = package.split('/')[1]
            else:
                splat = list(filter(None, re.split(r'<stripped>', r.text)))  # pattern unrecoverable
                folder_name = splat[splat.index('<stripped>') + 1]
            if folder_name not in folders:
                folders.append(folder_name)
            else:
                print("Folder %s is duplicated (current %s, previous %s)" % (folder_name,
                      package, folders.index(folder_name)))
            if i % 25 == 0:
                print("Done %s." % i)
    return folders | Silverstripe's page contains a list of composer packages. This
function converts those to folder names. These may be different due
to installer-name.
Implemented exponential backoff in order to prevent packager from
being overly sensitive about the number of requests I was making.
@see: https://github.com/composer/installers#custom-install-names
@see: https://github.com/richardsjoqvist/silverstripe-localdate/issues/7 |
377,919 | def to_unitary_matrix(
self,
qubit_order: ops.QubitOrderOrList = ops.QubitOrder.DEFAULT,
qubits_that_should_be_present: Iterable[ops.Qid] = (),
ignore_terminal_measurements: bool = True,
dtype: Type[np.number] = np.complex128) -> np.ndarray:
        if not ignore_terminal_measurements and any(
                protocols.is_measurement(op)
                for op in self.all_operations()):
            raise ValueError('Circuit contains a measurement.')
        if not self.are_all_measurements_terminal():
            raise ValueError('Circuit contains a non-terminal measurement.')
qs = ops.QubitOrder.as_qubit_order(qubit_order).order_for(
self.all_qubits().union(qubits_that_should_be_present))
n = len(qs)
state = np.eye(1 << n, dtype=np.complex128)
state.shape = (2,) * (2 * n)
result = _apply_unitary_circuit(self, state, qs, dtype)
return result.reshape((1 << n, 1 << n)) | Converts the circuit into a unitary matrix, if possible.
Args:
qubit_order: Determines how qubits are ordered when passing matrices
into np.kron.
qubits_that_should_be_present: Qubits that may or may not appear
in operations within the circuit, but that should be included
regardless when generating the matrix.
ignore_terminal_measurements: When set, measurements at the end of
the circuit are ignored instead of causing the method to
fail.
dtype: The numpy dtype for the returned unitary. Defaults to
np.complex128. Specifying np.complex64 will run faster at the
cost of precision. `dtype` must be a complex np.dtype, unless
all operations in the circuit have unitary matrices with
exclusively real coefficients (e.g. an H + TOFFOLI circuit).
Returns:
A (possibly gigantic) 2d numpy array corresponding to a matrix
equivalent to the circuit's effect on a quantum state.
Raises:
ValueError: The circuit contains measurement gates that are not
ignored.
TypeError: The circuit contains gates that don't have a known
unitary matrix, e.g. gates parameterized by a Symbol. |
377,920 | def curve_to(self, x1, y1, x2, y2, x3, y3):
cairo.cairo_curve_to(self._pointer, x1, y1, x2, y2, x3, y3)
self._check_status() | Adds a cubic Bézier spline to the path
from the current point
to position ``(x3, y3)`` in user-space coordinates,
using ``(x1, y1)`` and ``(x2, y2)`` as the control points.
After this call the current point will be ``(x3, y3)``.
If there is no current point before the call to :meth:`curve_to`
this method will behave as if preceded by
a call to ``context.move_to(x1, y1)``.
:param x1: The X coordinate of the first control point.
:param y1: The Y coordinate of the first control point.
:param x2: The X coordinate of the second control point.
:param y2: The Y coordinate of the second control point.
:param x3: The X coordinate of the end of the curve.
:param y3: The Y coordinate of the end of the curve.
:type x1: float
:type y1: float
:type x2: float
:type y2: float
:type x3: float
:type y3: float |
377,921 | def _check_operator(self, operator):
    if not isinstance(operator, type(None)):
        tree = [obj.__name__ for obj in getmro(operator.__class__)]
        if not any([parent in tree for parent in self._op_parents]):
            # warning text reconstructed; the original literal was stripped
            warn('{0} does not inherit from any of the expected operator '
                 'parent classes'.format(str(operator.__class__))) | Check Set-Up
This method checks algorithm operator against the expected parent
classes
Parameters
----------
operator : str
Algorithm operator to check |
377,922 | def _get_NTLMv2_response(user_name, password, domain_name,
server_challenge, client_challenge, timestamp,
target_info):
nt_hash = comphash._ntowfv2(user_name, password, domain_name)
temp = ComputeResponse._get_NTLMv2_temp(timestamp, client_challenge,
target_info)
nt_proof_str = hmac.new(nt_hash,
(server_challenge + temp),
digestmod=hashlib.md5).digest()
response = nt_proof_str + temp
session_base_key = hmac.new(nt_hash, nt_proof_str,
digestmod=hashlib.md5).digest()
return response, session_base_key | [MS-NLMP] v28.0 2016-07-14
2.2.2.8 NTLM V2 Response: NTLMv2_RESPONSE
The NTLMv2_RESPONSE strucutre defines the NTLMv2 authentication
NtChallengeResponse in the AUTHENTICATE_MESSAGE. This response is used
only when NTLMv2 authentication is configured.
The guide on how this is computed is in 3.3.2 NTLM v2 Authentication.
:param user_name: The user name of the user we are trying to
authenticate with
:param password: The password of the user we are trying to authenticate
with
:param domain_name: The domain name of the user account we are
authenticated with
:param server_challenge: A random 8-byte response generated by the
server in the CHALLENGE_MESSAGE
:param client_challenge: A random 8-byte response generated by the
client for the AUTHENTICATE_MESSAGE
:param timestamp: An 8-byte timestamp in windows format, 100
nanoseconds since 1601-01-01
:param target_info: The target_info structure from the
CHALLENGE_MESSAGE with the CBT attached if required
:return response: NtChallengeResponse to the server_challenge
:return session_base_key: A session key calculated from the user
password challenge |
377,923 | def smart_text(s, encoding='utf-8', strings_only=False, errors='strict'):
if isinstance(s, Promise):
return s
return force_text(s, encoding, strings_only, errors) | Returns a text object representing 's' -- unicode on Python 2 and str on
Python 3. Treats bytestrings using the 'encoding' codec.
If strings_only is True, don't convert (some) non-string-like objects. |
377,924 | def transfer_multiple(self, destinations,
priority=prio.NORMAL, payment_id=None, unlock_time=0,
relay=True):
return self._backend.transfer(
destinations,
priority,
payment_id,
unlock_time,
account=self.index,
relay=relay) | Sends a batch of transfers. Returns a list of resulting transactions.
:param destinations: a list of destination and amount pairs:
[(:class:`Address <monero.address.Address>`, `Decimal`), ...]
:param priority: transaction priority, implies fee. The priority can be a number
from 1 to 4 (unimportant, normal, elevated, priority) or a constant
from `monero.prio`.
:param payment_id: ID for the payment (must be None if
:class:`IntegratedAddress <monero.address.IntegratedAddress>`
is used as the destination)
:param unlock_time: the extra unlock delay
:param relay: if `True`, the wallet will relay the transaction(s) to the network
immediately; when `False`, it will only return the transaction(s)
so they might be broadcasted later
:rtype: list of :class:`Transaction <monero.transaction.Transaction>` |
377,925 | def OnShiftVideo(self, event):
length = self.player.get_length()
time = self.player.get_time()
if event.GetWheelRotation() < 0:
target_time = max(0, time-length/100.0)
elif event.GetWheelRotation() > 0:
target_time = min(length, time+length/100.0)
self.player.set_time(int(target_time)) | Shifts through the video |
377,926 | def tob32(val):
ret = bytearray(4)
ret[0] = (val>>24)&M8
ret[1] = (val>>16)&M8
ret[2] = (val>>8)&M8
ret[3] = val&M8
return ret | Return the provided 32-bit value as a bytearray of four bytes (big-endian). |
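A quick equivalence check for the helper above, assuming M8 is the byte mask 0xff (implied by the shift-and-mask pattern): the result is the big-endian 4-byte encoding of the value:
    >>> M8 = 0xff
    >>> bytes(tob32(0x12345678)) == (0x12345678).to_bytes(4, 'big')   # doctest: +SKIP
    True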
377,927 | def map(self, fn, *iterables, timeout=None, chunksize=1, prefetch=None):
if timeout is not None: end_time = timeout + time.time()
if prefetch is None: prefetch = self._max_workers
if prefetch < 0: raise ValueError("prefetch count may not be negative")
argsiter = zip(*iterables)
fs = collections.deque(self.submit(fn, *args) for args in itertools.islice(argsiter, self._max_workers+prefetch))
def result_iterator():
nonlocal argsiter
try:
while fs:
res = fs[0].result() if timeout is None else fs[0].result(end_time-time.time())
del fs[0]
if argsiter:
try:
args = next(argsiter)
except StopIteration:
argsiter = None
else:
fs.append(self.submit(fn, *args))
yield res
finally:
for future in fs: future.cancel()
return result_iterator() | Collects iterables lazily, rather than immediately.
Docstring same as parent: https://docs.python.org/3/library/concurrent.futures.html#concurrent.futures.Executor
Implementation taken from this PR: https://github.com/python/cpython/pull/707 |
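A usage sketch, assuming the method lives on a ThreadPoolExecutor subclass (LazyExecutor is a hypothetical name). Because at most `_max_workers + prefetch` futures are pending at once, even an infinite input iterable is safe to map over:
    >>> import itertools
    >>> ex = LazyExecutor(max_workers=2)                          # doctest: +SKIP
    >>> squares = ex.map(lambda x: x * x, itertools.count())      # doctest: +SKIP
    >>> next(squares)                                             # doctest: +SKIP
    0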
377,928 | def loadInstance(self):
    if self._loaded:
        return
    self._loaded = True
    module_path = self.modulePath()
    package = projex.packageFromPath(module_path)
    path = os.path.normpath(projex.packageRootPath(module_path))
    if path in sys.path:
        sys.path.remove(path)
    sys.path.insert(0, path)
    try:
        __import__(package)
    except Exception as e:  # modernized from Python 2 'except Exception, e'
        err = Plugin(self.name(), self.version())
        err.setError(e)
        err.setFilepath(module_path)
        self._instance = err
        self.setError(e)
        # message reconstructed so all three values in opts are used
        msg = "%s.plugin(%s) errored loading instance from %s"
        opts = (self.proxyClass().__name__, self.name(), module_path)
        logger.warning(msg % opts)
        logger.error(e) | Loads the plugin from the proxy information that was created from the
registry file. |
377,929 | def read_excel_file(inputfile, sheet_name):
workbook = xlrd.open_workbook(inputfile)
output = []
found = False
for sheet in workbook.sheets():
if sheet.name == sheet_name:
found = True
for row in range(sheet.nrows):
values = []
for col in range(sheet.ncols):
values.append(sheet.cell(row, col).value)
output.append(values)
    if not found:
        # message reconstructed; the original literal was stripped
        raise MQ2Exception('Sheet "%s" could not be found' % sheet_name)
return output | Return a matrix containing all the information present in the
excel sheet of the specified excel document.
:arg inputfile: excel document to read
:arg sheet_name: the name of the excel sheet to return |
377,930 | def _set_up_pool_config(self):
    # NOTE: option keys reconstructed (originals stripped); the names follow
    # the attribute names and are hypothetical.
    self._max_conns = self.settings_dict['OPTIONS'].get('MAX_CONNS', pool_config_defaults['MAX_CONNS'])
    self._min_conns = self.settings_dict['OPTIONS'].get('MIN_CONNS', self._max_conns)
    self._test_on_borrow = self.settings_dict["OPTIONS"].get('TEST_ON_BORROW',
                                                             pool_config_defaults['TEST_ON_BORROW'])
    if self._test_on_borrow:
        self._test_on_borrow_query = self.settings_dict["OPTIONS"].get('TEST_ON_BORROW_QUERY',
                                                                       pool_config_defaults['TEST_ON_BORROW_QUERY'])
    else:
        self._test_on_borrow_query = None | Helper to configure pool options during DatabaseWrapper initialization. |
377,931 | def _as_symbol(value, is_symbol_value=True):
try:
return value.as_symbol()
except AttributeError:
assert isinstance(value, SymbolToken)
if not is_symbol_value:
try:
return value.regular_token()
except AttributeError:
pass
return value | Converts the input to a :class:`SymbolToken` suitable for being emitted as part of a :class:`IonEvent`.
If the input has an `as_symbol` method (e.g. :class:`CodePointArray`), it will be converted using that method.
Otherwise, it must already be a `SymbolToken`. In this case, there is nothing to do unless the input token is not a
symbol value and it is an :class:`_IVMToken`. This requires the `_IVMToken` to be converted to a regular
`SymbolToken`. |
377,932 | def i2c_slave_read(self):
    data = array.array('B', (0,) * self.BUFFER_SIZE)  # typecode reconstructed: unsigned bytes
status, addr, rx_len = api.py_aa_i2c_slave_read_ext(self.handle,
self.BUFFER_SIZE, data)
_raise_i2c_status_code_error_if_failure(status)
if addr == 0x80:
addr = 0x00
del data[rx_len:]
return (addr, bytes(data)) | Read the bytes from an I2C slave reception.
The bytes are returned as a string object. |
377,933 | def _get_pdf_filenames_at(source_directory):
if not os.path.isdir(source_directory):
raise ValueError("%s is not a directory!" % source_directory)
return [os.path.join(source_directory, filename)
for filename in os.listdir(source_directory)
if filename.endswith(PDF_EXTENSION)] | Find all PDF files in the specified directory.
Args:
source_directory (str): The source directory.
Returns:
list(str): Filepaths to all PDF files in the specified directory.
Raises:
ValueError |
377,934 | def parse_changes(json):
    changes = []
    dates = len(json)
    for date in range(1, dates):
        # key reconstructed: closing price, per the docstring's "raw market statistics"
        last_close = json[date - 1]['close']
        now_close = json[date]['close']
        changes.append(now_close - last_close)
    logger.debug('Price changes: {}'.format(changes))  # message reconstructed
    return changes | Gets price changes from JSON
Args:
json: JSON data as a list of dict dates, where the keys are
the raw market statistics.
Returns:
List of floats of price changes between entries in JSON. |
377,935 | def find_by_task(self, task, params={}, **options):
path = "/tasks/%s/attachments" % (task)
return self.client.get_collection(path, params, **options) | Returns the compact records for all attachments on the task.
Parameters
----------
task : {Id} Globally unique identifier for the task.
[params] : {Object} Parameters for the request |
377,936 | def getionimage(p, mz_value, tol=0.1, z=1, reduce_func=sum):
    import warnings
    tol = abs(tol)
    im = np.zeros((p.imzmldict["max count of pixels y"], p.imzmldict["max count of pixels x"]))
    for i, (x, y, z_) in enumerate(p.coordinates):
        if z_ == 0:
            # the original constructed a UserWarning without emitting it; warn properly
            warnings.warn("z coordinate = 0 present, if you're getting blank images set getionimage(.., .., z=0)")
if z_ == z:
mzs, ints = map(lambda x: np.asarray(x), p.getspectrum(i))
min_i, max_i = _bisect_spectrum(mzs, mz_value, tol)
im[y - 1, x - 1] = reduce_func(ints[min_i:max_i+1])
return im | Get an image representation of the intensity distribution
of the ion with specified m/z value.
By default, the intensity values within the tolerance region are summed.
:param p:
the ImzMLParser (or anything else with similar attributes) for the desired dataset
:param mz_value:
m/z value for which the ion image shall be returned
:param tol:
Absolute tolerance for the m/z value, such that all ions with values
mz_value-|tol| <= x <= mz_value+|tol| are included. Defaults to 0.1
:param z:
z Value if spectrogram is 3-dimensional.
:param reduce_func:
the bahaviour for reducing the intensities between mz_value-|tol| and mz_value+|tol| to a single value. Must
be a function that takes a sequence as input and outputs a number. By default, the values are summed.
:return:
numpy matrix with each element representing the ion intensity in this
pixel. Can be easily plotted with matplotlib |
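Typical usage, assuming the pyimzML package layout this function appears to belong to (the file name is illustrative):
    >>> from pyimzml.ImzMLParser import ImzMLParser, getionimage   # doctest: +SKIP
    >>> p = ImzMLParser('example.imzML')                           # doctest: +SKIP
    >>> im = getionimage(p, 885.55, tol=0.25, reduce_func=max)     # doctest: +SKIP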
377,937 | def dispatch(self, *args, **kwargs) -> Awaitable[bool]:
event = self.event_class(self.source(), cast(str, self.topic), *args, **kwargs)
return self.dispatch_raw(event) | Create and dispatch an event.
This method constructs an event object and then passes it to :meth:`dispatch_event` for
the actual dispatching.
:param args: positional arguments to the constructor of the associated event class
:param kwargs: keyword arguments to the constructor of the associated event class
:returns: an awaitable that completes when all the callbacks have been called (and any
awaitables waited on) and resolves to ``True`` if there were no exceptions raised by
the callbacks, ``False`` otherwise |
377,938 | def load(self, config, file_object, prefer=None):
return self.loads(config, file_object.read(), prefer=prefer) | An abstract method that loads from a given file object.
:param class config: The config class to load into
:param file file_object: The file object to load from
:param str prefer: The preferred serialization module name
:returns: A dictionary converted from the content of the given file object
:rtype: dict |
377,939 | def func_load(code, defaults=None, closure=None, globs=None):
if isinstance(code, (tuple, list)):
code, defaults, closure = code
        code = marshal.loads(code.encode('raw_unicode_escape'))  # encoding reconstructed; round-trips marshal bytes stored as str
if globs is None:
globs = globals()
return python_types.FunctionType(code, globs,
name=code.co_name,
argdefs=defaults,
closure=closure) | Deserialize user defined function. |
377,940 | def get_context(self, template):
context = {}
for regex, context_generator in self.contexts:
if re.match(regex, template.name):
if inspect.isfunction(context_generator):
if _has_argument(context_generator):
context.update(context_generator(template))
else:
context.update(context_generator())
else:
context.update(context_generator)
if not self.mergecontexts:
break
return context | Get the context for a template.
If no matching value is found, an empty context is returned.
Otherwise, this returns either the matching value if the value is
dictionary-like or the dictionary returned by calling it with
*template* if the value is a function.
If several matching values are found, the resulting dictionaries will
be merged before being returned if mergecontexts is True. Otherwise,
only the first matching value is returned.
:param template: the template to get the context for |
377,941 | async def get_target(config, url):
    # cache namespace key reconstructed (the original literal was stripped)
    previous = config.cache.get(
        'target', url, schema_version=SCHEMA_VERSION) if config.cache else None
    headers = previous.caching if previous else None
    request = await utils.retry_get(config, url, headers=headers)
    if not request or not request.success:
        return previous
    if request.cached:
        return previous
    current = Target(request)
    if config.cache:
        config.cache.set('target', url, current)
    return current | Given a URL, get the webmention endpoint |
377,942 | def call(self, method, args={}, retry=False, retry_policy=None,
ticket=None, **props):
ticket = ticket or uuid()
reply_q = self.get_reply_queue(ticket)
self.cast(method, args, declare=[reply_q], reply_to=ticket, **props)
return self.AsyncResult(ticket, self) | Send message to the same actor and return :class:`AsyncResult`. |
377,943 | def _sift_and_init_configs(self, input_dict):
configs = {}
for k, v in iteritems(input_dict):
if (k not in map(str.lower, self.format_order) and
any([f_order.lower() in k for f_order in self.format_order])):
try:
self.CFG.get(self.CONFIG_PATH + [k])
except errors.ResourceNotFoundError:
pass
finally:
configs[k] = v
for key, val in iteritems(configs):
input_dict.pop(key, None)
if configs:
self.initialize_overall_config_settings(input_dict=configs) | Removes all key/v for keys that exist in the overall config and activates them.
Used to weed out config keys from tokens in a given input. |
377,944 | def chunk(seq: ActualIterable[T]) -> ActualIterable[ActualIterable[T]]:
seq = iter(seq)
try:
head = next(seq)
except StopIteration:
return iter(seq)
current_status = head
group = [head]
for each in seq:
status = each
if status != current_status:
yield group
group = [each]
else:
group.append(each)
current_status = status
if group:
yield group | >>> from Redy.Collections import Traversal, Flow
>>> x = [1, 1, 2]
>>> assert Flow(x)[Traversal.chunk][list].unbox == [[1, 1], [2]]
>>> assert Flow([])[Traversal.chunk][list].unbox == [] |
377,945 | def plotting_context(context=None, font_scale=1, rc=None):
if context is None:
context_dict = {k: mpl.rcParams[k] for k in _context_keys}
elif isinstance(context, dict):
context_dict = context
else:
contexts = ["paper", "notebook", "talk", "poster"]
if context not in contexts:
raise ValueError("context must be in %s" % ", ".join(contexts))
base_context = {
"figure.figsize": np.array([8, 5.5]),
"font.size": 12,
"axes.labelsize": 11,
"axes.titlesize": 12,
"xtick.labelsize": 10,
"ytick.labelsize": 10,
"legend.fontsize": 10,
"grid.linewidth": 1,
"lines.linewidth": 1.75,
"patch.linewidth": .3,
"lines.markersize": 7,
"lines.markeredgewidth": 0,
"xtick.major.width": 1,
"ytick.major.width": 1,
"xtick.minor.width": .5,
"ytick.minor.width": .5,
"xtick.major.pad": 7,
"ytick.major.pad": 7,
}
scaling = dict(paper=.8, notebook=1, talk=1.3, poster=1.6)[context]
context_dict = {k: v * scaling for k, v in base_context.items()}
font_keys = ["axes.labelsize", "axes.titlesize", "legend.fontsize",
"xtick.labelsize", "ytick.labelsize", "font.size"]
font_dict = {k: context_dict[k] * font_scale for k in font_keys}
context_dict.update(font_dict)
if mpl.__version__ == "1.4.2":
context_dict["lines.markeredgewidth"] = 0.01
if rc is not None:
rc = {k: v for k, v in rc.items() if k in _context_keys}
context_dict.update(rc)
context_object = _PlottingContext(context_dict)
return context_object | Return a parameter dict to scale elements of the figure.
This affects things like the size of the labels, lines, and other
elements of the plot, but not the overall style. The base context
is "notebook", and the other contexts are "paper", "talk", and "poster",
which are version of the notebook parameters scaled by .8, 1.3, and 1.6,
respectively.
This function returns an object that can be used in a ``with`` statement
to temporarily change the context parameters.
Parameters
----------
context : dict, None, or one of {paper, notebook, talk, poster}
A dictionary of parameters or the name of a preconfigured set.
font_scale : float, optional
Separate scaling factor to independently scale the size of the
font elements.
rc : dict, optional
Parameter mappings to override the values in the preset seaborn
context dictionaries. This only updates parameters that are
considered part of the context definition.
Examples
--------
>>> c = plotting_context("poster")
>>> c = plotting_context("notebook", font_scale=1.5)
>>> c = plotting_context("talk", rc={"lines.linewidth": 2})
>>> import matplotlib.pyplot as plt
>>> with plotting_context("paper"):
... f, ax = plt.subplots()
... ax.plot(x, y) # doctest: +SKIP
See Also
--------
set_context : set the matplotlib parameters to scale plot elements
axes_style : return a dict of parameters defining a figure style
color_palette : define the color palette for a plot |
377,946 | def get_namespace_statistics(self, namespace, start_offset, end_offset):
    cursor = self.cursor
    # SQL reconstructed from the docstring; table and column names are
    # hypothetical, the original statement was stripped
    cursor.execute(
        'SELECT SUM(count) FROM namespace_statistics '
        'WHERE namespace = %s AND offset BETWEEN %s AND %s',
        (namespace, start_offset, end_offset))
    return [long(count or 0) for count in cursor.fetchone()] | Get namespace statistics for the period between start_offset and
end_offset (inclusive) |
377,947 | def init_account(self):
    ghuser = self.api.me()
    # token name and scope strings reconstructed (originals stripped)
    hook_token = ProviderToken.create_personal(
        'github-webhook',
        self.user_id,
        scopes=['webhooks:event'],
        is_internal=True,
    )
    self.account.extra_data = dict(
        id=ghuser.id,
        login=ghuser.login,
        name=ghuser.name,
        tokens=dict(
            webhook=hook_token.id,
        ),
        repos=dict(),
        last_sync=iso_utcnow(),
    )
    db.session.add(self.account)
    self.sync(hooks=False) | Setup a new GitHub account. |
377,948 | def netspeed_by_name(self, hostname):
addr = self._gethostbyname(hostname)
return self.netspeed_by_addr(addr) | Returns NetSpeed name from hostname. Can be Unknown, Dial-up,
Cable, or Corporate.
:arg hostname: Hostname (e.g. example.com) |
377,949 | def from_array(array):
    if array is None or not array:
        return None
    assert_type_or_raise(array, dict, parameter_name="array")
    from pytgbot.api_types.sendable.reply_markup import KeyboardButton
    # field names follow Telegram's ReplyKeyboardMarkup (reconstructed)
    data = {}
    data['keyboard'] = KeyboardButton.from_array_list(array.get('keyboard'), list_level=2)
    data['resize_keyboard'] = bool(array.get('resize_keyboard')) if array.get('resize_keyboard') is not None else None
    data['one_time_keyboard'] = bool(array.get('one_time_keyboard')) if array.get('one_time_keyboard') is not None else None
    data['selective'] = bool(array.get('selective')) if array.get('selective') is not None else None
    instance = ReplyKeyboardMarkup(**data)
    instance._raw = array
    return instance | Deserialize a new ReplyKeyboardMarkup from a given dictionary.
:return: new ReplyKeyboardMarkup instance.
:rtype: ReplyKeyboardMarkup |
377,950 | def get_command(self, ctx, cmd_name):
cmd_name = self.MAP.get(cmd_name, cmd_name)
return super(AliasedGroup, self).get_command(ctx, cmd_name) | Map some aliases to their 'real' names. |
377,951 | def imagetransformer_sep_channels():
hparams = imagetransformer_base()
hparams.num_heads = 4
hparams.attention_key_channels = hparams.attention_value_channels = 0
hparams.hidden_size = 256
hparams.filter_size = 512
hparams.num_hidden_layers = 6
return hparams | separate rgb embeddings. |
377,952 | def _GenerateFleetspeakConfig(self, template_dir, rpm_build_dir):
source_config = os.path.join(
template_dir, "fleetspeak",
os.path.basename(
config.CONFIG.Get(
"ClientBuilder.fleetspeak_config_path", context=self.context)))
fleetspeak_service_dir = config.CONFIG.Get(
"ClientBuilder.fleetspeak_service_dir", context=self.context)
dest_config_dir = os.path.join(rpm_build_dir, fleetspeak_service_dir[1:])
utils.EnsureDirExists(dest_config_dir)
dest_config_path = os.path.join(
dest_config_dir,
config.CONFIG.Get(
"Client.fleetspeak_unsigned_config_fname", context=self.context))
self.GenerateFile(
input_filename=source_config, output_filename=dest_config_path) | Generates a Fleetspeak config for GRR. |
377,953 | def do_json_set_many(self, params):
    if len(params.keys_values_types) % 3 != 0:
        # message reconstructed (the original literal was stripped)
        self.show_output("keys_values_types must come in groups of 3")
        return
    for key, _, _ in grouper(params.keys_values_types, 3):
        try:
            Keys.validate(key)
        except Keys.Bad as ex:
            self.show_output(str(ex))
            return
    jstr, stat = self._zk.get(params.path)
    try:
        obj_src = json_deserialize(jstr)
    except BadJSON:
        self.show_output("Path %s has bad JSON.", params.path)
        return  # added: falling through would reference an undefined obj_src
    obj_dst = copy.deepcopy(obj_src)
    for key, value, ptype in grouper(params.keys_values_types, 3):
        try:
            Keys.set(obj_dst, key, to_type(value, ptype))
        except Keys.Missing as ex:
            self.show_output("Path %s is missing key %s.", params.path, ex)
            return
        except ValueError:
            self.show_output("Bad value_type")
            return
    self.set(params.path, json.dumps(obj_dst), version=stat.version) | \x1b[1mNAME\x1b[0m
json_set_many - like `json_set`, but for multiple key/value pairs
\x1b[1mSYNOPSIS\x1b[0m
json_set_many <path> <keys> <value> <value_type> <keys1> <value1> <value_type1> ...
\x1b[1mDESCRIPTION\x1b[0m
If the key exists and the value is different, the znode will be updated with the key set to its new value.
If the key does not exist, it'll be created and the znode will be updated with the serialized version of
the new object. The value's type will be determined by the value_type parameter.
This is an atomic operation, either all given keys are set in one ZK operation or none are.
\x1b[1mEXAMPLES\x1b[0m
> create /props '{"a": {"b": 4}}'
> json_cat /props
{
"a": {
"b": 4
}
}
> json_set_many /props a.b 5 int a.c.d true bool
> json_cat /props
{
"a": {
"c": {
"d": true
},
"b": 5
}
} |
377,954 | def main():
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--file",
required=True,
help="input file",
type=str)
parser.add_argument("-l", "--locus",
required=True,
help="Locus",
type=str)
parser.add_argument("-k", "--kir",
help="Option for running with KIR",
action=)
parser.add_argument("-s", "--server",
help="Option for running with a server",
action=)
parser.add_argument("-v", "--verbose",
help="Option for running in verbose",
action=)
args = parser.parse_args()
fastafile = args.file
locus = args.locus
verbose = False
if args.verbose:
verbose = True
kir = False
if args.kir:
kir = True
serv = False
if args.server:
serv = True
    if verbose:
        # log format strings reconstructed (hypothetical)
        logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                            datefmt='%m/%d/%Y %I:%M:%S %p',
                            level=logging.INFO)
server = None
if serv:
server = BioSeqDatabase.open_database(driver="pymysql", user="root",
passwd="", host="localhost",
db="bioseqdb")
seqann = BioSeqAnn(verbose=True, kir=kir)
for seq in SeqIO.parse(fastafile, "fasta"):
ann = seqann.annotate(seq, locus=locus)
        print('{0} {1} {2}'.format("", str(seq.description), ""))  # format string reconstructed minimally; decorative literals unrecoverable
l = 0
for f in ann.annotation:
if isinstance(ann.annotation[f], DBSeq):
print(f, ann.method, str(ann.annotation[f]), sep="\t")
l += len(ann.annotation[f])
else:
print(f, ann.method, str(ann.annotation[f].seq), sep="\t")
l += len(ann.annotation[f].seq)
print("")
if serv:
server.close() | This is run if file is directly executed, but not if imported as
module. Having this in a separate function allows importing the file
into interactive python, and still able to execute the
function for testing |
377,955 | def version():
    # the package directory name was stripped and is a hypothetical
    # placeholder; the '__version__' prefix is reconstructed from usage
    with open(os.path.join('<package>', '__init__.py')) as input_file:
        for line in input_file:
            if line.startswith('__version__'):
                return ast.parse(line).body[0].value.s | Return version string. |
377,956 | def retract_project_bid(session, bid_id):
    # header and key strings reconstructed from common Freelancer API
    # conventions; treat them as plausible, not verified
    headers = {
        'Content-Type': 'application/x-www-form-urlencoded'
    }
    bid_data = {
        'action': 'retract'
    }
    endpoint = 'bids/{}'.format(bid_id)
    response = make_put_request(session, endpoint, headers=headers,
                                params_data=bid_data)
    json_data = response.json()
    if response.status_code == 200:
        return json_data['status']
    else:
        json_data = response.json()
        raise BidNotRetractedException(message=json_data['message'],
                                       error_code=json_data['error_code'],
                                       request_id=json_data['request_id']) | Retract a bid on a project |
377,957 | def custom_code(self, mask: str = '@###',
                char: str = '@', digit: str = '#') -> str:
char_code = ord(char)
digit_code = ord(digit)
code = bytearray(len(mask))
def random_int(a: int, b: int) -> int:
b = b - a
return int(self.random() * b) + a
_mask = mask.encode()
for i, p in enumerate(_mask):
if p == char_code:
a = random_int(65, 91)
elif p == digit_code:
a = random_int(48, 58)
else:
a = p
code[i] = a
return code.decode() | Generate custom code using ascii uppercase and random integers.
:param mask: Mask of code.
:param char: Placeholder for characters.
:param digit: Placeholder for digits.
:return: Custom code. |
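With the defaults reconstructed above ('@' standing for a random uppercase letter, '#' for a random digit, other characters passing through), a mask renders position by position; `p` is whatever provider instance exposes this method, and the output is one possible draw:
    >>> p.custom_code(mask='@@-####')   # doctest: +SKIP
    'XQ-2481'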
377,958 | def process(self, event):
logger.info(f"{self}: put {event.src_path}")
self.queue.put(os.path.basename(event.src_path)) | Put and process tasks in queue. |
377,959 | def retrieveJsonResponseFromServer(url):
jsonData = None
try:
data = urllib.urlopen(url)
jsonData = simplejson.load(data)
except Exception as ex:
raise Sitools2Exception(ex)
return jsonData | Retrieves a JSON response from the server.
Input parameters
----------------
url : url to call for retrieving the JSON response
Return
------
A dictionary
Exception
---------
SITools2Exception when a problem during the download or parsing happens |
377,960 | def read_tcp(self, length):
if length is None:
length = len(self)
_srcp = self._read_unpack(2)
_dstp = self._read_unpack(2)
_seqn = self._read_unpack(4)
_ackn = self._read_unpack(4)
_lenf = self._read_binary(1)
_flag = self._read_binary(1)
_wins = self._read_unpack(2)
_csum = self._read_fileng(2)
_urgp = self._read_unpack(2)
tcp = dict(
srcport=_srcp,
dstport=_dstp,
seq=_seqn,
ack=_ackn,
hdr_len=int(_lenf[:4], base=2) * 4,
flags=dict(
ns=True if int(_lenf[7]) else False,
cwr=True if int(_flag[0]) else False,
ece=True if int(_flag[1]) else False,
urg=True if int(_flag[2]) else False,
ack=True if int(_flag[3]) else False,
psh=True if int(_flag[4]) else False,
rst=True if int(_flag[5]) else False,
syn=True if int(_flag[6]) else False,
fin=True if int(_flag[7]) else False,
),
window_size=_wins,
checksum=_csum,
urgent_pointer=_urgp,
)
self._syn = True if int(_flag[6]) else False
self._ack = True if int(_flag[3]) else False
        _hlen = tcp['hdr_len']
        _optl = _hlen - 20
        if _optl:
            options = self._read_tcp_options(_optl)
            tcp['opt'] = options[0]       # key reconstructed
            tcp.update(options[1])
        length -= _hlen
        tcp['packet'] = self._read_packet(header=_hlen, payload=length)  # key reconstructed
        return self._decode_next_layer(tcp, None, length) | Read Transmission Control Protocol (TCP).
Structure of TCP header [RFC 793]:
0 1 2 3
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Source Port | Destination Port |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Sequence Number |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Acknowledgement Number |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Data | |U|A|P|R|S|F| |
| Offset| Reserved |R|C|S|S|Y|I| Window |
| | |G|K|H|T|N|N| |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Checksum | Urgent Pointer |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| Options | Padding |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| data |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
Octets Bits Name Description
0 0 tcp.srcport Source Port
2 16 tcp.dstport Destination Port
4 32 tcp.seq Sequence Number
8 64 tcp.ack Acknowledgement Number (if ACK set)
12 96 tcp.hdr_len Data Offset
12 100 - Reserved (must be zero)
12 103 tcp.flags.ns ECN Concealment Protection (NS)
13 104 tcp.flags.cwr Congestion Window Reduced (CWR)
13 105 tcp.flags.ece ECN-Echo (ECE)
13 106 tcp.flags.urg Urgent (URG)
13 107 tcp.flags.ack Acknowledgement (ACK)
13 108 tcp.flags.psh Push Function (PSH)
13 109 tcp.flags.rst Reset Connection (RST)
13 110 tcp.flags.syn Synchronize Sequence Numbers (SYN)
13 111 tcp.flags.fin Last Packet from Sender (FIN)
14 112 tcp.window_size Size of Receive Window
16 128 tcp.checksum Checksum
18 144 tcp.urgent_pointer Urgent Pointer (if URG set)
20 160 tcp.opt TCP Options (if data offset > 5) |
377,961 | def build_schema(self, fields):
    # NOTE: string literals reconstructed from the docstring's description of
    # the schema keys; the concrete type names are plausible guesses.
    content_field_name = ''
    schema_fields = [
        {'field_name': ID,
         'type': 'text',
         'multi_valued': 'false',
         'column': 0},
        {'field_name': DJANGO_ID,
         'type': 'text',
         'multi_valued': 'false',
         'column': 1},
        {'field_name': DJANGO_CT,
         'type': 'text',
         'multi_valued': 'false',
         'column': 2},
    ]
    self._columns[ID] = 0
    self._columns[DJANGO_ID] = 1
    self._columns[DJANGO_CT] = 2
    column = len(schema_fields)
    for field_name, field_class in sorted(list(fields.items()), key=lambda n: n[0]):
        if field_class.document is True:
            content_field_name = field_class.index_fieldname
        if field_class.indexed is True:
            field_data = {
                'field_name': field_class.index_fieldname,
                'type': 'text',
                'multi_valued': 'false',
                'column': column,
            }
            if field_class.field_type == 'date':
                field_data['type'] = 'date'
            elif field_class.field_type == 'datetime':
                field_data['type'] = 'datetime'
            elif field_class.field_type == 'integer':
                field_data['type'] = 'integer'
            elif field_class.field_type == 'float':
                field_data['type'] = 'float'
            elif field_class.field_type == 'long':
                field_data['type'] = 'long'
            elif field_class.field_type == 'boolean':
                field_data['type'] = 'boolean'
            elif field_class.field_type == 'edge_ngram':
                field_data['type'] = 'edge_ngram'
            if field_class.is_multivalued:
                field_data['multi_valued'] = 'true'
            schema_fields.append(field_data)
            self._columns[field_data['field_name']] = column
            column += 1
    return content_field_name, schema_fields | Build the schema from fields.
:param fields: A list of fields in the index
:returns: list of dictionaries
Each dictionary has the keys
field_name: The name of the field index
type: what type of value it is
'multi_valued': if it allows more than one value
'column': a number identifying it
'type': the type of the field
'multi_valued': 'false', 'column': 0} |
377,962 | def reflection_matrix_pow(reflection_matrix: np.ndarray, exponent: float):
squared_phase = np.dot(reflection_matrix[:, 0],
reflection_matrix[0, :])
phase = complex(np.sqrt(squared_phase))
i = np.eye(reflection_matrix.shape[0]) * phase
pos_part = (i + reflection_matrix) * 0.5
neg_part = (i - reflection_matrix) * 0.5
pos_factor = phase**(exponent - 1)
neg_factor = pos_factor * complex(-1)**exponent
pos_part_raised = pos_factor * pos_part
neg_part_raised = neg_part * neg_factor
return pos_part_raised + neg_part_raised | Raises a matrix with two opposing eigenvalues to a power.
Args:
reflection_matrix: The matrix to raise to a power.
exponent: The power to raise the matrix to.
Returns:
The given matrix raised to the given power. |
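Worked check of the row above: the Pauli-X matrix is a reflection (eigenvalues +1 and -1), so raising it to the 0.5 power gives a square root of NOT, and squaring that recovers X:
    >>> import numpy as np
    >>> x = np.array([[0., 1.], [1., 0.]])
    >>> sqrt_x = reflection_matrix_pow(x, 0.5)    # doctest: +SKIP
    >>> np.allclose(sqrt_x @ sqrt_x, x)           # doctest: +SKIP
    True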
377,963 | def get_app(self, app_id, embed_tasks=False, embed_counts=False,
            embed_deployments=False, embed_readiness=False,
            embed_last_task_failure=False, embed_failures=False,
            embed_task_stats=False):
    params = {}
    # embed parameter names follow the Marathon REST API (reconstructed)
    embed_params = {
        'app.tasks': embed_tasks,
        'app.counts': embed_counts,
        'app.deployments': embed_deployments,
        'app.readiness': embed_readiness,
        'app.lastTaskFailure': embed_last_task_failure,
        'app.failures': embed_failures,
        'app.taskStats': embed_task_stats
    }
    filtered_embed_params = [k for (k, v) in embed_params.items() if v]
    if filtered_embed_params:
        params['embed'] = filtered_embed_params
    response = self._do_request(
        'GET', '/v2/apps/{app_id}'.format(app_id=app_id), params=params)
    return self._parse_response(response, MarathonApp, resource_name='app') | Get a single app.
:param str app_id: application ID
:param bool embed_tasks: embed tasks in result
:param bool embed_counts: embed all task counts
:param bool embed_deployments: embed all deployment identifier
:param bool embed_readiness: embed all readiness check results
:param bool embed_last_task_failure: embeds the last task failure
:param bool embed_failures: shorthand for embed_last_task_failure
:param bool embed_task_stats: embed task stats in result
:returns: application
:rtype: :class:`marathon.models.app.MarathonApp` |
377,964 | def disassemble(co, lasti=-1):
out = StringIO()
code = co.co_code
labels = dis.findlabels(code)
linestarts = dict(dis.findlinestarts(co))
n = len(code)
i = 0
extended_arg = 0
free = None
while i < n:
c = code[i]
op = ord(c)
if i in linestarts:
if i > 0:
print(end=u, file=out)
print(u % linestarts[i], end=u, file=out)
else:
print(u, end=u, file=out)
if i == lasti:
print(u, end=u, file=out)
else:
print(u, end=u, file=out)
if i in labels:
print(u, end=u, file=out)
else:
print(u, end=u, file=out)
print(u % i, end=u, file=out)
print(u % dis.opname[op], end=u, file=out)
i = i + 1
if op >= dis.HAVE_ARGUMENT:
oparg = ord(code[i]) + ord(code[i + 1]) * 256 + extended_arg
extended_arg = 0
i = i + 2
if op == dis.EXTENDED_ARG:
extended_arg = oparg * 65536
print(u % oparg, end=u, file=out)
if op in dis.hasconst:
print(u % co.co_consts[oparg], end=u, file=out)
elif op in dis.hasname:
print(u % co.co_names[oparg], end=u, file=out)
elif op in dis.hasjrel:
print(u % (i + oparg), end=u, file=out)
elif op in dis.haslocal:
print(u % co.co_varnames[oparg], end=u, file=out)
elif op in dis.hascompare:
print(u % dis.cmp_op[oparg], end=u, file=out)
elif op in dis.hasfree:
if free is None:
free = co.co_cellvars + co.co_freevars
print(u % free[oparg], end=u, file=out)
print(end=u, file=out)
return out.getvalue() | Disassemble a code object. |
377,965 | def log_y_cb(self, w, val):
self.tab_plot.logy = val
self.plot_two_columns() | Toggle linear/log scale for Y-axis. |
377,966 | def changelist_view(self, request, extra_context=None):
if extra_context is None:
extra_context = {}
response = self.adv_filters_handle(request,
extra_context=extra_context)
if response:
return response
return super(AdminAdvancedFiltersMixin, self
).changelist_view(request, extra_context=extra_context) | Add advanced_filters form to changelist context |
377,967 | def _encode(s, encoding=None, errors=None):
if encoding is None:
encoding = ENCODING
if errors is None:
errors = ENCODING_ERRORS
return s.encode(encoding, errors) if isinstance(s, unicode) else s | Encodes *s*. |
377,968 | def ExtractConfig(self):
logging.info("Extracting config file from .pkg.")
pkg_path = os.environ.get("PACKAGE_PATH", None)
if pkg_path is None:
logging.error("Could not locate package, giving up.")
return
zf = zipfile.ZipFile(pkg_path, mode="r")
fd = zf.open("config.yaml")
install_dir = os.path.dirname(config.CONFIG.parser.filename)
installer_config = os.path.join(install_dir, "installer_config.yaml")
with open(installer_config, "wb") as f:
f.write(fd.read())
packaged_config = config.CONFIG.MakeNewConfig()
packaged_config.Initialize(
filename=installer_config, parser=config_lib.YamlParser)
new_config = config.CONFIG.MakeNewConfig()
new_config.SetWriteBack(config.CONFIG["Config.writeback"])
for info in config.CONFIG.type_infos:
try:
new_value = packaged_config.GetRaw(info.name, None)
except type_info.TypeValueError:
continue
try:
old_value = config.CONFIG.GetRaw(info.name, None)
if not new_value or new_value == old_value:
continue
except type_info.TypeValueError:
pass
new_config.SetRaw(info.name, new_value)
new_config.Write()
logging.info("Config file extracted successfully.")
logging.info("Extracting additional files.")
for zinfo in zf.filelist:
basename = os.path.basename(zinfo.filename)
if basename != "config.yaml":
with open(os.path.join(install_dir, basename), "wb") as f:
f.write(zf.open(zinfo.filename).read()) | This installer extracts a config file from the .pkg file. |
377,969 | def get_params(self):
value = self._get_lookup(self.operator, self.value)
self.params.append(self.value)
return self.params | returns a list |
377,970 | def get_instance(self, payload):
return StreamMessageInstance(
self._version,
payload,
        service_sid=self._solution['service_sid'],
        stream_sid=self._solution['stream_sid'],
) | Build an instance of StreamMessageInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageInstance
:rtype: twilio.rest.sync.v1.service.sync_stream.stream_message.StreamMessageInstance |
377,971 | def update_webhook(self, webhook, name=None, metadata=None):
return self.manager.update_webhook(self.scaling_group, policy=self,
webhook=webhook, name=name, metadata=metadata) | Updates the specified webhook. One or more of the parameters may be
specified. |
377,972 | def create(self, name, targetUrl, resource, event,
filter=None, secret=None, **request_parameters):
check_type(name, basestring, may_be_none=False)
check_type(targetUrl, basestring, may_be_none=False)
check_type(resource, basestring, may_be_none=False)
check_type(event, basestring, may_be_none=False)
check_type(filter, basestring)
check_type(secret, basestring)
post_data = dict_from_items_with_values(
request_parameters,
name=name,
targetUrl=targetUrl,
resource=resource,
event=event,
filter=filter,
secret=secret,
)
json_data = self._session.post(API_ENDPOINT, json=post_data)
return self._object_factory(OBJECT_TYPE, json_data) | Create a webhook.
Args:
name(basestring): A user-friendly name for this webhook.
targetUrl(basestring): The URL that receives POST requests for
each event.
resource(basestring): The resource type for the webhook.
event(basestring): The event type for the webhook.
filter(basestring): The filter that defines the webhook scope.
secret(basestring): The secret used to generate payload signature.
**request_parameters: Additional request parameters (provides
support for parameters that may be added in the future).
Returns:
Webhook: A Webhook object with the details of the created webhook.
Raises:
TypeError: If the parameter types are incorrect.
ApiError: If the Webex Teams cloud returns an error. |
377,973 | def main():
    FIG = {}
    FIG['exp'] = 1          # figure key reconstructed (hypothetical)
    dir_path = '.'
    plot, fmt = 0, 'svg'    # default format per the docstring
    units = 'T'             # reconstructed: IRM treatments are DC fields in tesla
    XLP = []
    norm = 1
    LP = "LP-IRM"
if len(sys.argv) > 1:
if in sys.argv:
print(main.__doc__)
sys.exit()
data_model = int(pmag.get_named_arg("-DM", 3))
if in sys.argv:
norm = 0
if in sys.argv:
plot = 1
if in sys.argv:
ind = sys.argv.index("-fmt")
fmt = sys.argv[ind + 1]
if data_model == 3:
in_file = pmag.get_named_arg("-f", )
else:
in_file = pmag.get_named_arg("-f", )
if in sys.argv:
ind = sys.argv.index()
dir_path = sys.argv[ind + 1]
dir_path = os.path.realpath(dir_path)
in_file = pmag.resolve_file_name(in_file, dir_path)
    if '-WD' not in sys.argv:
        dir_path = os.path.split(in_file)[0]
plot_by = pmag.get_named_arg("-obj", "loc")
    if data_model == 3:
        plot_key = 'location'      # MagIC 3 column names (reconstructed)
        if plot_by == 'sit':
            plot_key = 'site'
        if plot_by == 'sam':
            plot_key = 'sample'
        if plot_by == 'spc':
            plot_key = 'specimen'
    else:
        plot_key = 'er_location_name'   # MagIC 2.5 column names (reconstructed)
        if plot_by == 'sit':
            plot_key = 'er_site_name'
        if plot_by == 'sam':
            plot_key = 'er_sample_name'
        if plot_by == 'spc':
            plot_key = 'er_specimen_name'
    if data_model == 3:
        dmag_key = 'treat_dc_field'
    else:
        dmag_key = 'treatment_dc_field'
    if data_model == 3 and plot_key != 'location':
        # messages reconstructed; the original literals were stripped
        print('-I- getting {} names from the proper table'.format(plot_key))
        print('-I- working in directory {}'.format(dir_path))
con = cb.Contribution(dir_path)
meas_df = con.propagate_location_to_measurements()
        if meas_df is None:
            # messages reconstructed (original literals stripped)
            print('-W- No measurements table found in {}'.format(dir_path))
            return
        if plot_key not in meas_df.columns:
            print('-W- Could not find {} in the measurement data'.format(plot_key))
            print('-W- Please try a different plot-by option')
            return
        else:
            print('-I- plotting by: {}'.format(plot_key))
        data = con.tables['measurements'].convert_to_pmag_data_list()
file_type = "measurements"
else:
data, file_type = pmag.magic_read(in_file)
sids = pmag.get_specs(data)
    pmagplotlib.plot_init(FIG['exp'], 6, 6)
plotlist = []
    # intensity column names reconstructed for the two MagIC data models
    if data_model == 3:
        intlist = ['magn_moment', 'magn_volume', 'magn_mass', 'magnitude']
    else:
        intlist = ['measurement_magnitude', 'measurement_magn_moment',
                   'measurement_magn_volume', 'measurement_magn_mass']
    IntMeths = []
    # restrict to IRM acquisition experiments (method code LP-IRM)
    if data_model == 3:
        data = pmag.get_dictitem(data, 'method_codes', LP, 'has')
    else:
        data = pmag.get_dictitem(data, 'magic_method_codes', LP, 'has')
    Ints = {}
    NoInts, int_key = 1, ""
    for key in intlist:
        Ints[key] = pmag.get_dictitem(data, key, '', 'F')  # records with non-blank intensity
        if len(Ints[key]) > 0:
            NoInts = 0
            if int_key == "":
                int_key = key
    if NoInts == 1:
        print('-E- No intensity data found')  # message reconstructed
        sys.exit()
for rec in Ints[int_key]:
if rec[plot_key] not in plotlist:
plotlist.append(rec[plot_key])
plotlist.sort()
    for plt in plotlist:
        print(plt)
        INTblock = []
        data = pmag.get_dictitem(Ints[int_key], plot_key, plt, 'T')  # exact match
        sids = pmag.get_specs(data)
        if len(sids) > 0:
            title = data[0][plot_key]
        for s in sids:
            INTblock = []
            if data_model == 3:
                sdata = pmag.get_dictitem(data, 'specimen', s, 'T')
            else:
                sdata = pmag.get_dictitem(data, 'er_specimen_name', s, 'T')
            for rec in sdata:
                # trailing symbol string reconstructed (hypothetical)
                INTblock.append([float(rec[dmag_key]), 0, 0,
                                 float(rec[int_key]), 1, 'g'])
            pmagplotlib.plot_mag(FIG['exp'], INTblock, title, 0, units, norm)
        files = {}
        for key in list(FIG.keys()):
            files[key] = title + '_' + LP + '.' + fmt   # file name pattern reconstructed
        if plot == 0:
            pmagplotlib.draw_figs(FIG)
            ans = input(" S[a]ve to save plot, [q]uit, Return to continue: ")
            if ans == 'q':
                sys.exit()
            if ans == "a":
                pmagplotlib.save_plots(FIG, files)
if plt != plotlist[-1]: | NAME
irmaq_magic.py
DESCRIPTION
plots IRM acquisition curves from measurements file
SYNTAX
irmaq_magic [command line options]
INPUT
takes magic formatted magic_measurements.txt files
OPTIONS
-h prints help message and quits
-f FILE: specify input file, default is: magic_measurements.txt/measurements.txt
-obj OBJ: specify object [loc, sit, sam, spc] for plot, default is by location
-N ; do not normalize by last point - use original units
-fmt [png,jpg,eps,pdf] set plot file format [default is svg]
-sav save plot[s] and quit
-DM MagIC data model number, default is 3
NOTE
loc: location (study); sit: site; sam: sample; spc: specimen |
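A minimal invocation sketch for the options above (assumes pmagpy is installed and a MagIC-format measurements file is present; the filename is hypothetical):

import sys
# plot IRM acquisition by site, save as PDF without the interactive prompt
sys.argv = ['irmaq_magic.py', '-f', 'measurements.txt', '-obj', 'sit', '-fmt', 'pdf', '-sav']
main()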
377,974 | def fix_config(self, options):
options = super(Trigger, self).fix_config(options)
opt = "condition"
if opt not in options:
options[opt] = "True"
if opt not in self.help:
self.help[opt] = "The (optional) condition for teeing off the tokens; uses the method, "\
"ie the expression must evaluate to a boolean value; storage values placeholders "\
" get replaced with their string representations before evaluating the "\
"expression (string)."
return options | Fixes the options, if necessary. I.e., it adds all required elements to the dictionary.
:param options: the options to fix
:type options: dict
:return: the (potentially) fixed options
:rtype: dict |
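A quick sketch of the contract described above (assumes Trigger can be instantiated with no arguments):

t = Trigger()
opts = t.fix_config({})
assert opts["condition"] == "True"   # missing option filled with its default
assert "condition" in t.help         # help text registered alongside it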
377,975 | def add_children(self, *children, **kwargs):
for child in children:
self.add_child(child, **kwargs) | Convenience function: Adds objects as children in the scene graph. |
377,976 | def saveSettings(self, settings):
profile = self.saveProfile()
key = self.objectName()
settings.setValue('%s/profile' % key, wrapVariant(profile.toString()))
for viewType in self.viewTypes():
viewType.saveGlobalSettings(settings) | Records the current structure of the view widget to the given \
settings instance.
:param settings | <QSettings> |
377,977 | def get_requirements(lookup=None):
    if lookup is None:
        lookup = get_lookup()
    install_requires = []
    for module in lookup['INSTALL_REQUIRES']:
        module_name = module[0]
        module_meta = module[1]
        # pin an exact version if one is specified
        if "exact_version" in module_meta:
            dependency = "%s==%s" % (module_name, module_meta['exact_version'])
        elif "min_version" in module_meta:
            if module_meta['min_version'] is None:
                dependency = module_name
            else:
                dependency = "%s>=%s" % (module_name, module_meta['min_version'])
        install_requires.append(dependency)
    return install_requires
return install_requires | get_requirements reads in requirements and versions from
the lookup obtained with get_lookup |
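The shape of the lookup is only implied by the loop above (pairs of a module name and a metadata dict); a hypothetical example consistent with that shape:

lookup = {'INSTALL_REQUIRES': [
    ('requests', {'min_version': '2.18.4'}),
    ('pygments', {'exact_version': '2.1.3'}),
    ('six', {'min_version': None}),
]}
get_requirements(lookup)   # ['requests>=2.18.4', 'pygments==2.1.3', 'six']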
377,978 | def _set_exception(self):
assert not self.ready()
self._data = sys.exc_info()
self._success = False
self._event.set()
if self._collector is not None:
self._collector.notify_ready(self) | Called by a Job object to tell that an exception occurred
during the processing of the function. The object will become
ready but not successful. The collector's notify_ready()
method will be called, but NOT the callback method |
377,979 | def load_sbml(filename):
import libsbml
document = libsbml.readSBML(filename)
document.validateSBML()
num_errors = (document.getNumErrors(libsbml.LIBSBML_SEV_ERROR)
+ document.getNumErrors(libsbml.LIBSBML_SEV_FATAL))
if num_errors > 0:
messages = "The generated document is not valid."
messages += " {} errors were found:\n".format(num_errors)
for i in range(document.getNumErrors(libsbml.LIBSBML_SEV_ERROR)):
err = document.getErrorWithSeverity(i, libsbml.LIBSBML_SEV_ERROR)
messages += "{}: {}\n".format(err.getSeverityAsString(), err.getShortMessage())
for i in range(document.getNumErrors(libsbml.LIBSBML_SEV_FATAL)):
err = document.getErrorWithSeverity(i, libsbml.LIBSBML_SEV_FATAL)
messages += "{}: {}\n".format(err.getSeverityAsString(), err.getShortMessage())
raise RuntimeError(messages)
return import_sbml(document) | Load a model from a SBML file.
Parameters
----------
filename : str
The input SBML filename.
Returns
-------
model : NetworkModel
y0 : dict
Initial condition.
volume : Real or Real3, optional
A size of the simulation volume. |
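A call sketch (assumes python-libsbml is installed; the filename is hypothetical, and the three return values follow the docstring above):

model, y0, volume = load_sbml('model.xml')
print(model, y0, volume)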
377,980 | def fdr(p, q=.05):
s = np.sort(p)
nvox = p.shape[0]
null = np.array(range(1, nvox + 1), dtype='float') * q / nvox
below = np.where(s <= null)[0]
return s[max(below)] if len(below) else -1 | Determine FDR threshold given a p value array and desired false
discovery rate q. |
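A worked example (assumes numpy): the sorted p-values are compared against the Benjamini-Hochberg line i*q/n, and the largest p-value under the line is returned:

import numpy as np
p = np.array([0.001, 0.008, 0.04, 0.3, 0.9])
# null = [0.01, 0.02, 0.03, 0.04, 0.05]; only s[0] and s[1] fall below it
fdr(p, q=.05)                      # 0.008
fdr(np.array([0.5, 0.9]), q=.05)   # -1, nothing survives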
377,981 | def send(self, request, ordered=False):
if not self._user_connected:
raise ConnectionError('Cannot send requests while disconnected')
if not utils.is_list_like(request):
state = RequestState(request, self._loop)
self._send_queue.append(state)
return state.future
else:
states = []
futures = []
state = None
for req in request:
state = RequestState(req, self._loop, after=ordered and state)
states.append(state)
futures.append(state.future)
self._send_queue.extend(states)
return futures | This method enqueues the given request to be sent. Its send
state will be saved until a response arrives, and a ``Future``
that will be resolved when the response arrives will be returned:
.. code-block:: python
async def method():
# Sending (enqueued for the send loop)
future = sender.send(request)
# Receiving (waits for the receive loop to read the result)
result = await future
Designed like this because Telegram may send the response at
any point, and it can send other items while one waits for it.
Once the response for this future arrives, it is set with the
received result, quite similar to how a ``receive()`` call
would otherwise work.
Since the receiving part is "built in" the future, it's
impossible to await a result that was never sent.
377,982 | def frequent_signups(self):
key = "{}:frequent_signups".format(self.username)
cached = cache.get(key)
if cached:
return cached
        freq_signups = self.eighthsignup_set.exclude(scheduled_activity__activity__administrative=True).exclude(
            scheduled_activity__activity__special=True).exclude(scheduled_activity__activity__restricted=True).exclude(
            scheduled_activity__activity__deleted=True).values("scheduled_activity__activity").annotate(
            count=Count("scheduled_activity__activity")).filter(count__gte=settings.SIMILAR_THRESHOLD).order_by("-count")
cache.set(key, freq_signups, timeout=60 * 60 * 24 * 7)
return freq_signups | Return a QuerySet of activity IDs and counts for the activities that a given user
has signed up for more than `settings.SIMILAR_THRESHOLD` times |
377,983 | def set_prompt(self, prompt_command="", position=0):
self.description_docs = u'{}'.format(prompt_command)
self.cli.current_buffer.reset(
initial_document=Document(
self.description_docs,
cursor_position=position))
self.cli.request_redraw() | writes the prompt line |
377,984 | def scan_in_memory(node, env, path=()):
try:
entries = node.entries
except AttributeError:
return []
entry_list = sorted(filter(do_not_scan, list(entries.keys())))
return [entries[n] for n in entry_list] | "Scans" a Node.FS.Dir for its in-memory entries. |
377,985 | def symmetrize_compact_force_constants(force_constants,
primitive,
level=1):
s2p_map = primitive.get_supercell_to_primitive_map()
p2s_map = primitive.get_primitive_to_supercell_map()
p2p_map = primitive.get_primitive_to_primitive_map()
permutations = primitive.get_atomic_permutations()
s2pp_map, nsym_list = get_nsym_list_and_s2pp(s2p_map,
p2p_map,
permutations)
try:
import phonopy._phonopy as phonoc
phonoc.perm_trans_symmetrize_compact_fc(force_constants,
permutations,
s2pp_map,
p2s_map,
nsym_list,
level)
except ImportError:
text = ("Import error at phonoc.perm_trans_symmetrize_compact_fc. "
"Corresponding pytono code is not implemented.")
raise RuntimeError(text) | Symmetry force constants by translational and permutation symmetries
Parameters
----------
force_constants: ndarray
Compact force constants. Symmetrized force constants are overwritten.
dtype=double
shape=(n_patom,n_satom,3,3)
primitive: Primitive
Primitive cell
level: int
Controls the number of times the following steps repeated:
1) Subtract drift force constants along row and column
2) Average fc and fc.T |
377,986 | def marvcli_comment_list(datasets):
app = create_app()
ids = parse_setids(datasets, dbids=True)
comments = db.session.query(Comment)\
.options(db.joinedload(Comment.dataset))\
.filter(Comment.dataset_id.in_(ids))
for comment in sorted(comments, key=lambda x: (x.dataset._setid, x.id)):
print(comment.dataset.setid, comment.id,
datetime.datetime.fromtimestamp(int(comment.time_added / 1000)),
comment.author, repr(comment.text)) | Lists comments for datasets.
Output: setid comment_id date time author message |
377,987 | def add_membership(self, subject_descriptor, container_descriptor):
        route_values = {}
        if subject_descriptor is not None:
            route_values['subjectDescriptor'] = self._serialize.url('subject_descriptor', subject_descriptor, 'str')
        if container_descriptor is not None:
            route_values['containerDescriptor'] = self._serialize.url('container_descriptor', container_descriptor, 'str')
        response = self._send(http_method='PUT',
                              location_id='',  # REST location GUID elided in the source row
                              version='5.1-preview.1',
                              route_values=route_values)
        return self._deserialize('GraphMembership', response) | AddMembership.
[Preview API] Create a new membership between a container and subject.
:param str subject_descriptor: A descriptor to a group or user that can be the child subject in the relationship.
:param str container_descriptor: A descriptor to a group that can be the container in the relationship.
:rtype: :class:`<GraphMembership> <azure.devops.v5_1.graph.models.GraphMembership>` |
377,988 | def fftconv(a, b, axes=(0, 1)):
if np.isrealobj(a) and np.isrealobj(b):
fft = rfftn
ifft = irfftn
else:
fft = fftn
ifft = ifftn
dims = np.maximum([a.shape[i] for i in axes], [b.shape[i] for i in axes])
af = fft(a, dims, axes)
bf = fft(b, dims, axes)
return ifft(af * bf, dims, axes) | Compute a multi-dimensional convolution via the Discrete Fourier
Transform. Note that the output has a phase shift relative to the
output of :func:`scipy.ndimage.convolve` with the default ``origin``
parameter.
Parameters
----------
a : array_like
Input array
b : array_like
Input array
axes : sequence of ints, optional (default (0, 1))
Axes on which to perform convolution
Returns
-------
ab : ndarray
Convolution of input arrays, a and b, along specified axes |
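A consistency check via the convolution theorem (assumes numpy; for equal-size 8x8 real inputs the function reduces to a circular convolution over both axes):

import numpy as np
a = np.random.randn(8, 8)
b = np.random.randn(8, 8)
ab = fftconv(a, b)
# same circular convolution, computed directly with numpy's real FFT
ref = np.fft.irfftn(np.fft.rfftn(a) * np.fft.rfftn(b), a.shape)
assert np.allclose(ab, ref)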
377,989 | def mendelian_check(tp1, tp2, tpp, is_xlinked=False):
call_to_ints = lambda x: tuple(int(_) for _ in x.split("|") if _ != ".")
tp1_sex, tp1_call = tp1[:2]
tp2_sex, tp2_call = tp2[:2]
tpp_sex, tpp_call = tpp[:2]
tp1_call = call_to_ints(tp1_call)
tp2_call = call_to_ints(tp2_call)
tpp_call = call_to_ints(tpp_call)
possible_progenies = set(tuple(sorted(x)) \
for x in product(tp1_call, tp2_call))
if is_xlinked and tpp_sex == "Male":
possible_progenies = set(tuple((x,)) for x in tp1_call)
if -1 in tp1_call or -1 in tp2_call or -1 in tpp_call:
tag = "Missing"
else:
tag = "Correct" if tpp_call in possible_progenies else "Error"
return tag | Compare TRED calls for Parent1, Parent2 and Proband. |
377,990 | def __set_components(self, requisite=True):
components = self.__components_manager.list_components()
candidate_components = \
getattr(set(components), "intersection" if requisite else "difference")(self.__requisite_components)
deactivated_components = self.__settings.get_key("Settings", "deactivated_components").toString().split(",")
candidate_components = \
sorted(filter(lambda x: x not in deactivated_components, candidate_components), key=(components).index)
for component in candidate_components:
try:
profile = self.__components_manager.components[component]
interface = self.__components_manager.get_interface(component)
setattr(self,
"_{0}__{1}".format(self.__class__.__name__, foundations.namespace.get_leaf(component, ".")),
interface)
self.__splashscreen and self.__splashscreen.show_message(
"{0} - {1} | Activating {2}.".format(self.__class__.__name__, Constants.version, component))
interface.activate(self)
if profile.category in ("Default", "QObject"):
interface.initialize()
elif profile.category == "QWidget":
interface.add_widget()
interface.initialize_ui()
except Exception as error:
if requisite:
message = " Component failed to activate!\nException raised: {1}"
handler = umbra.reporter.system_exit_exception_handler
else:
message = " Component failed to activate, unexpected behavior may occur!\nException raised: {1}"
handler = umbra.reporter.base_exception_handler
exception = manager.exceptions.ComponentActivationError(message.format(component, error))
handler(exception) | Sets the Components.
:param requisite: Set only requisite Components.
:type requisite: bool |
377,991 | def from_lambda(cls, name, lambda_):
if PY2:
a = inspect.getargspec(lambda_)
varargs, varkw, defaults, kwonlyargs = (
a.varargs, a.keywords, a.defaults, None,
)
else:
a = inspect.getfullargspec(lambda_)
varargs, varkw, defaults, kwonlyargs = (
a.varargs, a.varkw, a.defaults, a.kwonlyargs,
)
        if varargs or varkw or defaults or kwonlyargs:
            raise TypeError(
                'functions cannot have starargs or defaults: {} {}'.format(
                    name, lambda_,
                ),
            )
return cls(name, a.args, lambda_) | Make a :class:`SassFunction` object from the given ``lambda_``
function. Since lambda functions don't have their name, it need
its ``name`` as well. Arguments are automatically inspected.
:param name: the function name
:type name: :class:`str`
:param lambda_: the actual lambda function to be called
:type lambda_: :class:`types.LambdaType`
:returns: a custom function wrapper of the ``lambda_`` function
:rtype: :class:`SassFunction` |
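Usage sketch (the name is supplied separately since lambdas are anonymous; positional argument names are inspected automatically):

func = SassFunction.from_lambda('my-add', lambda x, y: x + y)
# lambdas with *args, **kwargs, defaults, or keyword-only args are rejected:
# SassFunction.from_lambda('bad', lambda *a: a)   # raises TypeError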
377,992 | def version_binary(self):
try:
            item_value, item_type = self.__reg_query_value(self.__reg_uninstall_handle, 'version')
        except pywintypes.error as exc:
            if exc.winerror == winerror.ERROR_FILE_NOT_FOUND:
                # registry value does not exist
                return '', ''
        version_binary_text = ''
        version_src = ''
        version_binary_raw = 0  # guard against REG_DWORD values of a non-integer type
        if item_value:
            if item_type == win32con.REG_DWORD:
                if isinstance(item_value, six.integer_types):
                    version_binary_raw = item_value
                if version_binary_raw:
                    # packed as major.minor.build: top byte, next byte, low word
                    version_binary_text = '{}.{}.{}'.format(
                        version_binary_raw >> 24 & 0xff,
                        version_binary_raw >> 16 & 0xff,
                        version_binary_raw & 0xffff)
                    version_src = 'binary-version'
            elif (item_type == win32con.REG_SZ and
                    isinstance(item_value, six.string_types) and
                    self.__version_pattern.match(item_value) is not None):
                # some installers store the version directly as a string
                version_binary_text = item_value.strip()
                version_src = 'binary-version (string)'
return (version_binary_text, version_src) | Return version number which is stored in binary format.
Returns:
str: <major 0-255>.<minor 0-255>.<build 0-65535>, or empty strings if the value is not found |
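A worked decoding of the REG_DWORD packing used above — major in the top byte, minor in the next byte, build in the low word (the raw value is hypothetical):

raw = 0x06014A61
raw >> 24 & 0xff   # 6      (major)
raw >> 16 & 0xff   # 1      (minor)
raw & 0xffff       # 19041  (build)
'{}.{}.{}'.format(raw >> 24 & 0xff, raw >> 16 & 0xff, raw & 0xffff)   # '6.1.19041'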
377,993 | def delete_migration(connection, basename):
sql = "DELETE FROM migrations_applied WHERE name = %s"
with connection.cursor() as cursor:
cursor.execute(sql, (basename,))
connection.commit()
return True | Delete a migration in `migrations_applied` table |
377,994 | def _structure_default(self, obj, cl):
if cl is Any or cl is Optional:
return obj
msg = (
"Unsupported type: {0}. Register a structure hook for "
"it.".format(cl)
)
raise ValueError(msg) | This is the fallthrough case. Everything is a subclass of `Any`.
A special condition here handles ``attrs`` classes.
Bare optionals end here too (optionals with arguments are unions.) We
treat bare optionals as Any. |
377,995 | def stop(self):
dd = time() - self._start
self.ms = int(round(1000 * dd)) | Stop the timer. |
377,996 | def get_(key, recurse=False, profile=None, **kwargs):
client = __utils__['etcd_util.get_conn'](__opts__, profile, **kwargs)
if recurse:
return client.tree(key)
else:
return client.get(key, recurse=recurse) | .. versionadded:: 2014.7.0
Get a value from etcd, by direct path. Returns None on failure.
CLI Examples:
.. code-block:: bash
salt myminion etcd.get /path/to/key
salt myminion etcd.get /path/to/key profile=my_etcd_config
salt myminion etcd.get /path/to/key recurse=True profile=my_etcd_config
salt myminion etcd.get /path/to/key host=127.0.0.1 port=2379 |
377,997 | def update_form_labels(self, request=None, obj=None, form=None):
for form_label in self.custom_form_labels:
if form_label.field in form.base_fields:
label = form_label.get_form_label(
request=request, obj=obj, model=self.model, form=form
)
if label:
form.base_fields[form_label.field].label = mark_safe(label)
return form | Returns a form obj after modifying form labels
referred to in custom_form_labels. |
377,998 | def load(pathtovector,
wordlist=(),
num_to_load=None,
truncate_embeddings=None,
unk_word=None,
sep=" "):
vectors, items = Reach._load(pathtovector,
wordlist,
num_to_load,
truncate_embeddings,
sep)
if unk_word is not None:
if unk_word not in set(items):
unk_vec = np.zeros((1, vectors.shape[1]))
vectors = np.concatenate([unk_vec, vectors], 0)
items = [unk_word] + items
unk_index = 0
else:
unk_index = items.index(unk_word)
else:
unk_index = None
return Reach(vectors,
items,
name=os.path.split(pathtovector)[-1],
unk_index=unk_index) | r"""
Read a file in word2vec .txt format.
The load function will raise a ValueError when trying to load items
which do not conform to line lengths.
Parameters
----------
pathtovector : string
The path to the vector file.
header : bool
Whether the vector file has a header of the type
(NUMBER OF ITEMS, SIZE OF VECTOR).
wordlist : iterable, optional, default ()
A list of words you want loaded from the vector file. If this is
None (default), all words will be loaded.
num_to_load : int, optional, default None
The number of items to load from the file. Because loading can take
some time, it is sometimes useful to only load the first n items
from a vector file for quick inspection.
truncate_embeddings : int, optional, default None
If this value is not None, the vectors in the vector space will
be truncated to the number of dimensions indicated by this value.
unk_word : object
The object to treat as UNK in your vector space. If this is not
in your items dictionary after loading, we add it with a zero
vector.
Returns
-------
r : Reach
An initialized Reach instance. |
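A call sketch (the path and sizes are hypothetical; per the docstring, unk_word is added with a zero vector when absent from the file):

r = load('vectors.txt', num_to_load=10000, unk_word='<UNK>')
r_small = load('vectors.txt', truncate_embeddings=50)   # keep only the first 50 dimensions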
377,999 | def set_actuator_control_target_encode(self, time_usec, group_mlx, target_system, target_component, controls):
return MAVLink_set_actuator_control_target_message(time_usec, group_mlx, target_system, target_component, controls) | Set the vehicle attitude and body angular rates.
time_usec : Timestamp (micros since boot or Unix epoch) (uint64_t)
group_mlx : Actuator group. The "_mlx" indicates this is a multi-instance message and a MAVLink parser should use this field to difference between instances. (uint8_t)
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
controls : Actuator controls. Normed to -1..+1 where 0 is neutral position. Throttle for single rotation direction motors is 0..1, negative range for reverse direction. Standard mapping for attitude controls (group 0): (index 0-7): roll, pitch, yaw, throttle, flaps, spoilers, airbrakes, landing gear. Load a pass-through mixer to repurpose them as generic outputs. (float) |