Unnamed: 0 (int64, 0–389k) | code (stringlengths 26–79.6k) | docstring (stringlengths 1–46.9k) |
---|---|---|
386,100 | def _travis_job_state(state):
if state in [None, , , ]:
return colorama.Fore.YELLOW, , True
elif state in [, ]:
return colorama.Fore.LIGHTYELLOW_EX, , True
elif state == :
return colorama.Fore.LIGHTGREEN_EX, , False
elif state == :
return colorama.Fore.LIGHTRED_EX, , False
elif state == :
return colorama.Fore.LIGHTRED_EX, , False
elif state == :
return colorama.Fore.LIGHTBLACK_EX, , False
else:
raise RuntimeError( % str(state)) | Converts a Travis state into a state character, color,
and whether it's still running or a stopped state. |
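The string literals in the row above were stripped during extraction. A hedged reconstruction, assuming the standard Travis CI job states ('created', 'queued', 'received', 'started', 'passed', 'failed', 'errored', 'canceled'); the single-character state markers are placeholders, not recovered values:

import colorama

def _travis_job_state(state):
    # pending states -> yellow, still running
    if state in [None, 'created', 'queued', 'received']:
        return colorama.Fore.YELLOW, '*', True          # '*' is a placeholder character
    # in-progress states -> light yellow, still running
    elif state in ['started', 'running']:
        return colorama.Fore.LIGHTYELLOW_EX, '*', True   # placeholder character
    elif state == 'passed':
        return colorama.Fore.LIGHTGREEN_EX, 'P', False   # placeholder character
    elif state == 'failed':
        return colorama.Fore.LIGHTRED_EX, 'X', False     # placeholder character
    elif state == 'errored':
        return colorama.Fore.LIGHTRED_EX, '!', False     # placeholder character
    elif state == 'canceled':
        return colorama.Fore.LIGHTBLACK_EX, 'C', False   # placeholder character
    else:
        raise RuntimeError('unknown state: %s' % str(state))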
386,101 | def _get_principal(self, principal_arn):
if ':cert/' in principal_arn:
certs = [_ for _ in self.certificates.values() if _.arn == principal_arn]
if len(certs) == 0:
raise ResourceNotFoundException()
principal = certs[0]
return principal
else:
pass
raise ResourceNotFoundException() | raise ResourceNotFoundException |
386,102 | def match_grade_system_id(self, grade_system_id, match):
self._add_match('gradeSystemId', str(grade_system_id), bool(match)) | Sets the grade system ``Id`` for this query.
arg: grade_system_id (osid.id.Id): a grade system ``Id``
arg: match (boolean): ``true`` for a positive match,
``false`` for a negative match
raise: NullArgument - ``grade_system_id`` is ``null``
*compliance: mandatory -- This method must be implemented.* |
386,103 | def decode(in_bytes):
if isinstance(in_bytes, str):
raise TypeError()
in_bytes_mv = _get_buffer_view(in_bytes)
out_bytes = bytearray()
idx = 0
if len(in_bytes_mv) > 0:
while True:
length = ord(in_bytes_mv[idx])
if length == 0:
raise DecodeError("zero byte found in input")
idx += 1
end = idx + length - 1
copy_mv = in_bytes_mv[idx:end]
if b'\x00' in copy_mv:
raise DecodeError("zero byte found in input")
out_bytes += copy_mv
idx = end
if idx > len(in_bytes_mv):
raise DecodeError("not enough input bytes for length code")
if idx < len(in_bytes_mv):
if length < 0xFF:
out_bytes.append(0)
else:
break
return bytes(out_bytes) | Decode a string using Consistent Overhead Byte Stuffing (COBS).
Input should be a byte string that has been COBS encoded. Output
is also a byte string.
A cobs.DecodeError exception will be raised if the encoded data
is invalid. |
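A quick usage check of the decoder above, tracing the loop by hand on small COBS-encoded frames:

# b'ab\x00c' COBS-encodes to b'\x03ab\x02c':
# code byte 3 -> copy two bytes 'ab' then an implied zero;
# code byte 2 -> copy one byte 'c', end of input.
assert decode(b'\x03ab\x02c') == b'ab\x00c'
# a single zero byte encodes to two 0x01 code bytes
assert decode(b'\x01\x01') == b'\x00'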
386,104 | def covariance_eigvals(self):
if not np.isnan(np.sum(self.covariance)):
eigvals = np.linalg.eigvals(self.covariance)
if np.any(eigvals < 0):
return (np.nan, np.nan) * u.pix**2
return (np.max(eigvals), np.min(eigvals)) * u.pix**2
else:
return (np.nan, np.nan) * u.pix**2 | The two eigenvalues of the `covariance` matrix in decreasing
order. |
386,105 | def set_auth_key(
user,
key,
enc='ssh-rsa',
comment='',
options=None,
config='.ssh/authorized_keys',
cache_keys=None,
fingerprint_hash_type=None):
if cache_keys is None:
cache_keys = []
if len(key.split()) > 1:
return 'invalid'
enc = _refine_enc(enc)
uinfo = __salt__['user.info'](user)
if not uinfo:
return 'fail'
_fh.seek(0, 2)
if _fh.tell() > 0:
| Add a key to the authorized_keys file. The "key" parameter must only be the
string of text that is the encoded key. If the key begins with "ssh-rsa"
or ends with user@host, remove those from the key before passing it to this
function.
CLI Example:
.. code-block:: bash
salt '*' ssh.set_auth_key <user> '<key>' enc='dsa' |
386,106 | def update_role_config_group(resource_root, service_name, name, apigroup,
cluster_name="default"):
return call(resource_root.put,
_get_role_config_group_path(cluster_name, service_name, name),
ApiRoleConfigGroup, data=apigroup, api_version=3) | Update a role config group by name.
@param resource_root: The root Resource object.
@param service_name: Service name.
@param name: Role config group name.
@param apigroup: The updated role config group.
@param cluster_name: Cluster name.
@return: The updated ApiRoleConfigGroup object.
@since: API v3 |
386,107 | def hicup_stats_table(self):
headers = OrderedDict()
headers[] = {
: ,
: ,
: 100,
: 0,
: ,
:
}
headers[] = {
: .format(config.read_count_prefix),
: .format(config.read_count_desc),
: 0,
: ,
: lambda x: x * config.read_count_multiplier,
:
}
headers[] = {
: ,
: ,
: 100,
: 0,
: ,
: ,
: lambda x: 100 - x
}
headers[] = {
: .format(config.read_count_prefix),
: .format(config.read_count_desc),
: 0,
: ,
: lambda x: x * config.read_count_multiplier,
:
}
headers[] = {
: ,
: ,
: 100,
: 0,
: ,
:
}
headers[] = {
: .format(config.read_count_prefix),
: .format(config.read_count_desc),
: 0,
: ,
: lambda x: x * config.read_count_multiplier,
:
}
headers[] = {
: ,
: ,
: 100,
: 0,
: ,
:
}
self.general_stats_addcols(self.hicup_data, headers, ) | Add core HiCUP stats to the general stats table |
386,108 | def lrelu_sq(x):
dim = len(x.get_shape()) - 1
return tf.concat(dim, [lrelu(x), tf.minimum(tf.abs(x), tf.square(x))]) | Concatenates lrelu and square |
386,109 | def sum_transactions(transactions):
workdays_per_year = 250
previous_date = None
rate = 0
day_sum = 0
for transaction in transactions:
date, action, value = _parse_transaction_entry(transaction)
if previous_date is None:
previous_date = date
elapsed = workdays.networkdays(previous_date, date, stat_holidays()) - 1
if action == :
rate = float(value) / workdays_per_year
elif action == :
elapsed -= 1
day_sum = value
previous_date = date
return day_sum | Sums transactions into a total of remaining vacation days. |
386,110 | def GET(self):
if self.user_manager.session_logged_in() or not self.app.allow_registration:
raise web.notfound()
error = False
reset = None
msg = ""
data = web.input()
if "activate" in data:
msg, error = self.activate_user(data)
elif "reset" in data:
msg, error, reset = self.get_reset_data(data)
return self.template_helper.get_renderer().register(reset, msg, error) | Handles GET request |
386,111 | def _bind_ith_exec(self, i, data_shapes, label_shapes, shared_group):
shared_exec = None if shared_group is None else shared_group.execs[i]
context = self.contexts[i]
shared_data_arrays = self.shared_data_arrays[i]
input_shapes = dict(data_shapes)
if label_shapes is not None:
input_shapes.update(dict(label_shapes))
input_types = {x.name: x.dtype for x in data_shapes}
if label_shapes is not None:
input_types.update({x.name: x.dtype for x in label_shapes})
group2ctx = self.group2ctxs[i]
executor = self.symbol.simple_bind(ctx=context, grad_req=self.grad_req,
type_dict=input_types, shared_arg_names=self.param_names,
shared_exec=shared_exec, group2ctx=group2ctx,
shared_buffer=shared_data_arrays, **input_shapes)
self._total_exec_bytes += int(executor.debug_str().split('\n')[-3].split()[1])
return executor | Internal utility function to bind the i-th executor.
This function utilizes simple_bind python interface. |
386,112 | def default_get_arg_names_from_class_name(class_name):
parts = []
rest = class_name
if rest.startswith('_'):
rest = rest[1:]
while True:
m = re.match(r'([A-Z][a-z]*)(.*)', rest)
if m is None:
break
parts.append(m.group(1))
rest = m.group(2)
if not parts:
return []
return ['_'.join(part.lower() for part in parts)] | Converts normal class names into normal arg names.
Normal class names are assumed to be CamelCase with an optional leading
underscore. Normal arg names are assumed to be lower_with_underscores.
Args:
class_name: a class name, e.g., "FooBar" or "_FooBar"
Returns:
all likely corresponding arg names, e.g., ["foo_bar"] |
386,113 | def clean(ctx):
ctx.run(f)
dist = ROOT.joinpath('dist')
print(f)
if dist.exists():
shutil.rmtree(str(dist)) | Clean previously built package artifacts. |
386,114 | def copy_folder(self, dest_folder_id, source_folder_id):
path = {}
data = {}
params = {}
path["dest_folder_id"] = dest_folder_id
data["source_folder_id"] = source_folder_id
self.logger.debug("POST /api/v1/folders/{dest_folder_id}/copy_folder with query params: {params} and form data: {data}".format(params=params, data=data, **path))
return self.generic_request("POST", "/api/v1/folders/{dest_folder_id}/copy_folder".format(**path), data=data, params=params, single_item=True) | Copy a folder.
Copy a folder (and its contents) from elsewhere in Canvas into a folder.
Copying a folder across contexts (between courses and users) is permitted,
but the source and destination must belong to the same institution.
If the source and destination folders are in the same context, the
source folder may not contain the destination folder. A folder will be
renamed at its destination if another folder with the same name already
exists. |
386,115 | def get_name_str(self, element):
if element.get() == :
return self.model2.get_name_str(element)
else:
return self.model1.get_name_str(element) | get_name_str
High-level api: Produce a string that represents the name of a node.
Parameters
----------
element : `Element`
A node in model tree.
Returns
-------
str
A string that represents the name of a node. |
386,116 | def get_files_types(self):
if self._files == {}:
for i in self.get_files():
buffer = self.zip.read(i)
self.files_crc32[i] = crc32(buffer)
self._files[i] = self._get_file_magic_name(buffer)
return self._files | Return the files inside the APK with their associated types (by using python-magic)
:rtype: a dictionnary |
386,117 | def ascii2h5(bh_dir=None):
if bh_dir is None:
bh_dir = os.path.join(data_dir_default, )
fname = os.path.join(bh_dir, )
f = h5py.File(, )
for region in (, ):
data = np.loadtxt(fname.format(region), dtype=)
data.shape = (210, 201)
data = data[:201]
data[data < -9000] = np.nan
dset = f.create_dataset(
region,
data=data,
chunks=True,
compression=,
compression_opts=3
)
dset.attrs[] = (, )
dset.attrs[] = (
)
for region in (, ):
data = np.loadtxt(fname.format(region), dtype=)
data.shape = (94, 1200)
data = data[:93]
data[data < -9000] = np.nan
dset = f.create_dataset(
region,
data=data,
chunks=True,
compression=,
compression_opts=3
)
dset.attrs[] = (, )
dset.attrs[] = (
)
f.attrs[] = (
)
f.close() | Convert the Burstein & Heiles (1982) dust map from ASCII to HDF5. |
386,118 | def enable(self, cmd="sudo su", pattern="ssword", re_flags=re.IGNORECASE):
delay_factor = self.select_delay_factor(delay_factor=0)
output = ""
if not self.check_enable_mode():
self.write_channel(self.normalize_cmd(cmd))
time.sleep(0.3 * delay_factor)
try:
output += self.read_channel()
if re.search(pattern, output, flags=re_flags):
self.write_channel(self.normalize_cmd(self.secret))
self.set_base_prompt()
except socket.timeout:
raise NetMikoTimeoutException(
"Timed-out reading channel, data not available."
)
if not self.check_enable_mode():
msg = (
"Failed to enter enable mode. Please ensure you pass "
"the argument to ConnectHandler."
)
raise ValueError(msg)
return output | Attempt to become root. |
386,119 | def get_revocation_time(self):
if self.revoked is False:
return
if timezone.is_aware(self.revoked_date):
return timezone.make_naive(self.revoked_date, pytz.utc)
return self.revoked_date | Get the revocation time as naive datetime.
Note that this method is only used by cryptography>=2.4. |
386,120 | def invertible_total_flatten(unflat_list):
import utool as ut
next_list = unflat_list
scalar_flags = [not ut.isiterable(item) for item in next_list]
invert_stack = []
while not all(scalar_flags):
unflattenized = [[item] if flag else item
for flag, item in zip(scalar_flags, next_list)]
flatter_list, invert_part = ut.invertible_flatten1(unflattenized)
for idx in ut.where(scalar_flags):
invert_part[idx] = invert_part[idx][0]
invert_stack.append(invert_part)
next_list = flatter_list
scalar_flags = [not ut.isiterable(item) for item in next_list]
invert_levels = invert_stack[::-1]
flat_list = next_list
return flat_list, invert_levels | r"""
Args:
unflat_list (list):
Returns:
tuple: (flat_list, invert_levels)
CommandLine:
python -m utool.util_list --exec-invertible_total_flatten --show
Example:
>>> # DISABLE_DOCTEST
>>> from utool.util_list import * # NOQA
>>> import utool as ut
>>> unflat_list = [0, [[1, 2, 3], 4, 5], 9, [2, 3], [1, [2, 3, 4]], 1, 2, 3]
>>> print('unflat_list = %r' % (unflat_list,))
>>> (flat_list, invert_levels) = invertible_total_flatten(unflat_list)
>>> print('flat_list = %r' % (flat_list,))
>>> unflat_list2 = total_unflatten(flat_list, invert_levels)
>>> print('unflat_list2 = %r' % (unflat_list2,))
>>> assert unflat_list2 == unflat_list
>>> assert ut.depth_profile(flat_list) == 16 |
386,121 | def _ParseFileData(self, knowledge_base, file_object):
text_file_object = dfvfs_text_file.TextFile(file_object, encoding='utf-8')
system_product = text_file_object.readline()
if system_product.startswith():
system_product, _, _ = system_product.partition()
system_product = system_product.rstrip()
else:
system_product = None
if not knowledge_base.GetValue():
if system_product:
knowledge_base.SetValue(, system_product) | Parses file content (data) for system product preprocessing attribute.
Args:
knowledge_base (KnowledgeBase): to fill with preprocessing information.
file_object (dfvfs.FileIO): file-like object that contains the artifact
value data.
Raises:
errors.PreProcessFail: if the preprocessing fails. |
386,122 | def apt_add_repository_from_apt_string(apt_string, apt_file):
apt_file_path = '/etc/apt/sources.list.d/%s' % apt_file
if not file_contains(apt_file_path, apt_string.lower(), use_sudo=True):
file_append(apt_file_path, apt_string.lower(), use_sudo=True)
with hide('running', 'stdout'):
sudo("DEBIAN_FRONTEND=noninteractive apt-get update") | adds a new repository file for apt |
386,123 | def write_cookies_to_cache(cj, username):
mkdir_p(PATH_COOKIES, 0o700)
path = get_cookies_cache_path(username)
cached_cj = cookielib.MozillaCookieJar()
for cookie in cj:
cached_cj.set_cookie(cookie)
cached_cj.save(path) | Save RequestsCookieJar to disk in Mozilla's cookies.txt file format.
This prevents us from repeated authentications on the
accounts.coursera.org and class.coursera.org/class_name sites. |
386,124 | def load_inhibit(self, train=True, test=True) -> tuple:
def loader(kws: list, nkws: list):
from precise.params import pr
inputs = np.empty((0, pr.n_features, pr.feature_size))
outputs = np.zeros((len(kws), 1))
for f in kws:
if not isfile(f):
continue
new_vec = load_vector(f, vectorize_inhibit)
inputs = np.concatenate([inputs, new_vec])
return self.merge((inputs, outputs), self.__load_files(kws, nkws))
return self.__load(loader, train, test) | Generate data with inhibitory inputs created from wake word samples |
386,125 | async def send_request(self):
if self._send_request_done:
raise ProtocolError()
with self._wrapper:
protocol = await self._channel.__connect__()
stream = protocol.processor.connection\
.create_stream(wrapper=self._wrapper)
headers = [
(':method', 'POST'),
(':scheme', self._channel._scheme),
(':path', self._method_name),
(':authority', self._channel._authority),
]
if self._deadline is not None:
timeout = self._deadline.time_remaining()
headers.append(('grpc-timeout', encode_timeout(timeout)))
content_type = (GRPC_CONTENT_TYPE
+ '+' + self._codec.__content_subtype__)
headers.extend((
('te', 'trailers'),
('content-type', content_type),
('user-agent', USER_AGENT),
))
metadata, = await self._dispatch.send_request(
self._metadata,
method_name=self._method_name,
deadline=self._deadline,
content_type=content_type,
)
headers.extend(encode_metadata(metadata))
release_stream = await stream.send_request(
headers, _processor=protocol.processor,
)
self._stream = stream
self._release_stream = release_stream
self._send_request_done = True | Coroutine to send request headers with metadata to the server.
New HTTP/2 stream will be created during this coroutine call.
.. note:: This coroutine will be called implicitly during first
:py:meth:`send_message` coroutine call, if not called before
explicitly. |
386,126 | def _cast_expected_to_returned_type(expected, returned):
ret_type = type(returned)
new_expected = expected
if expected == "False" and ret_type == bool:
expected = False
try:
new_expected = ret_type(expected)
except ValueError:
log.info("Unable to cast expected into type of returned")
log.info("returned = %s", returned)
log.info("type of returned = %s", type(returned))
log.info("expected = %s", expected)
log.info("type of expected = %s", type(expected))
return new_expected | Determine the type of variable returned
Cast the expected to the type of variable returned |
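A small illustration of the casting behaviour above, using hypothetical values (not from the original test suite):

assert _cast_expected_to_returned_type("5", 3) == 5              # str cast to int
assert _cast_expected_to_returned_type("False", True) is False    # special-cased bool
assert _cast_expected_to_returned_type("abc", 3) == "abc"         # ValueError: expected returned unchanged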
386,127 | def append(self, cls, infer_hidden: bool = False, **kwargs) -> Encoder:
params = dict(kwargs)
if infer_hidden:
params['num_hidden'] = self.get_num_hidden()
sig_params = inspect.signature(cls.__init__).parameters
if 'dtype' in sig_params and 'dtype' not in kwargs:
params['dtype'] = self.dtype
encoder = cls(**params)
self.encoders.append(encoder)
return encoder | Extends sequence with new Encoder. 'dtype' gets passed into Encoder instance if not present in parameters
and supported by specific Encoder type.
:param cls: Encoder type.
:param infer_hidden: If number of hidden should be inferred from previous encoder.
:param kwargs: Named arbitrary parameters for Encoder.
:return: Instance of Encoder. |
386,128 | def _save_npz(self):
d = dict(self.__dict__)
d.pop(, None)
d.pop(, None)
d.pop(, None)
d.pop(, None)
d.pop(, None)
d.pop(, None)
d.pop(, None)
d.pop(, None)
d.pop(, None)
d.pop(, None)
d.pop(, None)
np.savez(os.path.join(self.dir, self.name + ), **d) | Saves all of the de-trending information to disk in an `npz` file |
386,129 | def processDefines(defs):
if SCons.Util.is_List(defs):
l = []
for d in defs:
if d is None:
continue
elif SCons.Util.is_List(d) or isinstance(d, tuple):
if len(d) >= 2:
l.append(str(d[0]) + '=' + str(d[1]))
else:
l.append(str(d[0]))
elif SCons.Util.is_Dict(d):
for macro,value in d.items():
if value is not None:
l.append(str(macro) + '=' + str(value))
else:
l.append(str(macro))
elif SCons.Util.is_String(d):
l.append(str(d))
else:
raise SCons.Errors.UserError("DEFINE %s is not a list, dict, string or None."%repr(d))
elif SCons.Util.is_Dict(defs):
# sort keys so the command line (and build signature) stays stable
l = [str(k) if v is None else str(k) + '=' + str(v)
for k, v in sorted(defs.items())]
else:
l = [str(defs)]
return l | process defines, resolving strings, lists, dictionaries, into a list of
strings |
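With the '=' separators restored above, the helper flattens mixed define specifications into `NAME` / `NAME=VALUE` strings. A hypothetical call:

processDefines(['FOO', ('BAR', 1), {'BAZ': 2}, None])
# -> ['FOO', 'BAR=1', 'BAZ=2']
processDefines('SINGLE')
# -> ['SINGLE']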
386,130 | def from_csv(cls, path:PathOrStr, csv_name, valid_pct:float=0.2, test:Optional[str]=None,
tokenizer:Tokenizer=None, vocab:Vocab=None, classes:Collection[str]=None, delimiter:str=None, header='infer',
text_cols:IntsOrStrs=1, label_cols:IntsOrStrs=0, label_delim:str=None,
chunksize:int=10000, max_vocab:int=60000, min_freq:int=2,
mark_fields:bool=False, include_bos:bool=True, include_eos:bool=False, **kwargs) -> DataBunch:
"Create a `TextDataBunch` from texts in csv files. `kwargs` are passed to the dataloader creation."
df = pd.read_csv(Path(path)/csv_name, header=header, delimiter=delimiter)
df = df.iloc[np.random.permutation(len(df))]
cut = int(valid_pct * len(df)) + 1
train_df, valid_df = df[cut:], df[:cut]
test_df = None if test is None else pd.read_csv(Path(path)/test, header=header, delimiter=delimiter)
return cls.from_df(path, train_df, valid_df, test_df, tokenizer=tokenizer, vocab=vocab, classes=classes, text_cols=text_cols,
label_cols=label_cols, label_delim=label_delim, chunksize=chunksize, max_vocab=max_vocab,
min_freq=min_freq, mark_fields=mark_fields,
include_bos=include_bos, include_eos=include_eos, **kwargs) | Create a `TextDataBunch` from texts in csv files. `kwargs` are passed to the dataloader creation. |
386,131 | def search_responsify(serializer, mimetype):
def view(pid_fetcher, search_result, code=200, headers=None, links=None,
item_links_factory=None):
response = current_app.response_class(
serializer.serialize_search(pid_fetcher, search_result,
links=links,
item_links_factory=item_links_factory),
mimetype=mimetype)
response.status_code = code
if headers is not None:
response.headers.extend(headers)
if links is not None:
add_link_header(response, links)
return response
return view | Create a Records-REST search result response serializer.
:param serializer: Serializer instance.
:param mimetype: MIME type of response.
:returns: Function that generates a record HTTP response. |
386,132 | def emulate_mouse(self, key_code, x_val, y_val, data):
else:
if key_code == 0x020B and data == 2:
key_code = 0x020B2
elif key_code == 0x020C and data == 2:
key_code = 0x020C2
code, value, scan_code = self.mouse_codes[key_code]
scan_event, key_event = self.emulate_press(
code, scan_code, value, self.timeval)
events.append(scan_event)
events.append(key_event)
x_event, y_event = self.emulate_abs(x_val, y_val, self.timeval)
events.append(x_event)
events.append(y_event)
events.append(self.sync_marker(self.timeval))
self.write_to_pipe(events) | Emulate the ev codes using the data Windows has given us.
Note that by default in Windows, to recognise a double click,
you just notice two clicks in a row within a reasonablely
short time period.
However, if the application developer sets the application
window's class style to CS_DBLCLKS, the operating system will
notice the four button events (down, up, down, up), intercept
them and then send a single key code instead.
There are no such special double click codes on other
platforms, so not obvious what to do with them. It might be
best to just convert them back to four events.
Currently we do nothing.
((0x0203, 'WM_LBUTTONDBLCLK'),
(0x0206, 'WM_RBUTTONDBLCLK'),
(0x0209, 'WM_MBUTTONDBLCLK'),
(0x020D, 'WM_XBUTTONDBLCLK')) |
386,133 | def _runcmd(progargs, stdinput=None):
stdin = None
if stdinput is not None:
assert(isinstance(stdinput, list))
stdin=PIPE
err = 0
output = b''
log_debug("Calling {} with input {}".format(' '.join(progargs), stdinput))
try:
p = Popen(progargs, shell=True, stdin=stdin,
stderr=STDOUT, stdout=PIPE, universal_newlines=True)
if stdinput is not None:
for cmd in stdinput:
print(cmd, file=p.stdin)
p.stdin.close()
output = p.stdout.read()
p.stdout.close()
err = p.wait(timeout=1.0)
except OSError as e:
err = e.errno
log_warn("Error calling {}: {}".format(progargs, e.stderror))
except Exception as e:
errstr = str(e)
log_warn("Error calling {}: {}".format(progargs, errstr))
err = -1
log_debug("Result of command (errcode {}): {}".format(err, output))
return err,output | Run the command progargs with optional input to be fed in to stdin. |
386,134 | def get_schema_descendant(
self, route: SchemaRoute) -> Optional[SchemaNode]:
node = self
for p in route:
node = node.get_child(*p)
if node is None:
return None
return node | Return descendant schema node or ``None`` if not found.
Args:
route: Schema route to the descendant node
(relative to the receiver). |
386,135 | def transformer_ada_lmpackedbase_dialog():
hparams = transformer_base_vq_ada_32ex_packed()
hparams.max_length = 1024
hparams.ffn_layer = "dense_relu_dense"
hparams.batch_size = 4096
return hparams | Set of hyperparameters. |
386,136 | def get_start_time(self):
if not isinstance(self.duration, tuple):
return None
if self.duration[0] is not None:
return self.duration[0]
else:
if (self.previous_entry and
isinstance(self.previous_entry.duration, tuple) and
self.previous_entry.duration[1] is not None):
return self.previous_entry.duration[1]
return None | Return the start time of the entry as a :class:`datetime.time` object.
If the start time is `None`, the end time of the previous entry will be
returned instead. If the current entry doesn't have a duration in the
form of a tuple, if there's no previous entry or if the previous entry
has no end time, the value `None` will be returned. |
386,137 | def delete_message_from_handle(self, queue, receipt_handle):
params = {'ReceiptHandle': receipt_handle}
return self.get_status('DeleteMessage', params, queue.id) | Delete a message from a queue, given a receipt handle.
:type queue: A :class:`boto.sqs.queue.Queue` object
:param queue: The Queue from which messages are read.
:type receipt_handle: str
:param receipt_handle: The receipt handle for the message
:rtype: bool
:return: True if successful, False otherwise. |
386,138 | def save_figure_raw_data(figure="gcf", **kwargs):
path = _s.dialogs.Save(**kwargs)
if path=="": return "aborted."
if figure=="gcf": figure = _pylab.gcf()
for n in range(len(figure.axes)):
a = figure.axes[n]
for m in range(len(a.lines)):
l = a.lines[m]
x = l.get_xdata()
y = l.get_ydata()
p = _os.path.split(path)
p = _os.path.join(p[0], "axes" + str(n) + " line" + str(m) + " " + p[1])
print(p)
f = open(p, 'w')
for j in range(0, len(x)):
f.write(str(x[j]) + "\t" + str(y[j]) + "\n")
f.close() | This will just output an ascii file for each of the traces in the shown figure.
**kwargs are sent to dialogs.Save() |
386,139 | def snapshot (self):
nextSnapshotNum = self.nextSnapshotNum
nextSnapshotPath = self.getFullPathToSnapshot(nextSnapshotNum)
if os.path.lexists(nextSnapshotPath):
self.rmR(nextSnapshotPath)
self.mkdirp(os.path.join(nextSnapshotPath, ".experiment"))
return self.dump(nextSnapshotPath).__markLatest(nextSnapshotNum) | Take a snapshot of the experiment.
Returns `self`. |
386,140 | def camera_info(self, camera_ids, **kwargs):
api = self._api_info[]
payload = dict({
: self._sid,
: api[],
: ,
: api[],
: .join(str(id) for id in camera_ids),
}, **kwargs)
response = self._get_json_with_retry(api[], payload)
cameras = []
for data in response[][]:
cameras.append(Camera(data, self._video_stream_url))
return cameras | Return a list of cameras matching camera_ids. |
386,141 | def add(self, chassis):
self.chassis_chain[chassis] = IxeChassis(self.session, chassis, len(self.chassis_chain) + 1)
self.chassis_chain[chassis].connect() | add chassis.
:param chassis: chassis IP address. |
386,142 | def _set_up_schema_elements_of_kind(self, class_name_to_definition, kind, class_names):
allowed_duplicated_edge_property_names = frozenset({
EDGE_DESTINATION_PROPERTY_NAME, EDGE_SOURCE_PROPERTY_NAME
})
orientdb_base_classes = frozenset({
ORIENTDB_BASE_VERTEX_CLASS_NAME,
ORIENTDB_BASE_EDGE_CLASS_NAME,
})
for class_name in class_names:
class_definition = class_name_to_definition[class_name]
class_fields = class_definition.get()
if class_fields is None:
class_fields = dict()
abstract = class_definition[]
if class_name in orientdb_base_classes:
.format(property_name, class_name))
self._elements[class_name] = SchemaElement(class_name, kind, abstract,
property_name_to_descriptor, class_fields) | Load all schema classes of the given kind. Used as part of __init__. |
386,143 | def respond_unauthorized(self, request_authentication=False):
headers = {}
if request_authentication:
headers[] = + self.__config[] +
self.send_response_full(b, status=401, headers=headers)
return | Respond to the client that the request is unauthorized.
:param bool request_authentication: Whether to request basic authentication information by sending a WWW-Authenticate header. |
386,144 | def nonzero(self):
df = self.loc[:,self.pst.nnz_obs_names]
return ObservationEnsemble.from_dataframe(df=df,
pst=self.pst.get(obs_names=self.pst.nnz_obs_names)) | property decorated method to get a new ObservationEnsemble
of only non-zero weighted observations
Returns
-------
ObservationEnsemble : ObservationEnsemble |
386,145 | def list_hosting_device_handled_by_config_agent(
self, client, cfg_agent_id, **_params):
return client.get((ConfigAgentHandlingHostingDevice.resource_path +
CFG_AGENT_HOSTING_DEVICES) % cfg_agent_id,
params=_params) | Fetches a list of hosting devices handled by a config agent. |
386,146 | def set_request(self, method=None, sub_url="", data=None, params=None,
proxies=None):
self.method = method or self.method
self.url = self.base_uri and (self.base_uri + sub_url) or sub_url
self.params.update(params or {})
self.proxies = proxies or self.proxies
if self.params:
self.url += '?' + '&'.join(
['%s=%s' % (k, v) for k, v in self.params.items()])
self.request_data = deepcopy(data)
self._timestamps[] = time.time()
if isinstance(data, dict) and 'Content-Type' not in self.headers:
self.headers['Content-Type'] = 'application/json'
self.json = data
self.accepted_return]
self._stage = STAGE_SET | :param method: str of the method of the api_call
:param sub_url: str of the url after the uri
:param data: dict of form data to be sent with the request
:param params: dict of additional data to be sent as the request args
:param proxies: str of the proxie to use
:return: None |
386,147 | def write(self, outfile):
hdu_pri = fits.PrimaryHDU()
hdu_exp = self._create_exp_hdu(self.data)
hdu_exp.name = 'EXPOSURE'
hdu_exp_wt = self._create_exp_hdu(self._data_wt)
hdu_exp_wt.name = 'WEIGHTED_EXPOSURE'
cols = [Column(name='CTHETA_MIN', dtype='f4',
data=self.costh_edges[:-1][::-1]),
Column(name='CTHETA_MAX', dtype='f4',
data=self.costh_edges[1:][::-1]), ]
hdu_bnds = fits.table_to_hdu(Table(cols))
hdu_bnds.name = 'CTHETABOUNDS'
hdu_gti = fits.table_to_hdu(self._tab_gti)
hdu_gti.name = 'GTI'
hdus = [hdu_pri, hdu_exp, hdu_exp_wt,
hdu_bnds, hdu_gti]
for hdu in hdus:
hdu.header['TSTART'] = self.tstart
hdu.header['TSTOP'] = self.tstop
with fits.HDUList(hdus) as hdulist:
hdulist.writeto(outfile, clobber=True) | Write the livetime cube to a FITS file. |
386,148 | def _compute_B_statistics(self, K, W, log_concave, *args, **kwargs):
if not log_concave:
W = np.clip(W, 1e-6, 1e+30)
if np.any(np.isnan(W)):
raise ValueError()
W_12 = np.sqrt(W)
B = np.eye(K.shape[0]) + W_12*K*W_12.T
L = jitchol(B)
LiW12, _ = dtrtrs(L, np.diagflat(W_12), lower=1, trans=0)
K_Wi_i = np.dot(LiW12.T, LiW12)
C = np.dot(LiW12, K)
Ki_W_i = K - C.T.dot(C)
I_KW_i = np.eye(K.shape[0]) - np.dot(K, K_Wi_i)
logdet_I_KW = 2*np.sum(np.log(np.diag(L)))
return K_Wi_i, logdet_I_KW, I_KW_i, Ki_W_i | Rasmussen suggests the use of a numerically stable positive definite matrix B
Which has a positive diagonal elements and can be easily inverted
:param K: Prior Covariance matrix evaluated at locations X
:type K: NxN matrix
:param W: Negative hessian at a point (diagonal matrix)
:type W: Vector of diagonal values of Hessian (1xN)
:returns: (W12BiW12, L_B, Li_W12) |
386,149 | def _RunActions(self, rule, client_id):
actions_count = 0
for action in rule.actions:
try:
token = self.token.Copy()
token.username = "Foreman"
if action.HasField("hunt_id"):
if self._CheckIfHuntTaskWasAssigned(client_id, action.hunt_id):
logging.info(
"Foreman: ignoring hunt %s on client %s: was started "
"here before", client_id, action.hunt_id)
else:
logging.info("Foreman: Starting hunt %s on client %s.",
action.hunt_id, client_id)
flow_cls = registry.AFF4FlowRegistry.FlowClassByName(
action.hunt_name)
flow_cls.StartClients(action.hunt_id, [client_id])
actions_count += 1
else:
flow.StartAFF4Flow(
client_id=client_id,
flow_name=action.flow_name,
token=token,
**action.argv.ToDict())
actions_count += 1
except Exception as e:
logging.exception("Failure running foreman action on client %s: %s",
action.hunt_id, e)
return actions_count | Run all the actions specified in the rule.
Args:
rule: Rule which actions are to be executed.
client_id: Id of a client where rule's actions are to be executed.
Returns:
Number of actions started. |
386,150 | def GetNextWrittenEventSource(self):
if not self._storage_file:
raise IOError()
event_source = self._storage_file.GetEventSourceByIndex(
self._written_event_source_index)
if event_source:
self._written_event_source_index += 1
return event_source | Retrieves the next event source that was written after open.
Returns:
EventSource: event source or None if there are no newly written ones.
Raises:
IOError: when the storage writer is closed.
OSError: when the storage writer is closed. |
386,151 | def copy_to(self, destination):
target = self.__get_destination__(destination)
logger.info("Copying %s to %s" % (self, target))
shutil.copy(self.path, unicode(destination))
return target | Copies the file to the given destination. Returns a File
object that represents the target file. `destination` must
be a File or Folder object. |
386,152 | def factory(cls, note, fn=None):
def decorator(f):
provider = cls.factory_provider.bind(f)
cls.register(note, provider)
return f
if fn is not None:
decorator(fn)
else:
return decorator | Register a function as a provider.
Function (name support is optional)::
from jeni import Injector as BaseInjector
from jeni import Provider
class Injector(BaseInjector):
pass
@Injector.factory('echo')
def echo(name=None):
return name
Registration can be a decorator or a direct method call::
Injector.factory('echo', echo) |
386,153 | def HashIt(self):
while True:
interval = self._GetNextInterval()
if interval is None:
break
self.file.seek(interval.start, os.SEEK_SET)
block = self.file.read(interval.end - interval.start)
if len(block) != interval.end - interval.start:
raise RuntimeError()
self._HashBlock(block, interval.start, interval.end)
self._AdjustIntervals(interval.start, interval.end)
results = []
for finger in self.fingers:
res = {}
leftover = finger.CurrentRange()
if leftover:
if (len(finger.ranges) > 1 or leftover.start != self.filelength or
leftover.end != self.filelength):
raise RuntimeError()
res.update(finger.metadata)
for hasher in finger.hashers:
res[hasher.name] = hasher.digest()
results.append(res)
self.fingers = []
return sorted(results, key=lambda r: r['name']) | Finalizing function for the Fingerprint class.
This method applies all the different hash functions over the
previously specified different ranges of the input file, and
computes the resulting hashes.
After calling HashIt, the state of the object is reset to its
initial state, with no fingers defined.
Returns:
An array of dicts, with each dict containing name of fingerprint
type, names of hashes and values, and additional, type-dependent
key / value pairs, such as an array of SignedData tuples for the
PE/COFF fingerprint type.
Raises:
RuntimeError: when internal inconsistencies occur. |
386,154 | def short_title(self, key, value):
short_title = value.get()
title_variants = self.get('title_variants', [])
if value.get():
short_title = value.get()
title_variants.append(value.get())
self['title_variants'] = title_variants
return short_title | Populate the ``short_title`` key.
Also populates the ``title_variants`` key through side effects. |
386,155 | def install(name=None, refresh=False, pkgs=None, version=None, test=False, **kwargs):
<package>old<old-version>new<new-version>******["foo", "bar"]
targets = salt.utils.args.split_input(pkgs) if pkgs else [name]
if not targets:
return {}
if pkgs:
log.debug(, name, targets)
old = list_pkgs()
errors = []
for target in targets:
filename = os.path.basename(target)
if filename.endswith():
if _is_installed_rpm(filename.split()[0]):
continue
cmdflags =
if test:
cmdflags +=
cmd = [, cmdflags, target]
out = __salt__[](cmd, output_loglevel=)
else:
if _is_installed(target):
continue
cmd =
if test:
cmd +=
cmd +=
dirpath = os.path.dirname(target)
cmd += dirpath ++ filename
out = __salt__[](cmd, output_loglevel=)
if 0 != out[]:
errors.append(out[])
__context__.pop(, None)
new = list_pkgs()
ret = salt.utils.data.compare_dicts(old, new)
if errors:
raise CommandExecutionError(
,
info={
: ret,
: errors
}
)
if test:
return
return ret | Install the named fileset(s)/rpm package(s).
name
The name of the fileset or rpm package to be installed.
refresh
Whether or not to update the yum database before executing.
Multiple Package Installation Options:
pkgs
A list of filesets and/or rpm packages to install.
Must be passed as a python list. The ``name`` parameter will be
ignored if this option is passed.
version
Install a specific version of a fileset/rpm package.
(Unused at present).
test
Verify that command functions correctly:
Returns a dict containing the new fileset(s)/rpm package(s) names and versions:
{'<package>': {'old': '<old-version>',
'new': '<new-version>'}}
CLI Example:
.. code-block:: bash
salt '*' pkg.install /stage/middleware/AIX/bash-4.2-3.aix6.1.ppc.rpm
salt '*' pkg.install /stage/middleware/AIX/bash-4.2-3.aix6.1.ppc.rpm refresh=True
salt '*' pkg.install /stage/middleware/AIX/VIOS2211_update/tpc_4.1.1.85.bff
salt '*' pkg.install /stage/middleware/AIX/Xlc/usr/sys/inst.images/xlC.rte
salt '*' pkg.install /stage/middleware/AIX/Firefox/ppc-AIX53/Firefox.base
salt '*' pkg.install pkgs='["foo", "bar"]' |
386,156 | def _create_parsing_plan(self, desired_type: Type[T], filesystem_object: PersistedObject, logger: Logger,
log_only_last: bool = False):
logger.debug( + get_parsing_plan_log_str(filesystem_object, desired_type,
log_only_last=log_only_last, parser=self))
return AnyParser._RecursiveParsingPlan(desired_type, filesystem_object, self, logger) | Adds a log message and creates a recursive parsing plan.
:param desired_type:
:param filesystem_object:
:param logger:
:param log_only_last: a flag to only log the last part of the file path (default False)
:return: |
386,157 | def CheckApproversForLabel(self, token, client_urn, requester, approvers,
label):
auth = self.reader.GetAuthorizationForSubject(label)
if not auth:
return True
if auth.requester_must_be_authorized:
if not self.CheckPermissions(requester, label):
raise access_control.UnauthorizedAccess(
"User %s not in %s or groups:%s for %s" % (requester, auth.users,
auth.groups, label),
subject=client_urn,
requested_access=token.requested_access)
approved_count = 0
for approver in approvers:
if self.CheckPermissions(approver, label) and approver != requester:
approved_count += 1
if approved_count < auth.num_approvers_required:
raise access_control.UnauthorizedAccess(
"Found %s approvers for %s, needed %s" %
(approved_count, label, auth.num_approvers_required),
subject=client_urn,
requested_access=token.requested_access)
return True | Checks if requester and approvers have approval privileges for labels.
Checks against list of approvers for each label defined in approvers.yaml to
determine if the list of approvers is sufficient.
Args:
token: user token
client_urn: ClientURN object of the client
requester: username string of person requesting approval.
approvers: list of username strings that have approved this client.
label: label strings to check approval privs for.
Returns:
True if access is allowed, raises otherwise. |
386,158 | def deleteGenome(species, name) :
printf( % (species, name))
conf.db.beginTransaction()
objs = []
allGood = True
try :
genome = Genome_Raba(name = name, species = species.lower())
objs.append(genome)
pBar = ProgressBar(label = )
for typ in (Chromosome_Raba, Gene_Raba, Transcript_Raba, Exon_Raba, Protein_Raba) :
pBar.update()
f = RabaQuery(typ, namespace = genome._raba_namespace)
f.addFilter({ : genome})
for e in f.iterRun() :
objs.append(e)
pBar.close()
pBar = ProgressBar(nbEpochs = len(objs), label = )
for e in objs :
pBar.update()
e.delete()
pBar.close()
except KeyError as e :
raise KeyError("\tWARNING, couldns not there: ", e)
allGood = False
printf()
try :
shutil.rmtree(conf.getGenomeSequencePath(species, name))
except OSError as e:
OSError(, e)
allGood = False
conf.db.endTransaction()
return allGood | Removes a genome from the database |
386,159 | def street_address(self):
pattern = self.random_element(self.street_address_formats)
return self.generator.parse(pattern) | :example '791 Crist Parks' |
386,160 | def _get_audio_sample_rate(self, audio_abs_path):
sample_rate = int(
subprocess.check_output(
(
).format(audio_abs_path, "Sample Rate"),
shell=True, universal_newlines=True).rstrip())
return sample_rate | Parameters
----------
audio_abs_path : str
Returns
-------
sample_rate : int |
386,161 | def verify_oauth2_token(id_token, request, audience=None):
return verify_token(
id_token, request, audience=audience,
certs_url=_GOOGLE_OAUTH2_CERTS_URL) | Verifies an ID Token issued by Google's OAuth 2.0 authorization server.
Args:
id_token (Union[str, bytes]): The encoded token.
request (google.auth.transport.Request): The object used to make
HTTP requests.
audience (str): The audience that this token is intended for. This is
typically your application's OAuth 2.0 client ID. If None then the
audience is not verified.
Returns:
Mapping[str, Any]: The decoded token. |
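A typical call, sketched under the assumption that this is the `google.oauth2.id_token` helper the docstring describes; the token and client ID below are placeholders:

from google.oauth2 import id_token
from google.auth.transport import requests

token = "<ID token string>"          # placeholder
CLIENT_ID = "<your OAuth 2.0 client ID>"  # placeholder
claims = id_token.verify_oauth2_token(token, requests.Request(), audience=CLIENT_ID)
print(claims.get("email"))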
386,162 | def _add_conversation(self, conversation):
handler = functools.partial(
self._handle_conversation_exit,
conversation
)
tokens = []
def linked_token(signal, handler):
return signal, signal.connect(handler)
tokens.append(linked_token(conversation.on_exit, handler))
tokens.append(linked_token(conversation.on_failure, handler))
tokens.append(linked_token(conversation.on_message, functools.partial(
self.on_message,
conversation,
)))
self._conversation_meta[conversation] = (
tokens,
)
self._conversation_map[conversation.jid] = conversation
self.on_conversation_added(conversation) | Add the conversation and fire the :meth:`on_conversation_added` event.
:param conversation: The conversation object to add.
:type conversation: :class:`~.AbstractConversation`
The conversation is added to the internal list of conversations which
can be queried at :attr:`conversations`. The
:meth:`on_conversation_added` event is fired.
In addition, the :class:`ConversationService` subscribes to the
:meth:`~.AbstractConversation.on_exit` event to remove the conversation
from the list automatically. There is no need to remove a conversation
from the list explicitly. |
386,163 | def is_ge(dicom_input):
header = dicom_input[0]
if 'Modality' not in header or 'Manufacturer' not in header:
return False
if header.Modality.upper() != 'MR':
return False
if 'GE' not in header.Manufacturer.upper():
return False
return True | Use this function to detect if a dicom series is a GE dataset
:param dicom_input: list with dicom objects |
386,164 | def add_filter(self, property_name, operator, value):
if self.OPERATORS.get(operator) is None:
error_message = 'Invalid expression: "%s"' % (operator,)
choices_message = "Please use one of: =, <, <=, >, >=."
raise ValueError(error_message, choices_message)
if property_name == "__key__" and not isinstance(value, Key):
raise ValueError('Invalid key: "%s"' % value)
self._filters.append((property_name, operator, value)) | Filter the query based on a property name, operator and a value.
Expressions take the form of::
.add_filter('<property>', '<operator>', <value>)
where property is a property stored on the entity in the datastore
and operator is one of ``OPERATORS``
(ie, ``=``, ``<``, ``<=``, ``>``, ``>=``)::
>>> from google.cloud import datastore
>>> client = datastore.Client()
>>> query = client.query(kind='Person')
>>> query.add_filter('name', '=', 'James')
>>> query.add_filter('age', '>', 50)
:type property_name: str
:param property_name: A property name.
:type operator: str
:param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``.
:type value: :class:`int`, :class:`str`, :class:`bool`,
:class:`float`, :class:`NoneType`,
:class:`datetime.datetime`,
:class:`google.cloud.datastore.key.Key`
:param value: The value to filter on.
:raises: :class:`ValueError` if ``operation`` is not one of the
specified values, or if a filter names ``'__key__'`` but
passes an invalid value (a key is required). |
386,165 | def get_highest_build_tool(sdk_version=None):
if sdk_version is None:
sdk_version = config.sdk_version
android_home = os.environ.get(, os.environ.get())
build_tool_folder = '%s/build-tools' % android_home
folder_list = os.listdir(build_tool_folder)
versions = [folder for folder in folder_list if folder.startswith('%s.' % sdk_version)]
if len(versions) == 0:
return config.build_tool_version
return versions[::-1][0] | Gets the highest build tool version based on major version sdk version.
:param sdk_version(int) - sdk version to be used as the marjor build tool version context.
Returns:
A string containg the build tool version (default is 23.0.2 if none is found) |
386,166 | def get_login_info():
connections = {}
_defaults = {}
_defaults[] =
_defaults[] =
config = _config_file()
_config_test(config)
juicer.utils.Log.log_debug("Loading connection information:")
for section in config.sections():
cfg = dict(config.items(section))
connections[section] = Connectors(cfg)
if in cfg:
_defaults[] = cfg[]
if in cfg:
_defaults[] = cfg[]
juicer.utils.Log.log_debug("[%s] username: %s, base_url: %s" % \
(section, \
cfg[], \
cfg[]))
_defaults[] = config.sections()
return (connections, _defaults) | Give back an array of dicts with the connection
information for all the environments. |
386,167 | def migrate_window(bg):
"Take a pythoncard background resource and convert to a gui2py window"
ret = {}
for k, v in bg.items():
if k == :
v = WIN_MAP[v]._meta.name
elif k == :
menus = v[]
v = [migrate_control(menu) for menu in menus]
elif k == :
v = [migrate_control(comp) for comp in v]
else:
k = SPEC_MAP[].get(k, k)
ret[k] = v
return ret | Take a pythoncard background resource and convert to a gui2py window |
386,168 | def get_tissue_specificities(cls, entry):
tissue_specificities = []
query = "./comment[@type=]/text"
for ts in entry.iterfind(query):
tissue_specificities.append(models.TissueSpecificity(comment=ts.text))
return tissue_specificities | get list of :class:`pyuniprot.manager.models.TissueSpecificity` object from XML node entry
:param entry: XML node entry
:return: models.TissueSpecificity object |
386,169 | def get_meta(self):
rdf = self.get_meta_rdf(fmt=)
return PointMeta(self, rdf, self._client.default_lang, fmt=) | Get the metadata object for this Point
Returns a [PointMeta](PointMeta.m.html#IoticAgent.IOT.PointMeta.PointMeta) object - OR -
Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException)
containing the error if the infrastructure detects a problem
Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException)
if there is a communications problem between you and the infrastructure |
386,170 | def logs(self):
if self._resources is None:
self.__init()
if "logs" in self._resources:
url = self._url + "/logs"
return _logs.Log(url=url,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port,
initialize=True)
else:
return None | returns an object to work with the site logs |
386,171 | def __send_log_set_exc_and_wait(self, msg, level=None, wait_seconds=CONN_RETRY_DELAY_SECONDS):
logger.log(
((logging.DEBUG if self.__send_exc else logging.ERROR) if level is None else level),
msg,
exc_info=DEBUG_ENABLED
)
self.__send_exc_time = monotonic()
self.__send_exc = exc_info()[1]
self.__end.wait(wait_seconds) | To be called in exception context only.
msg - message to log
level - logging level. If not specified, ERROR unless it is a repeated failure in which case DEBUG. If
specified, the given level will always be used.
wait_seconds - how long to pause for (so retry is not triggered immediately) |
386,172 | def fence_status_encode(self, breach_status, breach_count, breach_type, breach_time):
return MAVLink_fence_status_message(breach_status, breach_count, breach_type, breach_time) | Status of geo-fencing. Sent in extended status stream when fencing
enabled
breach_status : 0 if currently inside fence, 1 if outside (uint8_t)
breach_count : number of fence breaches (uint16_t)
breach_type : last breach type (see FENCE_BREACH_* enum) (uint8_t)
breach_time : time of last breach in milliseconds since boot (uint32_t) |
386,173 | def add(self, *l):
for a in flatten(l):
self._add([self.Inner(a)], self.l) | add inner to outer
Args:
*l: element that is passed into Inner init |
386,174 | def save(self,callit="misc",closeToo=True,fullpath=False):
if fullpath is False:
fname=self.abf.outPre+"plot_"+callit+".jpg"
else:
fname=callit
if not os.path.exists(os.path.dirname(fname)):
os.mkdir(os.path.dirname(fname))
plt.savefig(fname)
self.log.info("saved [%s]",os.path.basename(fname))
if closeToo:
plt.close() | save the existing figure. does not close it. |
386,175 | def run_plugins(context_obj, boto3_clients):
def print_if_verbose(message):
if context_obj.verbose:
print(message)
service_name = os.path.basename(sys.argv[0]).replace(".py", "")
try:
import plugins
except ImportError:
print_if_verbose("no plugins detected.")
return
else:
for plugin_importer, plugin_name, plugin_ispkg in pkgutil.iter_modules(plugins.__path__):
if plugin_ispkg:
plugin_package = importlib.import_module("plugins.{}".format(plugin_name))
for importer, modname, ispkg in pkgutil.iter_modules(plugin_package.__path__):
plugin_module = importlib.import_module("plugins.{}.{}".format(plugin_name, modname))
for name, obj in inspect.getmembers(plugin_module):
if inspect.isclass(obj) and obj.__name__ == "EFPlugin":
plugin_class = getattr(plugin_module, name)
plugin_instance = plugin_class(context=context_obj, clients=boto3_clients)
if plugin_instance.service == service_name:
print_if_verbose("plugin loaded".format(plugin_name))
if not context_obj.commit:
print_if_verbose("dryrun: skipping plugin execution.")
else:
try:
plugin_instance.run()
except AttributeError:
print("error executing plugin ".format(modname)) | Executes all loaded plugins designated for the service calling the function.
Args:
context_obj (obj:EFContext): The EFContext object created by the service.
boto3_clients (dict): Dictionary of boto3 clients created by ef_utils.create_aws_clients() |
386,176 | def delete_userpass(self, username, mount_point='userpass'):
return self._adapter.delete('/v1/auth/{}/users/{}'.format(mount_point, username)) | DELETE /auth/<mount point>/users/<username>
:param username:
:type username:
:param mount_point:
:type mount_point:
:return:
:rtype: |
386,177 | def setup_tree(ctx, verbose=None, root=None, tree_dir=None, modules_dir=None):
print()
ctx.run(.format(tree_dir, root, modules_dir)) | Sets up the SDSS tree enviroment |
386,178 | def GetFieldValuesTuple(self, trip_id):
result = []
for fn in self._FIELD_NAMES:
if fn == 'trip_id':
result.append(trip_id)
else:
# all other fields are attributes of this StopTime; empty values become ''
result.append(getattr(self, fn) or '')
return tuple(result) | Return a tuple that outputs a row of _FIELD_NAMES to be written to a
GTFS file.
Arguments:
trip_id: The trip_id of the trip to which this StopTime corresponds.
It must be provided, as it is not stored in StopTime. |
386,179 | def FrameworkDir32(self):
guess_fw = os.path.join(self.WinDir, r'Microsoft.NET\Framework')
return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw | Microsoft .NET Framework 32bit directory. |
386,180 | def getTypeWidth(self, dtype: HdlType, do_eval=False) -> Tuple[int, Union[int, RtlSignal], bool]:
width = dtype.width
if isinstance(width, int):
widthStr = str(width)
else:
widthStr = self.getExprVal(width, do_eval=do_eval)
return width, widthStr, False | :see: doc of method on parent class |
386,181 | def get_source_lane(fork_process, pipeline_list):
fork_source = fork_process[-1]
fork_sig = [x for x in fork_process if x != "__init__"]
for position, p in enumerate(pipeline_list[::-1]):
if p["output"]["process"] == fork_source:
lane = p["output"]["lane"]
logger.debug("Possible source match found in position {} in lane"
" {}".format(position, lane))
lane_sequence = [x["output"]["process"] for x in pipeline_list
if x["output"]["lane"] == lane]
logger.debug("Testing lane sequence against fork signature"
" ".format(lane_sequence, fork_sig))
if lane_sequence == fork_sig:
return p["output"]["lane"]
return 0 | Returns the lane of the last process that matches fork_process
Parameters
----------
fork_process : list
List of processes before the fork.
pipeline_list : list
List with the pipeline connection dictionaries.
Returns
-------
int
Lane of the last process that matches fork_process |
386,182 | def _nick(self, nick):
with self.lock:
self.send('NICK %s' % nick)
if self.readable():
msg = self._recv(expected_replies=)
if msg[0] == :
if not self.hide_called_events:
self.stepback()
for channel in self.channels:
if in self.channels[channel]:
priv_level = \
self.channels[channel][][self.current_nick]
del self.channels[channel][][self.current_nick]
self.channels[channel][][nick] = priv_level
self.current_nick = nick | Sets your nick.
Required arguments:
* nick - New nick. |
386,183 | def list_lbaas_loadbalancers(self, retrieve_all=True, **_params):
return self.list('loadbalancers', self.lbaas_loadbalancers_path,
retrieve_all, **_params) | Fetches a list of all lbaas_loadbalancers for a project. |
386,184 | def get_nameserver_detail_output_show_nameserver_nameserver_cascaded(self, **kwargs):
config = ET.Element("config")
get_nameserver_detail = ET.Element("get_nameserver_detail")
config = get_nameserver_detail
output = ET.SubElement(get_nameserver_detail, "output")
show_nameserver = ET.SubElement(output, "show-nameserver")
nameserver_portid_key = ET.SubElement(show_nameserver, "nameserver-portid")
nameserver_portid_key.text = kwargs.pop('nameserver_portid')
nameserver_cascaded = ET.SubElement(show_nameserver, "nameserver-cascaded")
nameserver_cascaded.text = kwargs.pop('nameserver_cascaded')
callback = kwargs.pop('callback', self._callback)
return callback(config) | Auto Generated Code |
386,185 | def greenhall_sz(t, F, alpha, d):
if d == 1:
a = 2*greenhall_sx(t, F, alpha)
b = greenhall_sx(t-1.0, F, alpha)
c = greenhall_sx(t+1.0, F, alpha)
return a-b-c
elif d == 2:
a = 6*greenhall_sx(t, F, alpha)
b = 4*greenhall_sx(t-1.0, F, alpha)
c = 4*greenhall_sx(t+1.0, F, alpha)
dd = greenhall_sx(t-2.0, F, alpha)
e = greenhall_sx(t+2.0, F, alpha)
return a-b-c+dd+e
elif d == 3:
a = 20.0*greenhall_sx(t, F, alpha)
b = 15.0*greenhall_sx(t-1.0, F, alpha)
c = 15.0*greenhall_sx(t+1.0, F, alpha)
dd = 6.0*greenhall_sx(t-2.0, F, alpha)
e = 6.0*greenhall_sx(t+2.0, F, alpha)
f = greenhall_sx(t-3.0, F, alpha)
g = greenhall_sx(t+3.0, F, alpha)
return a-b-c+dd+e-f-g
assert(0) | Eqn (9) from Greenhall2004 |
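Restated as formulas (read directly off the branches above; these are the difference forms of Eqn (9) in Greenhall 2004 for d = 1 and d = 2):

$$s_z(t) = 2\,s_x(t) - s_x(t-1) - s_x(t+1) \qquad (d = 1)$$
$$s_z(t) = 6\,s_x(t) - 4\,s_x(t-1) - 4\,s_x(t+1) + s_x(t-2) + s_x(t+2) \qquad (d = 2)$$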
386,186 | def validate(method):
name_error =
@functools.wraps(method)
def validator(self, name, *args):
if name not in self.allowed_opts:
raise ValueError(name_error.format(name))
return method(self, name, *args)
return validator | Config option name value validator decorator. |
386,187 | def main():
for flag in (,):
if flag not in sys.argv:
sys.argv.append(flag)
obj = PlaybookCLI(sys.argv)
obj.parse()
obj.run() | Run playbook |
386,188 | def codestr2rst(codestr, lang='python'):
code_directive = "\n.. code-block:: {0}\n\n".format(lang)
indented_block = indent(codestr, ' ' * 4)
return code_directive + indented_block | Return reStructuredText code block from code string |
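For example, assuming the repaired 'python' default and that `indent` behaves like `textwrap.indent` (output shown as comments):

print(codestr2rst("x = 1\nprint(x)"))
#
# .. code-block:: python
#
#     x = 1
#     print(x)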
386,189 | def p_statement_border(p):
p[0] = make_sentence('BORDER',
make_typecast(TYPE.ubyte, p[2], p.lineno(1))) | statement : BORDER expr |
386,190 | def auth(self, username, password):
binddn = self._get_user(self._byte_p2(username), NO_ATTR)
if binddn is not None:
ldap_client = self._connect()
try:
ldap_client.simple_bind_s(
self._byte_p2(binddn),
self._byte_p2(password)
)
except ldap.INVALID_CREDENTIALS:
ldap_client.unbind_s()
return False
ldap_client.unbind_s()
return True
else:
return False | Authentication of a user |
386,191 | def _currentLineExtraSelections(self):
if self._currentLineColor is None:
return []
def makeSelection(cursor):
selection = QTextEdit.ExtraSelection()
selection.format.setBackground(self._currentLineColor)
selection.format.setProperty(QTextFormat.FullWidthSelection, True)
cursor.clearSelection()
selection.cursor = cursor
return selection
rectangularSelectionCursors = self._rectangularSelection.cursors()
if rectangularSelectionCursors:
return [makeSelection(cursor) \
for cursor in rectangularSelectionCursors]
else:
return [makeSelection(self.textCursor())] | QTextEdit.ExtraSelection, which highlightes current line |
386,192 | def starlike(x):
"weird things happen to cardinality when working with * in comma-lists. this detects when to do that."
return isinstance(x,sqparse2.AsterX) or isinstance(x,sqparse2.AttrX) and isinstance(x.attr,sqparse2.AsterX) | weird things happen to cardinality when working with * in comma-lists. this detects when to do that. |
386,193 | def map_list(key_map, *inputs, copy=False, base=None):
d = {} if base is None else base
for m, v in zip(key_map, inputs):
if isinstance(m, dict):
map_dict(m, v, base=d)
elif isinstance(m, list):
map_list(m, *v, base=d)
else:
d[m] = v
return combine_dicts(copy=copy, base=d) | Returns a new dict.
:param key_map:
A list that maps the dict keys ({old key: new key}
:type key_map: list[str | dict | list]
:param inputs:
A sequence of data.
:type inputs: iterable | dict | int | float | list | tuple
:param copy:
If True, it returns a deepcopy of input values.
:type copy: bool, optional
:param base:
Base dict where combine multiple dicts in one.
:type base: dict, optional
:return:
A unique dict with new values.
:rtype: dict
Example::
>>> key_map = [
... 'a',
... {'a': 'c'},
... [
... 'a',
... {'a': 'd'}
... ]
... ]
>>> inputs = (
... 2,
... {'a': 3, 'b': 2},
... [
... 1,
... {'a': 4}
... ]
... )
>>> d = map_list(key_map, *inputs)
>>> sorted(d.items())
[('a', 1), ('b', 2), ('c', 3), ('d', 4)] |
386,194 | def partsphere(self, x):
self.counter += 1
dim = len(x)
x = array([x[i % dim] for i in xrange(2 * dim)])
N = 8
i = self.counter % dim
f = sum(x[np.random.randint(dim, size=N)]**2)
return f | Sphere (squared norm) test objective function |
386,195 | def create(self, **kwargs):
auth = None
if in kwargs:
auth = kwargs[]
del(kwargs[])
self._url = % (self.base_uri, , self.signature_id)
return self.embedded.sign_url.get(auth=auth, **kwargs) | returns the JSON object
{'embedded': {
'sign_url': 'https://www.hellosign.com/editor/embeddedSign?signature_id={signature_id}&token={token}',
'expires_at': {timestamp}
}} |
386,196 | def safe_dump_pk(obj, abspath, pk_protocol=pk_protocol, compress=False,
enable_verbose=True):
abspath = str(abspath)
temp_abspath = "%s.tmp" % abspath
dump_pk(obj, temp_abspath, pk_protocol=pk_protocol,
replace=True, compress=compress, enable_verbose=enable_verbose)
shutil.move(temp_abspath, abspath) | A stable version of dump_pk, silently overwrite existing file.
When your program been interrupted, you lose nothing. Typically if your
program is interrupted by any reason, it only leaves a incomplete file.
If you use replace=True, then you also lose your old file.
So a bettr way is to:
1. dump pickle to a temp file.
2. when it's done, rename it to #abspath, overwrite the old one.
This way guarantee atomic write.
:param obj: Picklable Python Object.
:param abspath: ``save as`` path, file extension has to be ``.pickle`` or
``.gz`` (for compressed Pickle).
:type abspath: string
:param pk_protocol: (default your python version) use 2, to make a
py2.x/3.x compatible pickle file. But 3 is faster.
:type pk_protocol: int
:param compress: (default False) If ``True``, use GNU program gzip to
compress the Pickle file. Disk usage can be greatly reduced. But you
have to use :func:`load_pk(abspath, compress=True)<load_pk>` in loading.
:type compress: boolean
:param enable_verbose: (default True) Trigger for message.
:type enable_verbose: boolean
Usage::
>>> from weatherlab.lib.dataIO.pk import safe_dump_pk
>>> pk = {"a": 1, "b": 2}
>>> safe_dump_pk(pk, "test.pickle")
Dumping to test.pickle...
Complete! Elapse 0.001763 sec
**Chinese documentation (translated)**
When writing to a file, an interrupted program leaves behind an incomplete file; with
overwrite-style writing you also lose the original. To make the write atomic (either it
fully completes or nothing changes), write to a temporary file first and, once done,
rename it over the old file. Even if the program is interrupted midway, only an
unfinished temporary file is left and the original is untouched.
Parameters
:param obj: a picklable Python object
:param abspath: path to write to. The extension must be ``.pickle`` or ``.gz``
(gz for compressed pickles).
:type abspath: ``str``
:param pk_protocol: (default: your Python major version) use 2 so the saved file can be
read by both py2.x and py3.x; protocol 3 is faster, smaller and more efficient.
:type pk_protocol: ``int``
:param compress: (default False) if ``True``, compress the pickle file with gzip,
typically shrinking it 10-20x. To read it back, use
:func:`load_pk(abspath, compress=True)<load_pk>`.
:type compress: ``bool``
:param enable_verbose: (default True) whether to print progress messages; recommended off for batch processing.
:type enable_verbose: ``bool`` |
386,197 | def get_firewall_rule(self, datacenter_id,
server_id, nic_id, firewall_rule_id):
response = self._perform_request(
'/datacenters/%s/servers/%s/nics/%s/firewallrules/%s' % (
datacenter_id,
server_id,
nic_id,
firewall_rule_id))
return response | Retrieves a single firewall rule by ID.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param server_id: The unique ID of the server.
:type server_id: ``str``
:param nic_id: The unique ID of the NIC.
:type nic_id: ``str``
:param firewall_rule_id: The unique ID of the firewall rule.
:type firewall_rule_id: ``str`` |
386,198 | def dug(obj, key, value):
array = key.split(".")
return _dug(obj, value, *array) | Inverse of dig: recursively set a value in a dictionary, using
dot notation.
>>> test = {"a":{"b":{"c":1}}}
>>> dug(test, "a.b.c", 10)
>>> test
{'a': {'b': {'c': 10}}} |
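The recursive helper `_dug` is not included in this row; a minimal sketch consistent with the docstring and the call `_dug(obj, value, *array)` might be:

def _dug(obj, value, *keys):
    # walk all but the last key, creating nested dicts as needed,
    # then assign value to the final key
    head, rest = keys[0], keys[1:]
    if not rest:
        obj[head] = value
        return
    _dug(obj.setdefault(head, {}), value, *rest)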
386,199 | def enable_argscope_for_module(module, log_shape=True):
if is_tfv2() and module == tf.layers:
module = tf.compat.v1.layers
for name, obj in getmembers(module):
if isfunction(obj):
setattr(module, name, enable_argscope_for_function(obj,
log_shape=log_shape)) | Overwrite all functions of a given module to support argscope.
Note that this function monkey-patches the module and therefore could
have unexpected consequences.
It has been only tested to work well with ``tf.layers`` module.
Example:
.. code-block:: python
import tensorflow as tf
enable_argscope_for_module(tf.layers)
Args:
log_shape (bool): print input/output shapes of each function. |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.