Unnamed: 0 (int64, 0–389k) | code (string, lengths 26–79.6k) | docstring (string, lengths 1–46.9k)
---|---|---|
377,500 | def GetDomain(self):
return (self.knots[self.degree - 1],
self.knots[len(self.knots) - self.degree]) | Returns the domain of the B-Spline |
377,501 | def DEFINE_choice(self, name, default, choices, help, constant=False):
self.AddOption(
type_info.Choice(
name=name, default=default, choices=choices, description=help),
constant=constant) | A helper for defining choice string options. |
377,502 | def connectionLost(self, reason):
try:
self.connected = False
if debug:
print(self.log_entry("CLOSED =", "none"))
t = time.time()
if time.time() - self.factory.last_cleanup >= self.cleanup:
self.factory.last_cleanup = t
old_node_ips = []
for node_ip in list(self.factory.nodes["passive"]):
passive_node = self.factory.nodes["passive"][node_ip]
if t - passive_node["time"] >= self.node_lifetime:
old_node_ips.append(node_ip)
for node_ip in old_node_ips:
del self.factory.nodes["passive"][node_ip]
old_node_ips = []
for node_ip in list(self.factory.nodes["simultaneous"]):
simultaneous_node =\
self.factory.nodes["simultaneous"][node_ip]
if t - simultaneous_node["time"] >= self.node_lifetime:
old_node_ips.append(node_ip)
for node_ip in old_node_ips:
del self.factory.nodes["simultaneous"][node_ip]
old_node_ips = []
for node_ip in list(self.factory.candidates):
old_candidates = []
for candidate in self.factory.candidates[node_ip]:
if node_ip not in self.factory.nodes["simultaneous"] \
and t - candidate["time"] >= self.challenge_timeout * 5:
old_candidates.append(candidate)
for candidate in old_candidates:
self.factory.candidates[node_ip].remove(candidate)
if not len(self.factory.candidates[node_ip]) and \
node_ip not in self.factory.nodes["simultaneous"]:
old_node_ips.append(node_ip)
for node_ip in old_node_ips:
del self.factory.candidates[node_ip]
except Exception as e:
error = parse_exception(e)
log_exception(error_log_path, error)
print(self.log_entry("ERROR =", error)) | Mostly handles clean-up of node + candidate structures.
Avoids memory exhaustion for a large number of connections. |
377,503 | def consume_token(self, tokens, index, tokens_len):
del tokens_len
if tokens[index].type == TokenType.EndInlineRST:
return _paste_tokens_line_by_line(tokens,
TokenType.RST,
self.begin,
index + 1) | Consume a token.
Returns a tuple of (tokens, tokens_len, index) when consumption is
completed and tokens have been merged together. |
377,504 | def boolean_flag(parser, name, default=False, help=None):
    dest = name.replace('-', '_')
parser.add_argument("--" + name, action="store_true", default=default, dest=dest, help=help)
parser.add_argument("--no-" + name, action="store_false", dest=dest) | Add a boolean flag to argparse parser.
Parameters
----------
parser: argparse.Parser
parser to add the flag to
name: str
--<name> will enable the flag, while --no-<name> will disable it
default: bool or None
default value of the flag
help: str
help string for the flag |
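A minimal usage sketch for the row above (the flag name and argument vector are illustrative, not from the source):

```python
import argparse

parser = argparse.ArgumentParser()
# boolean_flag registers a --render / --no-render pair writing to args.render.
boolean_flag(parser, "render", default=False, help="enable rendering")

args = parser.parse_args(["--render"])
print(args.render)  # True; passing --no-render instead would yield False
```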
377,505 | def del_permission_role(self, role, perm_view):
if perm_view in role.permissions:
try:
role.permissions.remove(perm_view)
self.get_session.merge(role)
self.get_session.commit()
log.info(
c.LOGMSG_INF_SEC_DEL_PERMROLE.format(str(perm_view), role.name)
)
except Exception as e:
log.error(c.LOGMSG_ERR_SEC_DEL_PERMROLE.format(str(e)))
                self.get_session.rollback() | Remove a permission-ViewMenu object from a role
:param role:
The role object
:param perm_view:
The PermissionViewMenu object |
377,506 | def worker_id(self):
if self._worker_id is not None: return self._worker_id
return self._get_worker_id(self._conn()) | A unique identifier for this queue instance and the items it owns. |
377,507 | def one(self, filter_by=None):
return Queryset(self, records=self._records.values()).one(filter_by=filter_by) | Parameters
----------
filter_by: callable, default None
Callable must take one argument (a record of table), and return True to keep record, or False to skip it.
Example : .one(lambda x: x.name == "my_name").
If None, records are not filtered.
Returns
-------
Record instance if one and only one record is found. Else raises.
Raises
------
RecordDoesNotExistError if no record is found
MultipleRecordsReturnedError if multiple records are found |
377,508 | def gradient(self, ts):
gradient = self._jmodel.gradient(_py2java(self._ctx, Vectors.dense(ts)))
return _java2py(self._ctx, gradient) | Find the gradient of the log likelihood with respect to the given time series.
Based on http://www.unc.edu/~jbhill/Bollerslev_GARCH_1986.pdf
Returns a 3-element array containing the gradient for the alpha, beta, and omega parameters. |
377,509 | def quantity(*args):
if len(args) == 1:
if isinstance(args[0], str):
return Quantity(from_string(args[0]))
elif isinstance(args[0], dict):
if hasattr(args[0]["value"], "__len__"):
return QuantVec(from_dict_v(args[0]))
else:
return Quantity(from_dict(args[0]))
elif isinstance(args[0], Quantity) or isinstance(args[0], QuantVec):
return args[0]
else:
            raise TypeError("Invalid argument type for quantity")
else:
if hasattr(args[0], "__len__"):
return QuantVec(*args)
else:
return Quantity(*args) | Create a quantity. This can be from a scalar or vector.
Example::
q1 = quantity(1.0, "km/s")
q2 = quantity("1km/s")
q1 = quantity([1.0,2.0], "km/s") |
377,510 | def load_and_init(self, modules):
self.load(modules)
self.get_instances()
return len(self.configuration_errors) == 0 | Import, instantiate & "init" the modules we manage
:param modules: list of the managed modules
:return: True if no errors |
377,511 | def register_hook(self, hook_name, fn):
if hook_name not in self._hooks:
self._hooks[hook_name] = fn
else:
            raise Exception('%s hook already registered!' % hook_name) | Register a function to be called on a GitHub event. |
377,512 | def _create_prelim(self):
self._verify(self.payload)
if "key" in self.payload[0] and self.payload[0]["key"]:
if next((i for i in self.payload if "key" not in i), False):
                raise ze.UnsupportedParams(
                    "Can't do anything if payload comes with keys")
        liblevel = "/{t}/{u}/items"
headers = {"Zotero-Write-Token": token(), "Content-Type": "application/json"}
headers.update(self.zinstance.default_headers())
if self.parentid:
for child in self.payload:
child["parentItem"] = self.parentid
to_send = json.dumps(self.payload)
req = requests.post(
url=self.zinstance.endpoint
+ liblevel.format(
t=self.zinstance.library_type, u=self.zinstance.library_id
),
data=to_send,
headers=headers,
)
try:
req.raise_for_status()
except requests.exceptions.HTTPError:
error_handler(req)
data = req.json()
for k in data["success"]:
self.payload[int(k)]["key"] = data["success"][k]
return data | Step 0: Register intent to upload files |
377,513 | def clear_feature(dev, feature, recipient = None):
if feature == ENDPOINT_HALT:
dev.clear_halt(recipient)
else:
bmRequestType, wIndex = _parse_recipient(recipient, util.CTRL_OUT)
dev.ctrl_transfer(bmRequestType = bmRequestType,
bRequest = 0x01,
wIndex = wIndex,
                          wValue = feature) | Clear/disable a specific feature.
dev is the Device object to which the request will be sent.
feature is the feature you want to disable.
The recipient can be None (in which case the request is sent to the device),
or an Interface or Endpoint descriptor. |
377,514 | def check_ace(path, objectType, user, permission=None, acetype=None, propagation=None, exactPermissionMatch=False):
    ret = {'result': False,
           'Exists': False,
           'comment': ''}
    dc = daclConstants()
    objectTypeBit = dc.getObjectTypeBit(objectType)
    path = dc.processPath(path, objectTypeBit)
    permission = permission.upper() if permission else None
    acetype = acetype.upper() if acetype else None
    propagation = propagation.upper() if propagation else None
    permissionbit = dc.getPermissionBit(objectTypeBit, permission) if permission else None
    acetypebit = dc.getAceTypeBit(acetype) if acetype else None
    propagationbit = dc.getPropagationBit(objectTypeBit, propagation) if propagation else None
    sidRet = _getUserSid(user)
    if not sidRet['result']:
        return sidRet
    dacls = _get_dacl(path, objectTypeBit)
    ret['result'] = True
    if dacls:
        for counter in range(0, dacls.GetAceCount()):
            ace = dacls.GetAce(counter)
            if ace[2] == sidRet['sid']:
                if not acetypebit or ace[0][0] == acetypebit:
                    if not propagationbit or (ace[0][1] & propagationbit) == propagationbit:
                        if not permissionbit:
                            ret['Exists'] = True
                            return ret
                        if exactPermissionMatch:
                            if ace[1] == permissionbit:
                                ret['Exists'] = True
                                return ret
                        else:
                            if (ace[1] & permissionbit) == permissionbit:
                                ret['Exists'] = True
                                return ret
    else:
        ret['comment'] = 'No DACL found for object.'
return ret | Checks a path to verify the ACE (access control entry) specified exists
Args:
path: path to the file/reg key
objectType: The type of object (FILE, DIRECTORY, REGISTRY)
user: user that the ACL is for
permission: permission to test for (READ, FULLCONTROL, etc)
acetype: the type of ACE (ALLOW or DENY)
propagation: the propagation type of the ACE (FILES, FOLDERS, KEY, KEY&SUBKEYS, SUBKEYS, etc)
exactPermissionMatch: the ACL must match exactly, i.e. if READ is specified, the user must have READ exactly and not FULLCONTROL (which also includes the READ permission)
Returns (dict): 'Exists' true if the ACE exists, false if it does not
CLI Example:
.. code-block:: bash
salt 'minion-id' win_dacl.check_ace c:\temp directory <username> fullcontrol |
377,515 | def plot(args):
from jcvi.graphics.base import savefig
p = OptionParser(plot.__doc__)
opts, args, iopts = p.set_image_options(args, figsize="8x7", format="png")
if len(args) != 3:
sys.exit(not p.print_help())
workdir, sample_key, chrs = args
chrs = chrs.split(",")
hmm = CopyNumberHMM(workdir=workdir)
hmm.plot(sample_key, chrs=chrs)
image_name = sample_key + "_cn." + iopts.format
savefig(image_name, dpi=iopts.dpi, iopts=iopts) | %prog plot workdir sample chr1,chr2
Plot some chromosomes for visual proof. Separate multiple chromosomes with
comma. Must contain folder workdir/sample-cn/. |
377,516 | def construct_channel(self, *args, **kwargs):
channel = self.get_channel(*args, **kwargs)
_build_tree(channel, SAMPLE_TREE)
raise_for_invalid_channel(channel)
return channel | Create ChannelNode and build topic tree. |
377,517 | def get_transition_viewset_method(transition_name, **kwargs):
    @detail_route(methods=['post'], **kwargs)
def inner_func(self, request, pk=None, **kwargs):
object = self.get_object()
transition_method = getattr(object, transition_name)
transition_method(by=self.request.user)
if self.save_after_transition:
object.save()
serializer = self.get_serializer(object)
return Response(serializer.data)
return inner_func | Create a viewset method for the provided `transition_name` |
377,518 | def _wrap_layer(name, input_layer, build_func, dropout_rate=0.0, trainable=True):
build_output = build_func(input_layer)
if dropout_rate > 0.0:
dropout_layer = keras.layers.Dropout(
rate=dropout_rate,
            name='%s-Dropout' % name,
)(build_output)
else:
dropout_layer = build_output
if isinstance(input_layer, list):
input_layer = input_layer[0]
    add_layer = keras.layers.Add(name='%s-Add' % name)([input_layer, dropout_layer])
normal_layer = LayerNormalization(
trainable=trainable,
        name='%s-Norm' % name,
)(add_layer)
return normal_layer | Wrap layers with residual, normalization and dropout.
:param name: Prefix of names for internal layers.
:param input_layer: Input layer.
:param build_func: A callable that takes the input tensor and generates the output tensor.
:param dropout_rate: Dropout rate.
:param trainable: Whether the layers are trainable.
:return: Output layer. |
377,519 | def get(self, specification, *args, **kwargs):
arguments = dict(enumerate(args))
arguments.update(kwargs)
return self.acquire(specification, arguments=arguments) | A more convenient version of :py:meth:`acquire()` for when you can
provide positional arguments in the right order. |
377,520 | def contour_mask(self, contour):
new_data = np.zeros(self.data.shape)
num_boundary = contour.boundary_pixels.shape[0]
boundary_px_ij_swapped = np.zeros([num_boundary, 1, 2])
boundary_px_ij_swapped[:, 0, 0] = contour.boundary_pixels[:, 1]
boundary_px_ij_swapped[:, 0, 1] = contour.boundary_pixels[:, 0]
cv2.fillPoly(
new_data, pts=[
boundary_px_ij_swapped.astype(
np.int32)], color=(
BINARY_IM_MAX_VAL, BINARY_IM_MAX_VAL, BINARY_IM_MAX_VAL))
orig_zeros = np.where(self.data == 0)
new_data[orig_zeros[0], orig_zeros[1]] = 0
return BinaryImage(new_data.astype(np.uint8), frame=self._frame) | Generates a binary image with only the given contour filled in. |
377,521 | def cmServiceAbort():
a = TpPd(pd=0x5)
b = MessageType(mesType=0x23)
packet = a / b
return packet | CM SERVICE ABORT Section 9.2.7 |
377,522 | def step_command_output_should_not_contain_log_records(context):
assert context.table, "REQUIRE: context.table"
context.table.require_columns(["category", "level", "message"])
format = getattr(context, "log_record_format", context.config.logging_format)
for row in context.table.rows:
output = LogRecordTable.make_output_for_row(row, format)
        context.execute_steps(u'''
            Then the command output should not contain:
                """
                {expected_output}
                """
            '''.format(expected_output=output)) | Verifies that the command output does not contain the specified log records
(in any order).
.. code-block: gherkin
    Then the command output should not contain the following log records:
| category | level | message |
| bar | CURRENT | xxx | |
377,523 | def OnButtonCell(self, event):
if self.button_cell_button_id == event.GetId():
if event.IsChecked():
label = self._get_button_label()
post_command_event(self, self.ButtonCellMsg, text=label)
else:
post_command_event(self, self.ButtonCellMsg, text=False)
event.Skip() | Event handler for cell button toggle button |
377,524 | def kappa_se_calc(PA, PE, POP):
try:
result = math.sqrt((PA * (1 - PA)) / (POP * ((1 - PE)**2)))
return result
except Exception:
return "None" | Calculate kappa standard error.
:param PA: observed agreement among raters (overall accuracy)
:type PA : float
:param PE: hypothetical probability of chance agreement (random accuracy)
:type PE : float
:param POP: population
:type POP: int
:return: kappa standard error as float |
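A quick numeric check of the formula above (the values are illustrative, not from the source):

```python
# PA=0.8, PE=0.5, POP=100:
# sqrt((0.8 * 0.2) / (100 * (1 - 0.5) ** 2)) = sqrt(0.0064) = 0.08
print(kappa_se_calc(PA=0.8, PE=0.5, POP=100))  # 0.08
```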
377,525 | def syslog(server, enable=True):
    if enable and __execute_cmd('config -g cfgRemoteHosts -o '
                                'cfgRhostsSyslogEnable 1'):
        return __execute_cmd('config -g cfgRemoteHosts -o '
                             'cfgRhostsSyslogServer1 {0}'.format(server))
    return __execute_cmd('config -g cfgRemoteHosts -o '
                         'cfgRhostsSyslogEnable 0') | Configure syslog remote logging, by default syslog will automatically be
enabled if a server is specified. However, if you want to disable syslog
you will need to specify a server followed by False
CLI Example:
.. code-block:: bash
salt dell drac.syslog [SYSLOG IP] [ENABLE/DISABLE]
salt dell drac.syslog 0.0.0.0 False |
377,526 | def Lewis(D=None, alpha=None, Cp=None, k=None, rho=None):
if k and Cp and rho:
alpha = k/(rho*Cp)
elif alpha:
pass
else:
        raise Exception('Insufficient information provided for Le calculation')
    return alpha/D | Calculates Lewis number or `Le` for a fluid with the given parameters.
.. math::
Le = \frac{k}{\rho C_p D} = \frac{\alpha}{D}
Inputs can be either of the following sets:
* Diffusivity and Thermal diffusivity
* Diffusivity, heat capacity, thermal conductivity, and density
Parameters
----------
D : float
Diffusivity of a species, [m^2/s]
alpha : float, optional
Thermal diffusivity, [m^2/s]
Cp : float, optional
Heat capacity, [J/kg/K]
k : float, optional
Thermal conductivity, [W/m/K]
rho : float, optional
Density, [kg/m^3]
Returns
-------
Le : float
Lewis number []
Notes
-----
.. math::
Le=\frac{\text{Thermal diffusivity}}{\text{Mass diffusivity}} =
\frac{Sc}{Pr}
An error is raised if none of the required input sets are provided.
Examples
--------
>>> Lewis(D=22.6E-6, alpha=19.1E-6)
0.8451327433628318
>>> Lewis(D=22.6E-6, rho=800., k=.2, Cp=2200)
0.00502815768302494
References
----------
.. [1] Green, Don, and Robert Perry. Perry's Chemical Engineers' Handbook,
Eighth Edition. McGraw-Hill Professional, 2007.
.. [2] Cengel, Yunus, and John Cimbala. Fluid Mechanics: Fundamentals and
Applications. Boston: McGraw Hill Higher Education, 2006.
.. [3] Gesellschaft, V. D. I., ed. VDI Heat Atlas. 2nd edition.
Berlin; New York:: Springer, 2010. |
377,527 | def parse_media_type(media_type):
    media_type, sep, parameter = str(media_type).partition(';')
    media_type, sep, subtype = media_type.partition('/')
return tuple(x.strip() or None for x in (media_type, subtype, parameter)) | Returns type, subtype, parameter tuple from an http media_type.
Can be applied to the 'Accept' or 'Content-Type' http header fields. |
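Assuming the reconstructed `;` and `/` separators above, a couple of illustrative calls:

```python
print(parse_media_type("text/html; charset=utf-8"))
# ('text', 'html', 'charset=utf-8')
print(parse_media_type("application/json"))
# ('application', 'json', None)
```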
377,528 | def sample_indexes_by_sequence(indexes, sequence):
N = len(sequence)
res = np.zeros((N,2), dtype=int)
for t in range(N):
s = sequence[t]
i = np.random.randint(indexes[s].shape[0])
res[t,:] = indexes[s][i,:]
return res | Samples trajectory/time indexes according to the given sequence of states
Parameters
----------
indexes : list of ndarray( (N_i, 2) )
For each state, all trajectory and time indexes where this state occurs.
Each matrix has a number of rows equal to the number of occurrences of the corresponding state,
with rows consisting of a tuple (i, t), where i is the index of the trajectory and t is the time index
within the trajectory.
sequence : array of integers
A sequence of discrete states. For each state, a trajectory/time index will be sampled at which dtrajs
have an occurrence of this state
Returns
-------
indexes : ndarray( (N, 2) )
The sampled index sequence.
Index array with a number of rows equal to N=len(sequence), with rows consisting of a tuple (i, t),
where i is the index of the trajectory and t is the time index within the trajectory. |
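A small self-contained sketch of how the sampler is called (the index arrays are illustrative):

```python
import numpy as np

# State 0 occurs at (traj 0, t 0) and (traj 1, t 2); state 1 at (traj 0, t 1).
indexes = [np.array([[0, 0], [1, 2]]), np.array([[0, 1]])]
samples = sample_indexes_by_sequence(indexes, sequence=[0, 1, 0])
print(samples.shape)  # (3, 2): one (trajectory, time) pair per sequence element
```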
377,529 | def receive(self):
try:
msg, addr = self.skt.recvfrom(self.buffer_size)
except socket.error as error:
            log.error('Error receiving data: %s', error, exc_info=True)
raise ListenerException(error)
        log.debug('Received message %s from %s at %s', msg, addr, time.time())
return msg, addr[0] | Return the message received and the address. |
377,530 | def get_segmentize_value(input_file=None, tile_pyramid=None):
warnings.warn(DeprecationWarning("get_segmentize_value() has moved to mapchete.io"))
return io.get_segmentize_value(input_file, tile_pyramid) | Return the recommended segmentation value in input file units.
It is calculated by multiplying raster pixel size with tile shape in
pixels.
Parameters
----------
input_file : str
location of a file readable by rasterio
tile_pyramid : ``TilePyramid`` or ``BufferedTilePyramid``
tile pyramid to estimate target tile size
Returns
-------
segmentize value : float
    suggested length of line segmentation to reproject file bounds |
377,531 | def main(global_config, **settings):
if sys.version_info[0] < 3:
reload(sys)
        sys.setdefaultencoding('utf-8')
settings = dict(settings)
create_engine(settings, scoped=True)
    authn_policy = RouteSwitchAuthPolicy(secret=settings['pyshop.cookie_key'],
callback=groupfinder)
authz_policy = ACLPolicy()
    route_prefix = settings.get('pyshop.route_prefix')
config = Configurator(settings=settings,
root_factory=RootFactory,
route_prefix=route_prefix,
locale_negotiator=locale_negotiator,
authentication_policy=authn_policy,
authorization_policy=authz_policy)
config.end()
return config.make_wsgi_app() | Get a PyShop WSGI application configured with settings. |
377,532 | def clear_messages(self):
while len(self._messages):
msg = self._messages.pop(0)
usd = msg.block.userData()
            if usd and hasattr(usd, 'messages'):
usd.messages[:] = []
if msg.decoration:
self.editor.decorations.remove(msg.decoration) | Clears all messages. |
377,533 | def p_block_replace(self, p):
m = p[1].parse(None)
block = self.scope.blocks(m.raw())
if block:
p[0] = block.copy_inner(self.scope)
else:
p[0] = Deferred(p[1], None, p.lineno(2)) | block_decl : identifier t_semicolon |
377,534 | def model_to_owl(model, fname):
    io_class = autoclass('org.biopax.paxtools.io.SimpleIOHandler')
    io = io_class(autoclass('org.biopax.paxtools.model.BioPAXLevel').L3)
    try:
        fileOS = autoclass('java.io.FileOutputStream')(fname)
    except JavaException:
        logger.error('Could not open data file %s' % fname)
        return
    l3_factory = autoclass('org.biopax.paxtools.model.BioPAXLevel').L3.getDefaultFactory()
model_out = l3_factory.createModel()
for r in model.getObjects().toArray():
model_out.add(r)
io.convertToOWL(model_out, fileOS)
fileOS.close() | Save a BioPAX model object as an OWL file.
Parameters
----------
model : org.biopax.paxtools.model.Model
A BioPAX model object (java object).
fname : str
The name of the OWL file to save the model in. |
377,535 | def create_notification_plan(self, label=None, name=None,
critical_state=None, ok_state=None, warning_state=None):
return self._notification_plan_manager.create(label=label, name=name,
critical_state=critical_state, ok_state=ok_state,
warning_state=warning_state) | Creates a notification plan to be executed when a monitoring check
triggers an alarm. |
377,536 | def get_root_subject(self):
manifest = URIRef(self.manifest)
if list(self.rdf.triples((manifest, None, None))):
return manifest
else:
return self.rdf.subjects(None, self.manifest).next() | Returns the BNode which describes the topmost subject of the graph. |
377,537 | def config_absent(name):
name = name.lower()
    ret = {'name': name,
           'changes': {},
           'result': None,
           'comment': ''}
    config = _load_config()
    if name in config:
        ret['result'] = True
        ret['comment'] = 'property {0} deleted'.format(name)
        ret['changes'][name] = None
        del config[name]
else:
return ret | Ensure configuration property is absent in /usbkey/config
name : string
name of property |
377,538 | def _getfunctionlist(self):
try:
eventhandler = self.obj.__eventhandler__
except AttributeError:
eventhandler = self.obj.__eventhandler__ = {}
return eventhandler.setdefault(self.event, []) | (internal use) |
377,539 | def vcenter_interval(self, **kwargs):
config = ET.Element("config")
vcenter = ET.SubElement(config, "vcenter", xmlns="urn:brocade.com:mgmt:brocade-vswitch")
id_key = ET.SubElement(vcenter, "id")
        id_key.text = kwargs.pop('id')
        interval = ET.SubElement(vcenter, "interval")
        interval.text = kwargs.pop('interval')
        callback = kwargs.pop('callback', self._callback)
return callback(config) | Auto Generated Code |
377,540 | def initialize_concept_scheme(rdf, cs, label, language, set_modified):
labels = list(rdf.objects(cs, RDFS.label)) + \
list(rdf.objects(cs, SKOS.prefLabel))
if len(labels) == 0:
if not label:
logging.warning(
"Concept scheme has no label(s). "
"Use --label option to set the concept scheme label.")
else:
logging.info(
"Unlabeled concept scheme detected. Setting label to " %
label)
rdf.add((cs, RDFS.label, Literal(label, language)))
if set_modified:
        curdate = datetime.datetime.utcnow().replace(microsecond=0).isoformat() + 'Z'
rdf.remove((cs, DCTERMS.modified, None))
rdf.add((cs, DCTERMS.modified, Literal(curdate, datatype=XSD.dateTime))) | Initialize a concept scheme: Optionally add a label if the concept
scheme doesn't have a label, and optionally add a dct:modified
timestamp. |
377,541 | def validate_block(self, block: BaseBlock) -> None:
if not isinstance(block, self.get_block_class()):
raise ValidationError(
"This vm ({0!r}) is not equipped to validate a block of type {1!r}".format(
self,
block,
)
)
if block.is_genesis:
validate_length_lte(block.header.extra_data, 32, title="BlockHeader.extra_data")
else:
parent_header = get_parent_header(block.header, self.chaindb)
self.validate_header(block.header, parent_header)
tx_root_hash, _ = make_trie_root_and_nodes(block.transactions)
if tx_root_hash != block.header.transaction_root:
raise ValidationError(
"Block's transaction_root ({0}) does not match expected value: {1}".format(
block.header.transaction_root, tx_root_hash))
if len(block.uncles) > MAX_UNCLES:
raise ValidationError(
"Blocks may have a maximum of {0} uncles. Found "
"{1}.".format(MAX_UNCLES, len(block.uncles))
)
if not self.chaindb.exists(block.header.state_root):
raise ValidationError(
"`state_root` was not found in the db.\n"
"- state_root: {0}".format(
block.header.state_root,
)
)
local_uncle_hash = keccak(rlp.encode(block.uncles))
if local_uncle_hash != block.header.uncles_hash:
raise ValidationError(
"`uncles_hash` and block `uncles` do not match.\n"
" - num_uncles : {0}\n"
" - block uncle_hash : {1}\n"
" - header uncle_hash: {2}".format(
len(block.uncles),
local_uncle_hash,
block.header.uncles_hash,
)
            ) | Validate the given block. |
377,542 | def insert(self, context):
status_code, msg = self.__endpoint.post(
"/resources/jdbc-connection-pool",
data={
"id": self.__name,
"resType": self.__res_type,
"datasourceClassname": self.__ds_classname,
"property": props_value(self.__props)
}
)
self.__available = True | Create connection pool.
:param resort.engine.execution.Context context:
Current execution context. |
377,543 | def convert_advanced_relu(builder, layer, input_names, output_names, keras_layer):
input_name, output_name = (input_names[0], output_names[0])
if keras_layer.max_value is None:
        builder.add_activation(layer, 'RELU', input_name, output_name)
        return
    relu_output_name = output_name + '_relu'
    builder.add_activation(layer, 'RELU', input_name, relu_output_name)
    neg_output_name = relu_output_name + '_neg'
    builder.add_activation(layer + '__neg__', 'LINEAR', relu_output_name,
                           neg_output_name, [-1.0, 0])
    clip_output_name = relu_output_name + '_clip'
    builder.add_unary(layer + '__clip__', neg_output_name, clip_output_name,
                      'threshold', alpha=-keras_layer.max_value)
    builder.add_activation(layer + '__neg2__', 'LINEAR', clip_output_name,
                           output_name, [-1.0, 0]) | Convert a ReLU layer with maximum value from keras to coreml.
Parameters
----------
keras_layer: layer
A keras layer object.
builder: NeuralNetworkBuilder
A neural network builder object. |
377,544 | def set_key(cls, k, v):
k = cls.__name__ + "__" + k
session[k] = v | Allows attaching stateless information to the class using the
flask session dict |
377,545 | def request_ride(
self,
ride_type=None,
start_latitude=None,
start_longitude=None,
start_address=None,
end_latitude=None,
end_longitude=None,
end_address=None,
primetime_confirmation_token=None,
):
        args = {
            'ride_type': ride_type,
            'origin': {
                'lat': start_latitude,
                'lng': start_longitude,
                'address': start_address,
            },
            'destination': {
                'lat': end_latitude,
                'lng': end_longitude,
                'address': end_address,
            },
            'primetime_confirmation_token': primetime_confirmation_token,
        }
        return self._api_call('POST', 'v1/rides', args=args) | Request a ride on behalf of a Lyft user.
Parameters
ride_type (str)
Name of the type of ride you're requesting.
E.g., lyft, lyft_plus
start_latitude (float)
Latitude component of a start location.
start_longitude (float)
Longitude component of a start location.
start_address (str)
Optional pickup address.
end_latitude (float)
Optional latitude component of a end location.
Destination would be NULL in this case.
end_longitude (float)
Optional longitude component of a end location.
Destination would be NULL in this case.
end_address (str)
Optional destination address.
primetime_confirmation_token (str)
Optional string containing the Prime Time confirmation token
to book rides having Prime Time Pricing.
Returns
(Response)
A Response object containing the ride request ID and other
details about the requested ride. |
377,546 | def cli(env, identifier):
manager = SoftLayer.SSLManager(env.client)
if not (env.skip_confirmations or formatting.no_going_back()):
raise exceptions.CLIAbort("Aborted.")
manager.remove_certificate(identifier) | Remove SSL certificate. |
377,547 | def delete_instance(self, instance_id, project_id=None):
instance = self.get_instance(instance_id=instance_id, project_id=project_id)
if instance:
instance.delete()
else:
            self.log.info("The instance '%s' does not exist in project '%s'. Exiting", instance_id,
                          project_id) | Deletes the specified Cloud Bigtable instance.
Raises google.api_core.exceptions.NotFound if the Cloud Bigtable instance does
not exist.
:param project_id: Optional, Google Cloud Platform project ID where the
BigTable exists. If set to None or missing,
the default project_id from the GCP connection is used.
:type project_id: str
:param instance_id: The ID of the Cloud Bigtable instance.
:type instance_id: str |
377,548 | def fix_whitespace(tokens, start, result):
for e in result:
for child in e.iter():
            # The whitespace/hyphen literals below are assumed; originals were stripped.
            child.text = child.text.replace(' , ', ', ')
            for hyphen in HYPHENS:
                child.text = child.text.replace(' %s ' % hyphen, '%s' % hyphen)
            child.text = re.sub(r'\s+', r' ', child.text)
return result | Fix whitespace around hyphens and commas. Can be used to remove whitespace tokenization artefacts. |
377,549 | def authenticate(self, bound_route, actual_params) -> bool:
if self.__auth_service is not None:
auth_route = "{0}_{1}{2}".format(self.__method, self.__route, bound_route)
auth_data = self.__auth_service.authenticate(self.__request, auth_route, actual_params)
if auth_data is True:
self.app.auth_data = self.__auth_service.auth_data
else:
return False
return True | Runs the pre-defined authenticaton service
:param bound_route str route matched
:param actual_params dict actual url parameters
:rtype: bool |
377,550 | def read_api_service_status(self, name, **kwargs):
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
return self.read_api_service_status_with_http_info(name, **kwargs)
else:
(data) = self.read_api_service_status_with_http_info(name, **kwargs)
return data | read_api_service_status # noqa: E501
read status of the specified APIService # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_api_service_status(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the APIService (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1beta1APIService
If the method is called asynchronously,
returns the request thread. |
377,551 | def get_states(self, action_name, config_name, instances=None, map_name=None, **kwargs):
policy = self.get_policy()
_set_forced_update_ids(kwargs, policy.container_maps, map_name or self._default_map, instances)
state_generator = self.get_state_generator(action_name, policy, kwargs)
log.debug("Remaining kwargs passed to client actions: %s", kwargs)
config_ids = get_map_config_ids(config_name, policy.container_maps, map_name or self._default_map,
instances)
log.debug("Generating states for configurations: %s", config_ids)
return state_generator.get_states(config_ids) | Returns a generator of states in relation to the indicated action.
:param action_name: Action name.
:type action_name: unicode | str
:param config_name: Name(s) of container configuration(s) or MapConfigId tuple(s).
:type config_name: unicode | str | collections.Iterable[unicode | str] | dockermap.map.input.InputConfigId | collections.Iterable[dockermap.map.input.InputConfigId]
:param instances: Optional instance names, where applicable but not included in ``config_name``.
:type instances: unicode | str | collections.Iterable[unicode | str]
:param map_name: Optional map name, where not included in ``config_name``.
:param kwargs: Additional kwargs for state generation, action generation, runner, or the client action.
:return: Resulting states of the configurations.
:rtype: collections.Iterable[dockermap.map.state.ConfigState] |
377,552 | def combine_HSPs(a):
m = a[0]
if len(a) == 1:
return m
for b in a[1:]:
assert m.query == b.query
assert m.subject == b.subject
m.hitlen += b.hitlen
m.nmismatch += b.nmismatch
m.ngaps += b.ngaps
m.qstart = min(m.qstart, b.qstart)
m.qstop = max(m.qstop, b.qstop)
m.sstart = min(m.sstart, b.sstart)
m.sstop = max(m.sstop, b.sstop)
if m.has_score:
m.score += b.score
m.pctid = 100 - (m.nmismatch + m.ngaps) * 100. / m.hitlen
return m | Combine HSPs into a single BlastLine. |
377,553 | def set_status(self, value):
if not self._status == value:
old = self._status
self._status = value
logger.info("{} changing status from {} to {}".format(self, old.name, value.name))
self._statusChanged(old, value) | Set the status of the motor to the specified value if not already set. |
377,554 | def full_y(self, Y):
if not self.n:
return
        Ysh = matrix(self.g,
                     (self.n, 1), 'd') + 1j * matrix(self.b, (self.n, 1), 'd')
        uYsh = mul(self.u, Ysh)
        Y += spmatrix(uYsh, self.a, self.a, Y.size, 'z') | Add self(shunt) into full Jacobian Y |
377,555 | def targets(self):
        logging.info('Performing {} analysis'.format(self.analysistype))
for sample in self.runmetadata:
sample[self.analysistype].runanalysis = True
sample[self.analysistype].targetpath = (os.path.join(self.targetpath,
sample[self.analysistype].pointfindergenus))
try:
                sample[self.analysistype].baitfile = glob(os.path.join(sample[self.analysistype].targetpath,
                                                                       '*.fasta'))[0]
            except IndexError:
                fsafiles = glob(os.path.join(sample[self.analysistype].targetpath, '*.fsa'))
                if fsafiles:
                    combinetargets(fsafiles, sample[self.analysistype].targetpath)
                try:
                    sample[self.analysistype].baitfile = glob(os.path.join(sample[self.analysistype].targetpath,
                                                                           '*.fasta'))[0]
                except IndexError as e:
                    e.args = ['Cannot find the combined target file in {}. Please check the folder contents'
                              .format(sample[self.analysistype].targetpath)]
if os.path.isdir(sample[self.analysistype].targetpath):
raise
else:
sample[self.analysistype].runanalysis = False
for sample in self.runmetadata:
sample[self.analysistype].outputdir = os.path.join(sample.run.outputdirectory, self.analysistype)
make_path(sample[self.analysistype].outputdir)
            sample[self.analysistype].logout = os.path.join(sample[self.analysistype].outputdir, 'logout')
            sample[self.analysistype].logerr = os.path.join(sample[self.analysistype].outputdir, 'logerr')
sample[self.analysistype].baitedfastq = \
                os.path.join(sample[self.analysistype].outputdir,
                             '{at}_targetMatches.fastq.gz'.format(at=self.analysistype)) | Search the targets folder for FASTA files, create the multi-FASTA file of all targets if necessary, and
populate objects |
377,556 | def put(self, resource, obj,
operation_timeout=None, max_envelope_size=None, locale=None):
headers = None
return self.service.invoke(headers, obj) | resource can be a URL or a ResourceLocator |
377,557 | def facets(self, *args, **kwargs):
facets = dict((a, {}) for a in args)
facets.update(kwargs)
if not facets:
            raise AttributeError('Faceting requires at least one field')
for f in facets.keys():
if not isinstance(f, six.string_types):
                raise AttributeError('Facet field names must be strings')
q = self._clone()
q._limit = 0
q.execute(offset=0, facets=facets)
        return q._response.get('facets') | Returns a dictionary with the requested facets.
The facets function supports string args, and keyword
args.
q.facets('field_1', 'field_2') will return facets for
field_1 and field_2.
q.facets(field_1={'limit': 0}, field_2={'limit': 10})
will return all facets for field_1 and 10 facets for field_2. |
377,558 | def stop(self):
        self.log.debug('%s begins stopping routine' % repr(self))
        self.log.debug('%s stops input pipers' % repr(self))
inputs = self.get_inputs()
for piper in inputs:
piper.stop(forced=True)
        self.log.debug('%s stops output pipers' % repr(self))
outputs = self.get_outputs()
while outputs:
for piper in outputs:
try:
piper.next()
except StopIteration:
outputs.remove(piper)
self.log.debug("%s stopped output piper: %s" % \
(repr(self), repr(piper)))
continue
except Exception, excp:
self.log.debug("%s %s raised an exception: %s" % \
(repr(self), piper, excp))
self.log.debug("%s stops the remaining pipers" % repr(self))
postorder = self.postorder()
for piper in postorder:
if piper not in inputs:
piper.stop(ends=[0])
self.log.debug("%s finishes stopping of input pipers" % repr(self))
for piper in inputs:
            if hasattr(piper.imap, 'stop'):
piper.imap.stop(ends=[0])
        self.log.debug('%s finishes stopping routine' % repr(self)) | Stops the ``Pipers`` according to pipeline topology. |
377,559 | def lookup(self, asn=None, inc_raw=False, retry_count=3, response=None,
field_list=None, asn_alts=None, asn_methods=None):
        if asn[0:2] != 'AS':
            asn = 'AS{0}'.format(asn)
        if asn_methods is None:
            if asn_alts is None:
                lookups = ['whois', 'http']
            else:
                from warnings import warn
                warn('asn_alts argument has been deprecated and will be '
                     'removed. You should now use the asn_methods argument.')
                lookups = ['whois'] + asn_alts
        else:
            if {'whois', 'http'}.isdisjoint(asn_methods):
                raise ValueError('methods argument requires one of '
                                 'whois, http.')
            lookups = asn_methods
        results = {
            'query': asn,
            'nets': [],
            'raw': None
}
is_http = False
return results | The function for retrieving and parsing ASN origin whois information
via port 43/tcp (WHOIS).
Args:
asn (:obj:`str`): The ASN (required).
inc_raw (:obj:`bool`): Whether to include the raw results in the
returned dictionary. Defaults to False.
retry_count (:obj:`int`): The number of times to retry in case
socket errors, timeouts, connection resets, etc. are
encountered. Defaults to 3.
response (:obj:`str`): Optional response object, this bypasses the
Whois lookup. Defaults to None.
field_list (:obj:`list`): If provided, fields to parse:
['description', 'maintainer', 'updated', 'source']
If None, defaults to all.
asn_alts (:obj:`list`): Additional lookup types to attempt if the
ASN whois lookup fails. If None, defaults to all ['http'].
*WARNING* deprecated in favor of new argument asn_methods.
asn_methods (:obj:`list`): ASN lookup types to attempt, in order.
If None, defaults to all ['whois', 'http'].
Returns:
dict: The ASN origin lookup results
::
{
'query' (str) - The Autonomous System Number
'nets' (list) - Dictionaries containing network
information which consists of the fields listed in the
ASN_ORIGIN_WHOIS dictionary.
'raw' (str) - Raw ASN origin whois results if the inc_raw
parameter is True.
}
Raises:
ValueError: methods argument requires one of whois, http.
ASNOriginLookupError: ASN origin lookup failed. |
377,560 | def simulate(self, data, mime=None):
self._client.simulate_feeddata(self.__pointid, data, mime) | Simulate the arrival of feeddata into the feed. Useful if the remote Thing doesn't publish
very often.
`data` (mandatory) (as applicable) The data you want to use to simulate the arrival of remote feed data
`mime` (optional) (string) The mime type of your data. See:
[share()](./Point.m.html#IoticAgent.IOT.Point.Feed.share) |
377,561 | def idxterms(self):
try:
            terms = listify(self._json.get("idxterms", {}).get('mainterm', []))
except AttributeError:
return None
try:
            return [d['$'] for d in terms]
except AttributeError:
return None | List of index terms. |
377,562 | def readadd(file, system):
dyr = {}
data = []
end = 0
retval = True
    sep = ','
    fid = open(file, 'r')
    for line in fid.readlines():
        if line.find('/') >= 0:
            line = line.split('/')[0]
            end = 1
        if line.find(',') >= 0:
            line = [to_number(item.strip()) for item in line.split(sep)]
        else:
            line = [to_number(item.strip()) for item in line.split()]
if not line:
end = 0
continue
data.extend(line)
if end == 1:
field = data[1]
if field not in dyr.keys():
dyr[field] = []
dyr[field].append(data)
end = 0
data = []
fid.close()
    # Model names below are assumed PSS/E dynamic models; the original literals were stripped.
    supported = [
        'GENROU',
        'GENCLS',
        'ESST3A',
        'ESDC2A',
        'SEXS',
        'IEEEG1',
        'TGOV1',
        'IEEEST',
        'ST2CUT',
    ]
used = list(supported)
for model in supported:
if model not in dyr.keys():
used.remove(model)
continue
for data in dyr[model]:
add_dyn(system, model, data)
needed = list(dyr.keys())
for i in supported:
if i in needed:
needed.remove(i)
    logger.warning('Models not yet supported: {}'.format(
        ', '.join(needed)))
return retval | read DYR file |
377,563 | def _start_console(self):
class InputStream:
def __init__(self):
self._data = b""
def write(self, data):
self._data += data
@asyncio.coroutine
def drain(self):
if not self.ws.closed:
self.ws.send_bytes(self._data)
self._data = b""
output_stream = asyncio.StreamReader()
input_stream = InputStream()
telnet = AsyncioTelnetServer(reader=output_stream, writer=input_stream, echo=True)
self._telnet_servers.append((yield from asyncio.start_server(telnet.run, self._manager.port_manager.console_host, self.console)))
self._console_websocket = yield from self.manager.websocket_query("containers/{}/attach/ws?stream=1&stdin=1&stdout=1&stderr=1".format(self._cid))
input_stream.ws = self._console_websocket
output_stream.feed_data(self.name.encode() + b" console is now available... Press RETURN to get started.\r\n")
asyncio.async(self._read_console_output(self._console_websocket, output_stream)) | Start streaming the console via telnet |
377,564 | def damerau_levenshtein_distance(self, s1, s2):
d = {}
lenstr1 = len(s1)
lenstr2 = len(s2)
for i in xrange(-1,lenstr1+1):
d[(i,-1)] = i+1
for j in xrange(-1,lenstr2+1):
d[(-1,j)] = j+1
for i in xrange(lenstr1):
for j in xrange(lenstr2):
if s1[i] == s2[j]:
cost = 0
else:
cost = 1
d[(i,j)] = min(
d[(i-1,j)] + 1,
d[(i,j-1)] + 1,
d[(i-1,j-1)] + cost,
)
if i and j and s1[i]==s2[j-1] and s1[i-1] == s2[j]:
d[(i,j)] = min (d[(i,j)], d[i-2,j-2] + cost)
return d[lenstr1-1,lenstr2-1] | Dervied algorithm from the following website:
https://www.guyrutenberg.com/2008/12/15/damerau-levenshtein-distance-in-python/
Gives us the distance between two words. |
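Illustrative distances, assuming `obj` is an instance of the class defining this method (the source is Python 2, so `xrange` is used internally):

```python
print(obj.damerau_levenshtein_distance("kitten", "sitting"))  # 3
print(obj.damerau_levenshtein_distance("abcd", "acbd"))       # 1: one adjacent transposition
```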
377,565 | def render_math(self, token):
        if token.content.startswith('$$'):
            return self.render_raw_text(token)
        return '${}$'.format(self.render_raw_text(token)) | Ensure Math tokens are all enclosed in two dollar signs. |
377,566 | def update_firewall_rule(self, server_name, name, start_ip_address,
end_ip_address):
        _validate_not_none('server_name', server_name)
        _validate_not_none('name', name)
        _validate_not_none('start_ip_address', start_ip_address)
        _validate_not_none('end_ip_address', end_ip_address)
return self._perform_put(
self._get_firewall_rules_path(server_name, name),
_SqlManagementXmlSerializer.update_firewall_rule_to_xml(
name, start_ip_address, end_ip_address
)
) | Update a firewall rule for an Azure SQL Database server.
server_name:
Name of the server to set the firewall rule on.
name:
The name of the firewall rule to update.
start_ip_address:
The lowest IP address in the range of the server-level firewall
setting. IP addresses equal to or greater than this can attempt to
connect to the server. The lowest possible IP address is 0.0.0.0.
end_ip_address:
The highest IP address in the range of the server-level firewall
setting. IP addresses equal to or less than this can attempt to
connect to the server. The highest possible IP address is
255.255.255.255. |
377,567 | def delete_webhook(self, webhook):
return self.manager.delete_webhook(self.scaling_group, self, webhook) | Deletes the specified webhook from this policy. |
377,568 | def _redirect(self, request, response):
        # POST keys and redirect targets are assumed (Django admin-style buttons).
        if '_continue' in request.POST:
            return HttpResponseRedirect('.')
        elif '_addanother' in request.POST:
            return HttpResponseRedirect('../add/')
        elif '_save' in request.POST:
            return response
        return HttpResponseRedirect('../') | Generic redirect for item editor. |
377,569 | def visit_Include(self, node, frame):
if node.ignore_missing:
            self.writeline('try:')
            self.indent()
        func_name = 'get_or_select_template'
        if isinstance(node.template, nodes.Const):
            if isinstance(node.template.value, string_types):
                func_name = 'get_template'
            elif isinstance(node.template.value, (tuple, list)):
                func_name = 'select_template'
        elif isinstance(node.template, (nodes.Tuple, nodes.List)):
            func_name = 'select_template'
        self.writeline('template = environment.%s(' % func_name, node)
        self.visit(node.template, frame)
        self.write(', %r)' % self.name)
        if node.ignore_missing:
            self.outdent()
            self.writeline('except TemplateNotFound:')
            self.indent()
            self.writeline('pass')
            self.outdent()
            self.writeline('else:')
            self.indent()
        skip_event_yield = False
        if node.with_context:
            loop = self.environment.is_async and 'async for' or 'for'
            self.writeline('%s event in template.root_render_func('
                           'template.new_context(context.get_all(), True, '
                           '%s)):' % (loop, self.dump_local_context(frame)))
        elif self.environment.is_async:
            self.writeline('for event in (await '
                           'template._get_default_module_async())'
                           '._body_stream:')
        else:
            if supports_yield_from:
                self.writeline('yield from template._get_default_module()'
                               '._body_stream')
                skip_event_yield = True
            else:
                self.writeline('for event in template.'
                               '_get_default_module()._body_stream:')
        if not skip_event_yield:
            self.indent()
            self.simple_write('event', frame)
self.outdent()
if node.ignore_missing:
self.outdent() | Handles includes. |
377,570 | def read_file(filepath, **kwargs):
return DataFrameModel(dataFrame=superReadFile(filepath, **kwargs),
filePath=filepath) | Read a data file into a DataFrameModel.
:param filepath: The rows/columns filepath to read.
:param kwargs:
xls/x files - see pandas.read_excel(**kwargs)
.csv/.txt/etc - see pandas.read_csv(**kwargs)
:return: DataFrameModel |
377,571 | def calc_acceleration_bca(jackknife_replicates):
jackknife_mean = jackknife_replicates.mean(axis=0)[None, :]
differences = jackknife_mean - jackknife_replicates
numerator = (differences**3).sum(axis=0)
denominator = 6 * ((differences**2).sum(axis=0))**1.5
zero_denom = np.where(denominator == 0)
denominator[zero_denom] = MIN_COMP_VALUE
acceleration = numerator / denominator
return acceleration | Calculate the acceleration constant for the Bias Corrected and Accelerated
(BCa) bootstrap confidence intervals.
Parameters
----------
jackknife_replicates : 2D ndarray.
Each row should correspond to a different jackknife parameter sample,
formed by deleting a particular observation and then re-estimating the
desired model. Each column should correspond to an element of the
parameter vector being estimated.
Returns
-------
acceleration : 1D ndarray.
There will be one element for each element in `mle_estimate`. Elements
denote the acceleration factors for each component of the parameter
vector.
References
----------
Efron, Bradley, and Robert J. Tibshirani. An Introduction to the Bootstrap.
CRC press, 1994. Section 14.3, Equation 14.15. |
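An illustrative call with five jackknife replicates of a two-parameter estimate (values are made up; assumes the module-level `MIN_COMP_VALUE` constant referenced above is defined):

```python
import numpy as np

jk = np.array([[0.90, 2.10],
               [1.10, 1.90],
               [1.00, 2.00],
               [0.95, 2.05],
               [1.05, 1.95]])
print(calc_acceleration_bca(jk))  # one acceleration factor per parameter column
```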
377,572 | def bytes_array(self):
        assert len(self.dimensions) == 2, \
            'param {} is not a 2D bytes array'.format(self.name)
l, n = self.dimensions
return [self.bytes[i*l:(i+1)*l] for i in range(n)] | Get the param as an array of raw byte strings. |
377,573 | def reload(self, client=None):
if self.notification_id is None:
            raise ValueError("Notification not initialized by server")
client = self._require_client(client)
query_params = {}
if self.bucket.user_project is not None:
query_params["userProject"] = self.bucket.user_project
response = client._connection.api_request(
method="GET", path=self.path, query_params=query_params
)
self._set_properties(response) | Update this notification from the server configuration.
See:
https://cloud.google.com/storage/docs/json_api/v1/notifications/get
If :attr:`user_project` is set on the bucket, bills the API request
to that project.
:type client: :class:`~google.cloud.storage.client.Client` or
``NoneType``
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the current bucket.
:rtype: bool
:returns: True, if the notification exists, else False.
:raises ValueError: if the notification has no ID. |
377,574 | def new_parallel(self, function, *params):
if self.ppool is None:
            if core_type == 'thread':
from multiprocessing.pool import ThreadPool
self.ppool = ThreadPool(500)
else:
from gevent.pool import Pool
self.ppool = Pool(500)
self.ppool.apply_async(function, *params) | Register a new thread executing a parallel method. |
377,575 | def new(params, event_size, num_components,
dtype=None, validate_args=False, name=None):
    with tf.compat.v1.name_scope(name, 'CategoricalMixtureOfOneHotCategorical',
[params, event_size, num_components]):
dist = MixtureSameFamily.new(
params,
num_components,
OneHotCategorical(
event_size,
validate_args=False,
name=name),
validate_args=validate_args,
name=name)
dist._mean = functools.partial(
_eval_all_one_hot, tfd.Distribution.prob, dist)
dist.log_mean = functools.partial(
_eval_all_one_hot, tfd.Distribution.log_prob, dist)
return dist | Create the distribution instance from a `params` vector. |
377,576 | def encode_request(name, items):
client_message = ClientMessage(payload_size=calculate_size(name, items))
client_message.set_message_type(REQUEST_TYPE)
client_message.set_retryable(RETRYABLE)
client_message.append_str(name)
client_message.append_int(len(items))
for items_item in items:
client_message.append_data(items_item)
client_message.update_frame_length()
return client_message | Encode request into client_message |
377,577 | def encrypt(self, data, nounce=None):
if nounce is None:
            nounce = self._out_counter.to_bytes(length=8, byteorder='big')
self._out_counter += 1
        return self._enc_out.seal(b'\x00\x00\x00\x00' + nounce, data, bytes()) | Encrypt data with counter or specified nounce. |
377,578 | def at_line(self, line: FileLine) -> Iterator[InsertionPoint]:
logger.debug("finding insertion points at line: %s", str(line))
filename = line.filename
line_num = line.num
for ins in self.in_file(filename):
if line_num == ins.location.line:
logger.debug("found insertion point at line [%s]: %s",
str(line), ins)
yield ins | Returns an iterator over all of the insertion points located at a
given line. |
377,579 | def cli(obj, roles):
    client = obj['client']
    query = [('roles', r) for r in roles]
    if obj['output'] == 'json':
        r = client.http.get('/users', query)
        click.echo(json.dumps(r['users'], sort_keys=True, indent=4, ensure_ascii=False))
    else:
        timezone = obj['timezone']
        # Header keys below are assumed; the original mapping literals were lost.
        headers = {'id': 'ID', 'name': 'NAME', 'email': 'EMAIL', 'status': 'STATUS',
                   'roles': 'ROLES', 'attributes': 'ATTRIBUTES', 'createTime': 'CREATED',
                   'lastLogin': 'LAST LOGIN', 'text': 'COMMENT', 'email_verified': 'VERIFIED'}
        click.echo(
            tabulate([u.tabular(timezone) for u in client.get_users(query)], headers=headers, tablefmt=obj['output'])
) | List users. |
377,580 | def delete_tag(context, id, tag_id):
result = job.delete_tag(context, id=id, tag_id=tag_id)
if result.status_code == 204:
        utils.print_json({'id': id, 'message': 'Tag deleted.'})
else:
utils.format_output(result, context.format) | delete_tag(context, id, tag_id)
Delete a tag from a job.
>>> dcictl job-delete-tag [OPTIONS]
:param string id: ID of the job to attach the meta to [required]
:param string tag_id: ID of the tag to be removed from the job [required] |
377,581 | def split_comma_argument(comma_sep_str):
terms = []
    for term in comma_sep_str.split(','):
if term:
terms.append(term)
return terms | Split a comma separated option into a list. |
def _mkdirs_impacket(path, share='C$', conn=None, host=None, username=None, password=None):
if conn is None:
conn = get_conn(host, username, password)
if conn is False:
return False
    comps = path.split('/')
    pos = 1
    for comp in comps:
        cwd = '\\'.join(comps[0:pos])
        try:
            conn.listPath(share, cwd)
        except (smbSessionError, smb3SessionError):
            log.exception('Encountered error running conn.listPath')
            conn.createDirectory(share, cwd)
pos += 1 | Recursively create a directory structure on an SMB share
Paths should be passed in with forward-slash delimiters, and should not
start with a forward-slash. |
377,583 | def concatenate(x, other):
return type(x)(tf.TensorShape(x).concatenate(other)) | Returns the concatenation of the dimension in `x` and `other`.
*Note:* If either `x` or `other` is completely unknown, concatenation will
discard information about the other shape. In future, we might support
concatenation that preserves this information for use with slicing.
For more details, see `help(tf.TensorShape.concatenate)`.
Args:
x: object representing a shape; convertible to `tf.TensorShape`.
other: object representing a shape; convertible to `tf.TensorShape`.
Returns:
new_shape: an object like `x` whose elements are the concatenation of the
dimensions in `x` and `other`. |
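An illustrative use, assuming TensorFlow 2 semantics for `tf.TensorShape`:

```python
import tensorflow as tf

print(concatenate([2, 3], [4]))                   # [2, 3, 4]
print(concatenate((5,), tf.TensorShape([None])))  # (5, None)
```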
377,584 | def transform(testtype):
    if testtype.startswith('[funsize'):
        return None
    # Separator and replacement literals below are assumed; the originals were stripped.
    testtype = testtype.split('(')[-1]
    testtype = testtype.split(')')[-1]
    testtype = testtype.replace('e10s-', '')
    testtype = testtype.strip()
    testtype = testtype.replace(' ', '-')
    testtype = testtype.replace('browser-chrome', 'bc')
    testtype = testtype.replace('mochitest-', 'm')
    testtype = testtype.replace('web-platform-tests', 'wpt')
    return testtype | A lot of these transformations are from tasks before task labels existed, and some of them apply if we
grab data directly from the Treeherder jobs endpoint instead of the runnable jobs API. |
377,585 | def map_aliases_to_device_objects(self):
all_devices = self.get_all_devices_in_portal()
for dev_o in all_devices:
            dev_o['portals_aliases'] = self.get_portal_by_name(
                self.portal_name()
            )[2][1]['info']['aliases'][dev_o['rid']]
return all_devices | A device object knows its rid, but not its alias.
A portal object knows its device rids and aliases.
This function adds an 'portals_aliases' key to all of the
device objects so they can be sorted by alias. |
377,586 | def predict_proba(self, X):
        check_is_fitted(self, ['inverse_influence_matrix'])
X = check_array(X)
return self.__find_leverages(X, self.inverse_influence_matrix) | Predict the distances for X to center of the training set.
Parameters
----------
X : array-like or sparse matrix, shape (n_samples, n_features)
The input samples. Internally, it will be converted to
``dtype=np.float32`` and if a sparse matrix is provided
to a sparse ``csr_matrix``.
Returns
-------
leverages: array of shape = [n_samples]
The objects distances to center of the training set. |
377,587 | def turn_on(host, did, token=None):
urllib3.disable_warnings()
if token:
scheme = "https"
if not token:
scheme = "http"
token = "1234567890"
    # The URL path below is assumed; the original path literals were stripped.
    url = (
        scheme + '://' + host + '/api/' + token + '/lights/' + did + '/on')
    response = requests.get(url, verify=False)
    if response.status_code == 200:
        return True
    else:
        return False | Turn on bulb or fixture |
377,588 | async def async_delete_all_keys(session, host, port, api_key, api_keys=[]):
    url = 'http://{}:{}/api/{}/config'.format(host, str(port), api_key)
response = await async_request(session.get, url)
api_keys.append(api_key)
    for key in response['whitelist'].keys():
if key not in api_keys:
await async_delete_api_key(session, host, port, key) | Delete all API keys except for the ones provided to the method. |
377,589 | def cov_dvrpmllbb_to_vxyz(d,e_d,e_vr,pmll,pmbb,cov_pmllbb,l,b,
plx=False,degree=False):
if plx:
d= 1./d
e_d*= d**2.
if degree:
l*= _DEGTORAD
b*= _DEGTORAD
if sc.array(d).shape == ():
return cov_dvrpmllbb_to_vxyz_single(d,e_d,e_vr,pmll,pmbb,cov_pmllbb,
l,b)
else:
ndata= len(d)
out= sc.zeros((ndata,3,3))
for ii in range(ndata):
out[ii,:,:]= cov_dvrpmllbb_to_vxyz_single(d[ii],e_d[ii],e_vr[ii],
pmll[ii],pmbb[ii],
cov_pmllbb[ii,:,:],
l[ii],b[ii])
return out | NAME:
cov_dvrpmllbb_to_vxyz
PURPOSE:
propagate distance, radial velocity, and proper motion uncertainties to Galactic coordinates
INPUT:
d - distance [kpc, as/mas for plx]
e_d - distance uncertainty [kpc, [as/mas] for plx]
e_vr - radial velocity uncertainty [km/s]
pmll - proper motion in l (*cos(b)) [ [as/mas]/yr ]
pmbb - proper motion in b [ [as/mas]/yr ]
cov_pmllbb - uncertainty covariance for proper motion [pmll is pmll x cos(b)]
l - Galactic longitude
b - Galactic latitude
KEYWORDS:
plx - if True, d is a parallax, and e_d is a parallax uncertainty
degree - if True, l and b are given in degree
OUTPUT:
cov(vx,vy,vz) [3,3] or [:,3,3]
HISTORY:
2010-04-12 - Written - Bovy (NYU) |
377,590 | def mission_count_send(self, target_system, target_component, count, force_mavlink1=False):
return self.send(self.mission_count_encode(target_system, target_component, count), force_mavlink1=force_mavlink1) | This message is emitted as response to MISSION_REQUEST_LIST by the MAV
and to initiate a write transaction. The GCS can then
request the individual mission item based on the
knowledge of the total number of MISSIONs.
target_system : System ID (uint8_t)
target_component : Component ID (uint8_t)
count : Number of mission items in the sequence (uint16_t) |
377,591 | def r(op, rc=None, r=None, iq=None, ico=None, pl=None):
return CONN.References(op,
ResultClass=rc,
Role=r,
IncludeQualifiers=iq,
IncludeClassOrigin=ico,
PropertyList=pl) | This function is a wrapper for
:meth:`~pywbem.WBEMConnection.References`.
Instance-level use: Retrieve the association instances referencing a source
instance.
Class-level use: Retrieve the association classes referencing a source
class.
Parameters:
op (:class:`~pywbem.CIMInstanceName`):
Source instance path; select instance-level use.
op (:class:`~pywbem.CIMClassName`):
Source class path; select class-level use.
rc (:term:`string`):
ResultClass filter: Include only traversals across this association
(result) class.
`None` means this filter is not applied.
r (:term:`string`):
Role filter: Include only traversals from this role (= reference
name) in source object.
`None` means this filter is not applied.
iq (:class:`py:bool`):
IncludeQualifiers flag: Include qualifiers.
`None` will cause the server default of `False` to be used.
Deprecated in :term:`DSP0200`: Clients cannot rely on qualifiers to
be returned in this operation.
ico (:class:`py:bool`):
IncludeClassOrigin flag: Include class origin information for the
properties in the retrieved instances or for the properties and
methods in the retrieved classes.
`None` will cause the server default of `False` to be used.
Deprecated in :term:`DSP0200` for instance-level use: WBEM servers
may either implement this parameter as specified, or may treat any
specified value as `False`.
pl (:term:`string` or :term:`py:iterable` of :term:`string`):
PropertyList: Names of properties to be included (if not otherwise
excluded). An empty iterable indicates to include no properties.
If `None`, all properties will be included.
Returns:
list of result objects:
* For instance-level use, a list of :class:`~pywbem.CIMInstance` objects
representing the retrieved instances, with their `path` attribute
being a :class:`~pywbem.CIMInstanceName` object with its attributes
set as follows:
* `classname`: Name of the creation class of the instance.
* `keybindings`: Keybindings of the instance.
* `namespace`: Name of the CIM namespace containing the instance.
* `host`: Host and optionally port of the WBEM server containing
the CIM namespace, or `None` if the server did not return host
information.
* For class-level use, a list of tuple (classpath, class) representing
the retrieved classes, with the following (unnamed) tuple items:
* classpath (:class:`~pywbem.CIMClassName`): Class path with its
attributes set as follows:
* `classname`: Name of the class.
* `namespace`: Name of the CIM namespace containing the class.
* `host`: Host and optionally port of the WBEM server
containing the CIM namespace, or `None` if the server did not
return host information.
* class (:class:`~pywbem.CIMClass`): The representation of the class. |
377,592 | def get_configs(
config_filepath,
        local_filepath_override='',
):
global_config = read_config(config_filepath)
local_filepath = get_local_config_filepath(config_filepath, True)
if local_filepath_override:
local_filepath = local_filepath_override
local_config = read_config(local_filepath)
return global_config, local_config | go and fetch the global/local configs from file and load them with configparser
Args:
config_filepath (str): path to config
local_filepath_override (str): secondary place to locate config file
Returns:
ConfigParser: global_config
ConfigParser: local_config |
377,593 | def setup_logger(log_level, log_file=None):
level = getattr(logging, log_level.upper(), None)
if not level:
color_print("Invalid log level: %s" % log_level, "RED")
sys.exit(1)
if level >= logging.INFO:
sys.tracebacklimit = 0
formatter = ColoredFormatter(
u"%(log_color)s%(bg_white)s%(levelname)-8s%(reset)s %(message)s",
datefmt=None,
reset=True,
log_colors=log_colors_config
)
if log_file:
handler = logging.FileHandler(log_file, encoding="utf-8")
else:
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(level) | setup root logger with ColoredFormatter. |
377,594 | def _set_property(self, name, value):
if name in worker_mapping().keys():
setattr(self, name, value)
return
raise KeyError("Can't set `%s`!" % name) | Set property `name` to `value`, but only if it is part of the mapping
returned from `worker_mapping` (ie - data transported to frontend).
This method is used from the REST API DB, so it knows what to set and
what not, to prevent users from setting internal values.
Args:
name (str): Name of the property to set.
value (obj): Any python value.
Raises:
KeyError: If `name` can't be set. |
377,595 | def convex_conj(self):
conj_exp = conj_exponent(self.pointwise_norm.exponent)
return IndicatorGroupL1UnitBall(self.domain, exponent=conj_exp) | The convex conjugate functional of the group L1-norm. |
377,596 | def xywh_from_points(points):
    xys = [[int(p) for p in pair.split(',')] for pair in points.split(' ')]
minx = sys.maxsize
miny = sys.maxsize
maxx = 0
maxy = 0
for xy in xys:
if xy[0] < minx:
minx = xy[0]
if xy[0] > maxx:
maxx = xy[0]
if xy[1] < miny:
miny = xy[1]
if xy[1] > maxy:
maxy = xy[1]
    return {
        'x': minx,
        'y': miny,
        'w': maxx - minx,
        'h': maxy - miny,
    } | Constructs a dict representing a rectangle with keys x, y, w, h |
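An illustrative call with a PAGE-XML-style points string (space-separated "x,y" pairs):

```python
print(xywh_from_points("100,100 200,100 200,150 100,150"))
# {'x': 100, 'y': 100, 'w': 100, 'h': 50}
```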
377,597 | def set_password(self,
password,
user=,
note=None):
shutit = self.shutit
shutit.handle_note(note)
if isinstance(password, str):
shutit_global.shutit_global_object.secret_words_set.add(password)
        # The package name, install-type labels, and expect strings are assumed;
        # the original literals were stripped.
        self.install('passwd')
        if self.current_environment.install_type == 'apt':
            self.send(ShutItSendSpec(self,
                                     send='passwd ' + user,
                                     expect='Enter new',
                                     check_exit=False,
                                     ignore_background=True))
            self.send(ShutItSendSpec(self,
                                     send=password,
                                     expect='Retype new',
                                     check_exit=False,
                                     echo=False,
                                     ignore_background=True
                                     ))
            self.send(ShutItSendSpec(self,
                                     send=password,
                                     expect=self.default_expect,
                                     echo=False,
                                     ignore_background=True,
                                     ))
        elif self.current_environment.install_type == 'yum':
            self.send(ShutItSendSpec(self,
                                     send='passwd ' + user,
                                     expect='ew password',
                                     check_exit=False,
                                     ignore_background=True))
            self.send(ShutItSendSpec(self,
                                     send=password,
                                     expect='ew password',
                                     check_exit=False,
                                     echo=False,
                                     ignore_background=True))
            self.send(ShutItSendSpec(self,
                                     send=password,
                                     expect=self.default_expect,
                                     echo=False,
                                     ignore_background=True))
        else:
            self.send(ShutItSendSpec(self,
                                     send='passwd ' + user,
                                     expect='assword',
                                     check_exit=False,
                                     ignore_background=True))
            self.send(ShutItSendSpec(self,
                                     send=password,
                                     expect='assword',
                                     check_exit=False,
                                     echo=False,
                                     ignore_background=True))
            self.send(ShutItSendSpec(self,
                                     send=password,
                                     expect=self.default_expect,
                                     echo=False,
                                     ignore_background=True))
shutit.handle_note_after(note=note)
return True | Sets the password for the current user or passed-in user.
As a side effect, installs the "password" package.
@param user: username to set the password for. Defaults to '' (i.e. current user)
@param password: password to set for the user
@param note: See send() |
377,598 | def geoframe(self, *args, **kwargs):
from geopandas import GeoDataFrame
import geopandas as gpd
from shapely.geometry.polygon import BaseGeometry
from shapely.wkt import loads
gdf = None
try:
gdf = self.resolved_url.geoframe(*args, **kwargs)
except AttributeError:
pass
if gdf is None:
try:
gdf = self.resolved_url.geo_generator.geoframe(*args, **kwargs)
except AttributeError:
pass
if gdf is None:
try:
gdf = self.row_generator.geoframe(*args, **kwargs)
except AttributeError:
pass
if gdf is None:
try:
gdf = GeoDataFrame(self.dataframe(*args, **kwargs))
                first = next(gdf.iterrows())[1]['geometry']
                if isinstance(first, str):
                    shapes = [loads(row['geometry']) for i, row in gdf.iterrows()]
                    gdf['geometry'] = gpd.GeoSeries(shapes)
                elif not isinstance(first, BaseGeometry):
                    assert gdf.crs is not None
            except AttributeError:
                pass
        return gdf | Return a Geo dataframe |
377,599 | def ListFileEntries(self, base_path_specs, output_writer):
for base_path_spec in base_path_specs:
file_system = resolver.Resolver.OpenFileSystem(base_path_spec)
file_entry = resolver.Resolver.OpenFileEntry(base_path_spec)
if file_entry is None:
                logging.warning(
                    'Unable to open base path specification:\n{0:s}'.format(
                        base_path_spec.comparable))
return
            self._ListFileEntry(file_system, file_entry, '', output_writer) | Lists file entries in the base path specification.
Args:
base_path_specs (list[dfvfs.PathSpec]): source path specification.
output_writer (StdoutWriter): output writer. |