Unnamed: 0 (int64, 0 to 389k) | code (string, lengths 26 to 79.6k) | docstring (string, lengths 1 to 46.9k)
---|---|---|
24,200 |
def create(appname, **kwargs):
if appname in LinkFactory._class_dict:
return LinkFactory._class_dict[appname].create(**kwargs)
else:
raise KeyError(
"Could not create object associated to app %s" % appname)
|
Create a `Link` of a particular class, using the kwargs as options
|
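A minimal sketch of the registry this factory implies; the `register` helper and the app/class names below are illustrative, not from the source:

class LinkFactory(object):
    _class_dict = {}

    @classmethod
    def register(cls, appname, link_cls):
        # Each Link class is stored under its app name and later looked
        # up by create().
        cls._class_dict[appname] = link_cls

# LinkFactory.register('gtsrcmaps', GtSrcmapsLink)
# link = LinkFactory.create('gtsrcmaps', irfs='P8R2_SOURCE_V6')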
24,201 |
def stop(self) -> None:
if self._stopped:
return
self._stopped = True
for fd, sock in self._sockets.items():
assert sock.fileno() == fd
self._handlers.pop(fd)()
sock.close()
|
Stops listening for new connections.
Requests currently in progress may still continue after the
server is stopped.
|
24,202 |
def print_difftext(text, other=None):
if other is not None:
text = util_str.difftext(text, other)
colortext = util_str.color_diff_text(text)
try:
print(colortext)
except UnicodeEncodeError as ex:
import unicodedata
colortext = unicodedata.normalize('NFKD', colortext).encode('ascii', 'ignore')
print(colortext)
|
Args:
text (str):
CommandLine:
#python -m utool.util_print --test-print_difftext
#autopep8 ingest_data.py --diff | python -m utool.util_print --test-print_difftext
|
24,203 |
def class_box(self, cn: ClassDefinitionName) -> str:
slot_defs: List[str] = []
if cn not in self.box_generated and (not self.focus_classes or cn in self.focus_classes):
cls = self.schema.classes[cn]
for slotname in self.filtered_cls_slots(cn, all_slots=True):
slot = self.schema.slots[slotname]
if not slot.range or slot.range in builtin_names or slot.range in self.schema.types:
mod = self.prop_modifier(cls, slot)
slot_defs.append(underscore(self.aliased_slot_name(slot)) +
                 mod + ':' +
                 underscore(slot.range) + self.cardinality(slot))
self.box_generated.add(cn)
self.referenced.add(cn)
return '[' + camelcase(cn) + ('|' + ';'.join(slot_defs) if slot_defs else '') + ']'
|
Generate a box for the class. Populate its interior only if (a) it hasn't previously been generated and
(b) it appears in the gen_classes list
@param cn:
@param inherited:
@return:
|
24,204 |
def make_instance(cls, data):
schema = cls()
if not hasattr(schema.Meta, 'model'):
raise AttributeError("In order to make an instance, a model for "
"the schema must be defined in the Meta "
"class.")
serialized_data = schema.load(data).data
return cls.Meta.model(**serialized_data)
|
Validate the data and create a model instance from the data.
Args:
data (dict): The unserialized data to insert into the new model
instance through its constructor.
Returns:
peewee.Model|sqlalchemy.Model: The model instance with its data
inserted into it.
Raises:
AttributeError: This is raised if ``Meta.model`` isn't set on the
schema's definition.
|
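A hedged usage sketch; `Schema` stands in for whatever marshmallow-style base class the source wraps, and `User` is an illustrative model:

class User:
    def __init__(self, name):
        self.name = name

class UserSchema(Schema):  # Schema assumed from the source's library
    class Meta:
        model = User  # the model make_instance() will construct

user = UserSchema.make_instance({'name': 'Ada'})  # -> User(name='Ada')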
24,205 |
def diamond(x, y, radius, filled=False, thickness=1):
if thickness != 1:
raise NotImplementedError()
outsideSpaces = radius
insideSpaces = 1
for row in range(radius * 2 + 1):
yield (outsideSpaces + 1 + x, row + y)
if row != 0 and row != radius * 2:
if filled:
for interiorx in range(outsideSpaces + 2 + x, outsideSpaces + insideSpaces + 2 + x):
yield (interiorx, row + y)
yield (outsideSpaces + insideSpaces + 2 + x, row + y)
if row < radius:
outsideSpaces -= 1
if row != 0:
insideSpaces += 2
else:
outsideSpaces += 1
insideSpaces -= 2
|
Returns a generator that produces (x, y) tuples in a diamond shape.
It is easier to predict the size of the diamond this function
produces than to create a 4-sided polygon with `polygon()`
and rotate it 45 degrees.
The `x` and `y` arguments are the coordinates for the top-left corner of the diamond's bounding square.
The width and height of the diamond will be `2 * radius + 1`.
If `filled` is `True`, the interior points are also returned.
In this example diamond shape, the D characters represent the
drawn diamond, the . characters represent the "outside spaces",
and the ' characters represent the "inside spaces".
(The radius of this example diamond is 3.)
...D
..D'D
.D'''D
D'''''D
.D'''D
..D'D
...D
>>> list(diamond(0, 0, 3))
[(4, 0), (3, 1), (5, 1), (2, 2), (6, 2), (1, 3), (7, 3), (2, 4), (6, 4), (3, 5), (5, 5), (4, 6)]
>>> drawPoints(diamond(0, 0, 3))
,,,O,,,
,,O,O,,
,O,,,O,
O,,,,,O
,O,,,O,
,,O,O,,
,,,O,,,
>>> drawPoints(diamond(0, 0, 3, filled=True))
,,,O,,,
,,OOO,,
,OOOOO,
OOOOOOO
,OOOOO,
,,OOO,,
,,,O,,,
|
24,206 |
def _get_sm_scale_in(self, scale_sm=91.1876):
_smeft = SMEFT()
_smeft.set_initial(self.C_in, self.scale_in, self.scale_high)
_smeft.C_in.update(self._run_sm_scale_in(self.C_in, scale_sm=scale_sm))
C_out = _smeft.rgevolve_leadinglog(scale_sm)
return self._run_sm_scale_in(C_out, scale_sm=scale_sm)
|
Get an estimate of the SM parameters at the input scale by running
them from the EW scale using constant values for the Wilson coefficients
(corresponding to their leading log approximated values at the EW
scale).
Note that this is not guaranteed to work and will fail if some of the
Wilson coefficients (the ones affecting the extraction of SM parameters)
are large.
|
24,207 |
def describe_volumes(self, xml_bytes):
root = XML(xml_bytes)
result = []
for volume_data in root.find("volumeSet"):
volume_id = volume_data.findtext("volumeId")
size = int(volume_data.findtext("size"))
snapshot_id = volume_data.findtext("snapshotId")
availability_zone = volume_data.findtext("availabilityZone")
status = volume_data.findtext("status")
create_time = volume_data.findtext("createTime")
create_time = datetime.strptime(
create_time[:19], "%Y-%m-%dT%H:%M:%S")
volume = model.Volume(
volume_id, size, status, create_time, availability_zone,
snapshot_id)
result.append(volume)
for attachment_data in volume_data.find("attachmentSet"):
instance_id = attachment_data.findtext("instanceId")
device = attachment_data.findtext("device")
status = attachment_data.findtext("status")
attach_time = attachment_data.findtext("attachTime")
attach_time = datetime.strptime(
attach_time[:19], "%Y-%m-%dT%H:%M:%S")
attachment = model.Attachment(
instance_id, device, status, attach_time)
volume.attachments.append(attachment)
return result
|
Parse the XML returned by the C{DescribeVolumes} function.
@param xml_bytes: XML bytes with a C{DescribeVolumesResponse} root
element.
@return: A list of L{Volume} instances.
TODO: attachementSetItemResponseType#deleteOnTermination
|
24,208 |
def set_stencil_mask(self, mask=8, face='front_and_back'):
    self.glir.command('FUNC', 'glStencilMaskSeparate', face, int(mask))
|
Control the front or back writing of individual bits in the stencil
Parameters
----------
mask : int
Mask that is ANDed with ref and stored stencil value.
face : str
Can be 'front', 'back', or 'front_and_back'.
|
24,209 |
def purge_metadata_by_name(self, name):
meta_dir = self._get_metadata_dir_by_name(name, self._metadata_base_dir)
logger.debug('purging metadata directory: {}'.format(meta_dir))
try:
rm_rf(meta_dir)
except OSError as e:
raise ProcessMetadataManager.MetadataError('failed to purge metadata directory {}: {!r}'.format(meta_dir, e))
|
Purge a process's metadata directory.
:raises: `ProcessManager.MetadataError` when OSError is encountered on metadata dir removal.
|
24,210 |
def html_encode(text):
text = text.replace('&', '&amp;')
text = text.replace('<', '&lt;')
text = text.replace('>', '&gt;')
text = text.replace('"', '&quot;')
return text
|
Encode characters with a special meaning as HTML.
:param text: The plain text (a string).
:returns: The text converted to HTML (a string).
|
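With ampersands escaped first (so later entities are not double-escaped), a quick doctest-style illustration:

>>> html_encode('if a < b & b > c: print("done")')
'if a &lt; b &amp; b &gt; c: print(&quot;done&quot;)'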
24,211 |
def create_process(self, service, agent=None, title=None, mode=None, service_version=None, **kwargs):
    if not agent:
        agent = self.input.get('opereto_agent')
    if not mode:
        mode = self.input.get('opereto_execution_mode') or 'production'
    if not service_version:
        service_version = self.input.get('opereto_service_version')
    request_data = {'service_id': service, 'agents': agent, 'mode': mode, 's_version': service_version}
    if title:
        request_data['name'] = title
    if self.input.get('pflow_id'):
        request_data['pflow_id'] = self.input.get('pflow_id')
    request_data.update(**kwargs)
    ret_data = self._call_rest_api('post', '/processes', data=request_data, error='Failed to create a new process')
    if not isinstance(ret_data, list):
        raise OperetoClientError(str(ret_data))
    pid = ret_data[0]
    message = 'New process created for service [%s] [pid = %s] ' % (service, pid)
    if agent:
        message += ' [agent = %s]' % agent
    else:
        message += ' [agent = any]'
self.logger.info(message)
return str(pid)
|
create_process(self, service, agent=None, title=None, mode=None, service_version=None, **kwargs)
Registers a new process or processes
:Parameters:
* *service* (`string`) -- Service which process will be started
* *agent* (`string`) -- The service identifier (e.g shell_command)
* *title* (`string`) -- Title for the process
* *mode* (`string`) -- production/development
* *service_version* (`string`) -- Version of the service to execute
:Keywords args:
Json value map containing the process input properties
:return: process id
:Example:
.. code-block:: python
process_properties = {"my_input_param" : "1"}
pid = opereto_client.create_process(service='simple_shell_command', title='Test simple shell command service', agent=opereto_client.input['opereto_agent'], **process_properties)
|
24,212 |
def configure(self, graph, spanning_tree):
self.graph = graph
self.spanning_tree = spanning_tree
|
Configure the filter.
@type graph: graph
@param graph: Graph.
@type spanning_tree: dictionary
@param spanning_tree: Spanning tree.
|
24,213 |
def canonicalize_tautomer(self):
return TautomerCanonicalizer(transforms=self.tautomer_transforms, scores=self.tautomer_scores,
max_tautomers=self.max_tautomers)
|
:returns: A callable :class:`~molvs.tautomer.TautomerCanonicalizer` instance.
|
24,214 |
def removeGuideline(self, guideline):
if isinstance(guideline, int):
index = guideline
else:
index = self._getGuidelineIndex(guideline)
index = normalizers.normalizeIndex(index)
if index >= self._len__guidelines():
raise ValueError("No guideline located at index %d." % index)
self._removeGuideline(index)
|
Remove ``guideline`` from the glyph.
>>> glyph.removeGuideline(guideline)
``guideline`` may be a :ref:`BaseGuideline` or an
:ref:`type-int` representing a guideline index.
|
24,215 |
def stop(self):
self.debug("()")
super(SensorServer, self).stop()
if self._multicast_socket is not None:
self._shutdown_multicast_socket()
self._is_stopped.set()
|
Stop the sensor server (soft stop - signal packet loop to stop)
Warning: non-blocking (the server might still do something after this!)
:rtype: None
|
24,216 |
def normalize(in_file, data, passonly=False, normalize_indels=True, split_biallelic=True,
rerun_effects=True, remove_oldeffects=False, nonrefonly=False, work_dir=None):
if remove_oldeffects:
out_file = "%s-noeff-nomultiallelic%s" % utils.splitext_plus(in_file)
else:
out_file = "%s-nomultiallelic%s" % utils.splitext_plus(in_file)
if work_dir:
out_file = os.path.join(work_dir, os.path.basename(out_file))
if not utils.file_exists(out_file):
if vcfutils.vcf_has_variants(in_file):
ready_ma_file = _normalize(in_file, data, passonly=passonly,
normalize_indels=normalize_indels,
split_biallelic=split_biallelic,
remove_oldeffects=remove_oldeffects,
nonrefonly=nonrefonly,
work_dir=work_dir)
if rerun_effects:
ann_ma_file, _ = effects.add_to_vcf(ready_ma_file, data)
if ann_ma_file:
ready_ma_file = ann_ma_file
utils.symlink_plus(ready_ma_file, out_file)
else:
utils.symlink_plus(in_file, out_file)
return vcfutils.bgzip_and_index(out_file, data["config"])
|
Normalizes variants and reruns SnpEFF for resulting VCF
|
24,217 |
def is_changed(self, start, end):
left, right = self._get_changed(start, end)
return left < right
|
Tell whether any of the lines from start to end have changed.
The end points are inclusive and indices start from 1.
|
24,218 |
def get_pdffilepath(pdffilename):
return FILEPATHSTR.format(
root_dir=ROOT_DIR, os_sep=os.sep, os_extsep=os.extsep,
name=pdffilename,
folder=PURPOSE.get("plots").get("folder", "plots"),
ext=PURPOSE.get("plots").get("extension", "pdf")
)
|
Returns the path for the pdf file
args:
pdffilename: string
returns path for the plots folder / pdffilename.pdf
|
24,219 |
def connect_delete_namespaced_pod_proxy(self, name, namespace, **kwargs):
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.connect_delete_namespaced_pod_proxy_with_http_info(name, namespace, **kwargs)
else:
(data) = self.connect_delete_namespaced_pod_proxy_with_http_info(name, namespace, **kwargs)
return data
|
connect_delete_namespaced_pod_proxy # noqa: E501
connect DELETE requests to proxy of Pod # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.connect_delete_namespaced_pod_proxy(name, namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the PodProxyOptions (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str path: Path is the URL path to use for the current proxy request to pod.
:return: str
If the method is called asynchronously,
returns the request thread.
|
24,220 |
def p_formula_atom(self, p):
if p[1]==Symbols.TRUE.value:
p[0] = PLTrue()
elif p[1]==Symbols.FALSE.value:
p[0] = PLFalse()
else:
p[0] = PLAtomic(Symbol(p[1]))
|
formula : ATOM
| TRUE
| FALSE
|
24,221 |
def trigger_script(self):
if self.remote_bridge.status not in (BRIDGE_STATUS.RECEIVED,):
return [1]
try:
self.remote_bridge.parsed_script = UpdateScript.FromBinary(self._device.script)
self.remote_bridge.status = BRIDGE_STATUS.IDLE
except Exception as exc:
self._logger.exception("Error parsing script streamed to device")
self.remote_bridge.script_error = exc
self.remote_bridge.error = 1
return [0]
|
Actually process a script.
|
24,222 |
def set_basic_params(
self, workers=None, zerg_server=None, fallback_node=None, concurrent_events=None,
cheap_mode=None, stats_server=None, quiet=None, buffer_size=None,
keepalive=None, resubscribe_addresses=None):
super(RouterHttp, self).set_basic_params(**filter_locals(locals(), drop=[
    'keepalive',
    'resubscribe_addresses',
]))
self._set_aliased('keepalive', keepalive)
self._set_aliased('resubscribe', resubscribe_addresses, multi=True)
return self
|
:param int workers: Number of worker processes to spawn.
:param str|unicode zerg_server: Attach the router to a zerg server.
:param str|unicode fallback_node: Fallback to the specified node in case of error.
:param int concurrent_events: Set the maximum number of concurrent events router can manage.
Default: system dependent.
:param bool cheap_mode: Enables cheap mode. When the router is in cheap mode,
it will not respond to requests until a node is available.
This means that when there are no nodes subscribed, only your local app (if any) will respond.
When all of the nodes go down, the router will return in cheap mode.
:param str|unicode stats_server: Router stats server address to run at.
:param bool quiet: Do not report failed connections to instances.
:param int buffer_size: Set internal buffer size in bytes. Default: page size.
:param int keepalive: Allows holding the connection open even if the request has a body.
* http://uwsgi.readthedocs.io/en/latest/HTTP.html#http-keep-alive
.. note:: See http11 socket type for an alternative.
:param str|unicode|list[str|unicode] resubscribe_addresses: Forward subscriptions
to the specified subscription server.
|
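A minimal configuration sketch in the uwsgiconf style; the bind address and values are illustrative:

router = RouterHttp('127.0.0.1:3111')
router.set_basic_params(workers=2, cheap_mode=True, keepalive=30)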
24,223 |
def objectcount(data, key):
objkey = key.upper()
return len(data.dt[objkey])
|
Return the count of objects stored under `key`.
|
24,224 |
def add_cable_dist(self, lv_cable_dist):
if lv_cable_dist not in self._cable_distributors and isinstance(lv_cable_dist,
LVCableDistributorDing0):
self._cable_distributors.append(lv_cable_dist)
self.graph_add_node(lv_cable_dist)
|
Adds a LV cable_dist to _cable_dists and grid graph if not already existing
Parameters
----------
lv_cable_dist :
Description #TODO
|
24,225 |
def do_search(self, string):
results = self.n.doSearch(string, full_path = self.current_path)
if results:
for r in results:
self.stdout.write("%s\n" % r[])
|
Search Ndrive for filenames containing the given string.
|
24,226 |
def say(self, message, **options):
if hasattr(self, 'voice'):
    if ('voice' not in options):
        options['voice'] = self.voice
self._steps.append(Say(message, **options).obj)
|
When the current session is a voice channel this key will either play a message or an audio file from a URL.
In the case of a text channel it will send the text back to the user via instant messaging or SMS.
Argument: message is a string
Argument: **options is a set of optional keyword arguments.
See https://www.tropo.com/docs/webapi/say
|
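A hypothetical Tropo WebAPI usage; if the object has a `voice` attribute it is injected automatically, otherwise it can be passed explicitly:

t = Tropo()
t.say('Hello there!', voice='kate')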
24,227 |
def get_cell_shift(flow_model):
assert flow_model.lower() in FlowModelConst.d8_deltas
return FlowModelConst.d8_deltas.get(flow_model.lower())
|
Get flow direction induced cell shift dict.
Args:
flow_model: Currently, "TauDEM", "ArcGIS", and "Whitebox" are supported.
|
24,228 |
def special_handling(self, text):
self._attempting(text)
return concatenation([
"?",
self.identifier,
"?",
], ignore_whitespace=True)(text).retyped(TokenType.special_handling)
|
special_handling = "?" , identifier , "?" ;
|
24,229 |
def getWord(self, pattern, returnDiff = 0):
minDist = 10000
closest = None
for w in self.patterns:
if type(self.patterns[w]) in [int, float]: continue
if len(self.patterns[w]) == len(pattern):
dist = reduce(operator.add, [(a - b) ** 2 for (a,b) in zip(self.patterns[w], pattern )])
if dist == 0.0:
if returnDiff:
return w, dist
else:
return w
if dist < minDist:
minDist = dist
closest = w
if returnDiff:
return closest, minDist
else:
return closest
|
Returns the word associated with pattern.
Example: net.getWord([0, 0, 0, 1]) => "tom"
This method now returns the closest pattern based on distance.
|
24,230 |
def qwarp_epi(dset,align_subbrick=5,suffix=,prefix=None):
info = nl.dset_info(dset)
if info is None:
nl.notify( % (dset),level=nl.level.error)
return False
if prefix is None:
prefix = nl.suffix(dset,suffix)
dset_sub = lambda x: % (nl.prefix(dset),x)
try:
align_dset = nl.suffix(dset_sub(align_subbrick),)
nl.calc( % (dset,align_subbrick),expr=,prefix=align_dset,datum=)
for i in xrange(info.reps):
if i != align_subbrick:
nl.calc( % (dset,i),expr=,prefix=dset_sub(i),datum=)
nl.run([
, ,
, , , , , ,
, ,
, align_dset,
, dset_sub(i),
, nl.suffix(dset_sub(i),)
],quiet=True)
cmd = [,,prefix]
if info.TR:
cmd += [,info.TR]
if info.slice_timing:
cmd += [,info.slice_timing]
cmd += [nl.suffix(dset_sub(i),) for i in xrange(info.reps)]
nl.run(cmd,quiet=True)
except Exception as e:
raise e
finally:
for i in xrange(info.reps):
for suffix in [,]:
try:
os.remove(nl.suffix(dset_sub(i),suffix))
except:
pass
|
Aligns an EPI time-series using 3dQwarp.
Very expensive and inefficient, but it can produce impressive alignment for EPI time-series with significant
distortions due to motion.
|
24,231 |
def neighbor(self, **kwargs):
ip_addr = kwargs.pop('ip_addr')
remote_as = kwargs.pop('remote_as', None)
vrf = kwargs.pop('vrf', 'default')
rbridge_id = kwargs.pop('rbridge_id', '1')
delete = kwargs.pop('delete', False)
callback = kwargs.pop('callback', self._callback)
ip_addr = ip_interface(unicode(ip_addr))
if not delete and remote_as is None:
raise ValueError(
    'remote_as must be specified when configuring a neighbor.')
neighbor_args = dict(router_bgp_neighbor_address=str(ip_addr.ip),
remote_as=remote_as,
vrf_name=vrf,
rbridge_id=rbridge_id)
if ip_addr.version == 4:
neighbor = getattr(self._rbridge,
)
ip_addr_path =
else:
neighbor_args[] = str(ip_addr.ip)
neighbor = getattr(self._rbridge,
)
ip_addr_path =
config = neighbor(**neighbor_args)
if delete:
neighbor = config.find(ip_addr_path)
neighbor.set(, )
neighbor.remove(neighbor.find())
if ip_addr.version == 6:
activate_args = dict(vrf_name=vrf, rbridge_id=rbridge_id,
af_ipv6_neighbor_address=str(ip_addr.ip))
activate_neighbor = getattr(self._rbridge,
)
deactivate = activate_neighbor(**activate_args)
deactivate.find(
).set(, )
callback(deactivate)
else:
if ip_addr.version == 6:
callback(config)
activate_args = dict(vrf_name=vrf, rbridge_id=rbridge_id,
af_ipv6_neighbor_address=str(ip_addr.ip))
activate_neighbor = getattr(self._rbridge,
)
config = activate_neighbor(**activate_args)
if kwargs.pop('get', False):
    return callback(config, handler='get_config')
return callback(config)
|
Add BGP neighbor.
Args:
ip_addr (str): IP Address of BGP neighbor.
remote_as (str): Remote ASN of BGP neighbor.
vrf (str): The VRF for this BGP process.
rbridge_id (str): The rbridge ID of the device on which BGP will be
configured in a VCS fabric.
delete (bool): Deletes the neighbor if `delete` is ``True``.
get (bool): Get config instead of editing config. (True, False)
callback (function): A function executed upon completion of the
method. The only parameter passed to `callback` will be the
``ElementTree`` `config`.
Returns:
Return value of `callback`.
Raises:
KeyError: if `remote_as` or `ip_addr` is not specified.
Examples:
>>> import pynos.device
>>> conn = ('10.24.39.211', '22')
>>> auth = ('admin', 'password')
>>> with pynos.device.Device(conn=conn, auth=auth) as dev:
... output = dev.bgp.local_asn(local_as='65535',
... rbridge_id='225')
... output = dev.bgp.neighbor(ip_addr='10.10.10.10',
... remote_as='65535', rbridge_id='225')
... output = dev.bgp.neighbor(ip_addr='10.10.10.10', get=True,
... remote_as='65535', rbridge_id='225')
... output = dev.bgp.neighbor(remote_as='65535',
... rbridge_id='225',
... ip_addr='2001:4818:f000:1ab:cafe:beef:1000:1')
... output = dev.bgp.neighbor(remote_as='65535', get=True,
... rbridge_id='225',
... ip_addr='2001:4818:f000:1ab:cafe:beef:1000:1')
... output = dev.bgp.neighbor(ip_addr='10.10.10.10',
... delete=True, rbridge_id='225')
... output = dev.bgp.neighbor(delete=True, rbridge_id='225',
... ip_addr='2001:4818:f000:1ab:cafe:beef:1000:1')
... dev.bgp.neighbor() # doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
NotImplementedError
KeyError
|
24,232 |
def handler(self, environ, start_response):
logger = logging.getLogger(__name__ + )
url = urlparse(reconstruct_url(environ, self.port))
try:
connection = self.connection_class(url.netloc)
path = url.geturl().replace('%s://%s' % (url.scheme, url.netloc),
                            '')
except Exception:
start_response(, [(, )])
logger.exception()
yield
return
body = length = None
try:
length = int(environ['CONTENT_LENGTH'])
except (KeyError, ValueError):
if environ["REQUEST_METHOD"] == "POST":
if environ.get("CONTENT_TYPE") == :
body = environ[].read()
try:
body = unquote_plus(body.decode("utf-8"))
if body.startswith("{") and body.endswith("}="):
body = body[0:len(body) - 1]
except Exception as e:
logger.exception(e)
logger.error("Could not decode body: %s", body)
length = len(body)
else:
body = environ['wsgi.input'].read(length)
logger.debug(, environ)
headers = dict(
    (key, value)
    for key, value in (
        (key[5:].lower().replace('_', '-'), value)
        for key, value in environ.items()
        if key.startswith('HTTP_')
    )
    if not is_hop_by_hop(key)
)
if 'content-type' not in headers:
    headers['content-type'] = environ['CONTENT_TYPE']
try:
logger.debug(,
environ[], path, headers)
connection.request(environ['REQUEST_METHOD'], path,
                   body=body, headers=headers)
except Exception as e:
if isinstance(e, ConnectionRefusedError):
pass
else:
logger.exception(e)
start_response(, [(, )])
yield .format(self.port).encode("utf-8")
return
try:
response = connection.getresponse()
except ConnectionResetError:
start_response(, [(, )])
yield .format(self.port).encode("utf-8")
return
hopped_headers = response.getheaders()
headers = [(key, value)
for key, value in hopped_headers
if not is_hop_by_hop(key)]
start_response('{0.status} {0.reason}'.format(response), headers)
while True:
chunk = response.read(4096)
if chunk:
yield chunk
else:
break
|
Proxy for requests to the actual http server
|
24,233 |
def getDefaultConfigObj(taskname,configObj,input_dict={},loadOnly=True):
if configObj is None:
configObj = teal.load(taskname)
elif isinstance(configObj,str):
if configObj.lower().strip() == 'defaults':
    configObj = teal.load(taskname, defaults=True)
    configObj.filename = taskname.lower() + '.cfg'
else:
configObj = teal.load(fileutil.osfn(configObj))
if input_dict not in [None,{}]:
validateUserPars(configObj,input_dict)
cfgpars.mergeConfigObj(configObj, input_dict)
if not loadOnly:
configObj = teal.teal(configObj,loadOnly=False)
return configObj
|
Return default configObj instance for task updated
with user-specified values from input_dict.
Parameters
----------
taskname : string
Name of task to load into TEAL
configObj : string
The valid values for 'configObj' would be::
None - loads last saved user .cfg file
'defaults' - loads task default .cfg file
name of .cfg file (string)- loads user-specified .cfg file
input_dict : dict
Set of parameters and values specified by user to be different from
what gets loaded in from the .cfg file for the task
loadOnly : bool
Setting 'loadOnly' to False causes the TEAL GUI to start allowing the
user to edit the values further and then run the task if desired.
|
24,234 |
def get_config(self):
return {
    'hash_name': self.hash_name,
    'dim': self.dim,
    'bin_width': self.bin_width,
    'projection_count': self.projection_count,
    'normals': self.normals
}
|
Returns pickle-serializable configuration struct for storage.
|
24,235 |
def _advapi32_create_handles(cipher, key, iv):
context_handle = None
if cipher == 'aes':
algorithm_id = {
16: Advapi32Const.CALG_AES_128,
24: Advapi32Const.CALG_AES_192,
32: Advapi32Const.CALG_AES_256,
}[len(key)]
else:
algorithm_id = {
    'des': Advapi32Const.CALG_DES,
    'tripledes_2key': Advapi32Const.CALG_3DES_112,
    'tripledes_3key': Advapi32Const.CALG_3DES,
    'rc2': Advapi32Const.CALG_RC2,
    'rc4': Advapi32Const.CALG_RC4,
}[cipher]
provider = Advapi32Const.MS_ENH_RSA_AES_PROV
context_handle = open_context_handle(provider, verify_only=False)
blob_header_pointer = struct(advapi32, 'BLOBHEADER')
blob_header = unwrap(blob_header_pointer)
blob_header.bType = Advapi32Const.PLAINTEXTKEYBLOB
blob_header.bVersion = Advapi32Const.CUR_BLOB_VERSION
blob_header.reserved = 0
blob_header.aiKeyAlg = algorithm_id
blob_struct_pointer = struct(advapi32, 'PLAINTEXTKEYBLOB')
blob_struct = unwrap(blob_struct_pointer)
blob_struct.hdr = blob_header
blob_struct.dwKeySize = len(key)
blob = struct_bytes(blob_struct_pointer) + key
flags = 0
if cipher in set(['rc2', 'rc4']) and len(key) == 5:
flags = Advapi32Const.CRYPT_NO_SALT
key_handle_pointer = new(advapi32, 'HCRYPTKEY *')
res = advapi32.CryptImportKey(
context_handle,
blob,
len(blob),
null(),
flags,
key_handle_pointer
)
handle_error(res)
key_handle = unwrap(key_handle_pointer)
if cipher == 'rc2':
    buf = new(advapi32, 'DWORD *', len(key) * 8)
res = advapi32.CryptSetKeyParam(
key_handle,
Advapi32Const.KP_EFFECTIVE_KEYLEN,
buf,
0
)
handle_error(res)
if cipher != 'rc4':
res = advapi32.CryptSetKeyParam(
key_handle,
Advapi32Const.KP_IV,
iv,
0
)
handle_error(res)
buf = new(advapi32, 'DWORD *', Advapi32Const.CRYPT_MODE_CBC)
res = advapi32.CryptSetKeyParam(
key_handle,
Advapi32Const.KP_MODE,
buf,
0
)
handle_error(res)
buf = new(advapi32, 'DWORD *', Advapi32Const.PKCS5_PADDING)
res = advapi32.CryptSetKeyParam(
key_handle,
Advapi32Const.KP_PADDING,
buf,
0
)
handle_error(res)
return (context_handle, key_handle)
|
Creates an HCRYPTPROV and HCRYPTKEY for symmetric encryption/decryption. The
HCRYPTPROV must be released by close_context_handle() and the
HCRYPTKEY must be released by advapi32.CryptDestroyKey() when done.
:param cipher:
A unicode string of "aes", "des", "tripledes_2key", "tripledes_3key",
"rc2", "rc4"
:param key:
A byte string of the symmetric key
:param iv:
The initialization vector - a byte string - unused for RC4
:return:
A tuple of (HCRYPTPROV, HCRYPTKEY)
|
24,236 |
def set_cookie(response, name, value, expiry_seconds=None, secure=False):
if expiry_seconds is None:
expiry_seconds = 90 * 24 * 60 * 60
expires = datetime.strftime(datetime.utcnow() +
                            timedelta(seconds=expiry_seconds),
                            "%a, %d-%b-%Y %H:%M:%S GMT")
response.set_cookie(name, value, expires=expires, secure=secure)
|
Set cookie wrapper that allows number of seconds to be given as the
expiry time, and ensures values are correctly encoded.
|
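For example, to remember a preference for one hour over HTTPS (assuming a Django-style response object):

set_cookie(response, 'theme', 'dark', expiry_seconds=3600, secure=True)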
24,237 |
def _execute_callback(async, callback):
from furious.async import Async
if not callback:
return async.result.payload
if isinstance(callback, Async):
return callback.start()
return callback()
|
Execute the given callback or insert the Async callback, or if no
callback is given return the async.result.
|
24,238 |
def record(self, i=0):
f = self.__getFileObj(self.dbf)
if not self.numRecords:
self.__dbfHeader()
i = self.__restrictIndex(i)
recSize = self.__recordFmt()[1]
f.seek(0)
f.seek(self.__dbfHeaderLength() + (i * recSize))
return self.__record()
|
Returns a specific dbf record based on the supplied index.
|
24,239 |
def _createIndexRti(self, index, nodeName):
return PandasIndexRti(index=index, nodeName=nodeName, fileName=self.fileName,
iconColor=self._iconColor)
|
Auxiliary method that creates a PandasIndexRti.
|
24,240 |
def transform(self, pyobject):
if pyobject is None:
    return ('none',)
object_type = type(pyobject)
try:
    method = getattr(self, object_type.__name__ + '_to_textual')
    return method(pyobject)
except AttributeError:
    return ('unknown',)
|
Transform a `PyObject` to textual form
|
24,241 |
def hrd_new(self, input_label="", skip=0):
xl_old=pyl.gca().get_xlim()
if input_label == "":
my_label="M="+str(self.header_attr[])+", Z="+str(self.header_attr[])
else:
my_label="M="+str(self.header_attr[])+", Z="+str(self.header_attr[])+"; "+str(input_label)
pyl.plot(self.data[skip:,self.cols[]-1],self.data[skip:,self.cols[]-1],label = my_label)
pyl.legend(loc=0)
xl_new=pyl.gca().get_xlim()
pyl.xlabel('log Teff')
pyl.ylabel('log L')
if any(array(xl_old)==0):
pyl.gca().set_xlim(max(xl_new),min(xl_new))
elif any(array(xl_new)==0):
pyl.gca().set_xlim(max(xl_old),min(xl_old))
else:
pyl.gca().set_xlim([max(xl_old+xl_new),min(xl_old+xl_new)])
|
Plot an HR diagram with options to skip the first N lines and
add a label string.
Parameters
----------
input_label : string, optional
Diagram label. The default is "".
skip : integer, optional
Skip the first n lines. The default is 0.
|
24,242 |
def _has_exclusive_option(cls, options):
return any([getattr(options, opt) is not None for opt in
cls.BASE_ERROR_SELECTION_OPTIONS])
|
Return `True` iff one or more exclusive options were selected.
|
24,243 |
def setBatchSize(self, val):
self._paramMap[self.batchSize] = val
pythonBigDL_method_name = "setBatchSize" + self.__class__.__name__
callBigDlFunc(self.bigdl_type, pythonBigDL_method_name, self.value, val)
return self
|
Sets the value of :py:attr:`batchSize`.
|
24,244 |
def Join(self, Id):
reply = self._Owner._DoCommand('CALL %s JOIN_CONFERENCE %s' % (self.Id, Id),
                               'CALL %s CONF_ID' % self.Id)
return Conference(self._Owner, reply.split()[-1])
|
Joins with another call to form a conference.
:Parameters:
Id : int
Call Id of the other call to join to the conference.
:return: Conference object.
:rtype: `Conference`
|
24,245 |
def external2internal(xe, bounds):
xi = np.empty_like(xe)
for i, (v, bound) in enumerate(zip(xe, bounds)):
a = bound[0]
b = bound[1]
if a is None and b is None:
    xi[i] = v
elif b is None:
    xi[i] = np.sqrt((v - a + 1.) ** 2. - 1)
elif a is None:
    xi[i] = np.sqrt((b - v + 1.) ** 2. - 1)
else:
xi[i] = np.arcsin((2. * (v - a) / (b - a)) - 1.)
return xi
|
Convert a series of external variables to internal variables
|
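A worked call covering the three bound cases (values are illustrative):

import numpy as np

xe = np.array([2.5, 3.0, 0.5])
bounds = [(None, None), (0.0, None), (0.0, 1.0)]
xi = external2internal(xe, bounds)
# xi[0] == 2.5                                   (unbounded: pass-through)
# xi[1] == np.sqrt((3 - 0 + 1)**2 - 1)           (lower bound only)
# xi[2] == np.arcsin(2*(0.5 - 0)/(1 - 0) - 1) == 0.0  (bounded both sides)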
24,246 |
def get_year_and_month(self, net, qs, **kwargs):
now = c.get_now()
year = now.year
month = now.month + net
month_orig = None
if 'cal_ignore' not in qs:
    if 'year' in self.kwargs and 'month' in self.kwargs:
        year, month_orig = map(
            int, (self.kwargs['year'], self.kwargs['month'])
        )
        month = month_orig + net
    else:
        try:
            year = int(self.request.GET['year'])
            month_orig = int(self.request.GET['month'])
            month = month_orig + net
except Exception:
pass
return c.clean_year_month(year, month, month_orig)
|
Get the year and month. First tries from kwargs, then from
querystrings. If none, or if cal_ignore qs is specified,
sets year and month to this year and this month.
|
24,247 |
def gmean(data, channels=None):
if channels is None:
data_stats = data
else:
data_stats = data[:, channels]
return scipy.stats.gmean(data_stats, axis=0)
|
Calculate the geometric mean of the events in an FCSData object.
Parameters
----------
data : FCSData or numpy array
NxD flow cytometry data where N is the number of events and D is
the number of parameters (aka channels).
channels : int or str or list of int or list of str, optional
Channels on which to calculate the statistic. If None, use all
channels.
Returns
-------
float or numpy array
The geometric mean of the events in the specified channels of
`data`.
|
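For instance, restricted to a single channel of a small event array:

import numpy as np

data = np.array([[1.0, 10.0],
                 [4.0, 10.0]])
gmean(data, channels=0)  # geometric mean of [1, 4] -> 2.0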
24,248 |
def main():
setup_main_logger(console=True, file_logging=False)
params = argparse.ArgumentParser(
    description='Initializes Sockeye embedding weights with pretrained word representations.')
arguments.add_init_embedding_args(params)
args = params.parse_args()
init_embeddings(args)
|
Commandline interface to initialize Sockeye embedding weights with pretrained word representations.
|
24,249 |
def musixmatch(song):
escape = re.sub(" -|", , artist)
title = song.title
title = re.sub(r"( )", , title)
title = re.sub(r"-\-{2,}-\-{2,}-https://www.musixmatch.com/lyrics/{}/{}pmxm-lyrics__content\n\n'
return text.strip()
|
Returns the lyrics found in musixmatch for the specified mp3 file or an
empty string if not found.
|
24,250 |
def _get_bootstrap_url(directory):
v = _get_buildout_ver(directory)
return _URL_VERSIONS.get(v, _URL_VERSIONS[DEFAULT_VER])
|
Get the most appropriate download URL for the bootstrap script.
directory
directory to execute in
|
24,251 |
def linear_interpolation_extrapolation(df, target_height):
heights_sorted = df.columns[
sorted(range(len(df.columns)),
key=lambda i: abs(df.columns[i] - target_height))]
return ((df[heights_sorted[1]] - df[heights_sorted[0]]) /
(heights_sorted[1] - heights_sorted[0]) *
(target_height - heights_sorted[0]) + df[heights_sorted[0]])
|
Linearly inter- or extrapolates between the values of a data frame.
This function can be used for the inter-/extrapolation of a parameter
(e.g wind speed) available at two or more different heights, to approximate
the value at hub height. The function is carried out when the parameter
`wind_speed_model`, `density_model` or `temperature_model` of an
instance of the :class:`~.modelchain.ModelChain` class is
'interpolation_extrapolation'.
Parameters
----------
df : pandas.DataFrame
DataFrame with time series for parameter that is to be interpolated or
extrapolated. The columns of the DataFrame are the different heights
for which the parameter is available. If more than two heights are
given, the two closest heights are used. See the example below for how
the DataFrame should look and how the function can be used.
target_height : float
Height for which the parameter is approximated (e.g. hub height).
Returns
-------
pandas.Series
Result of the inter-/extrapolation (e.g. wind speed at hub height).
Notes
-----
For the inter- and extrapolation the following equation is used:
.. math:: f(x) = \frac{(f(x_2) - f(x_1))}{(x_2 - x_1)} \cdot
(x - x_1) + f(x_1)
Examples
---------
>>> import numpy as np
>>> import pandas as pd
>>> wind_speed_10m = np.array([[3], [4]])
>>> wind_speed_80m = np.array([[6], [6]])
>>> weather_df = pd.DataFrame(np.hstack((wind_speed_10m,
... wind_speed_80m)),
... index=pd.date_range('1/1/2012',
... periods=2,
... freq='H'),
... columns=[np.array(['wind_speed',
... 'wind_speed']),
... np.array([10, 80])])
>>> value = linear_interpolation_extrapolation(
... weather_df['wind_speed'], 100)[0]
|
24,252 |
def replace_all_post_order(expression: Expression, rules: Iterable[ReplacementRule]) \
-> Union[Expression, Sequence[Expression]]:
return _replace_all_post_order(expression, rules)[0]
|
Replace all occurrences of the patterns according to the replacement rules.
A replacement rule consists of a *pattern*, that is matched against any subexpression
of the expression. If a match is found, the *replacement* callback of the rule is called with
the variables from the match substitution. Whatever the callback returns is used as a replacement for the
matched subexpression. This can either be a single expression or a sequence of expressions, which is then
integrated into the surrounding operation in place of the subexpression.
Note that the pattern can therefore not be a single sequence variable/wildcard, because only single expressions
will be matched.
Args:
expression:
The expression to which the replacement rules are applied.
rules:
A collection of replacement rules that are applied to the expression.
max_count:
If given, at most *max_count* applications of the rules are performed. Otherwise, the rules
are applied until there is no more match. If the set of replacement rules is not confluent,
the replacement might not terminate without a *max_count* set.
Returns:
The resulting expression after the application of the replacement rules. This can also be a sequence of
expressions, if the root expression is replaced with a sequence of expressions by a rule.
|
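A sketch with matchpy-style definitions (the operations f and g are made up for illustration):

from matchpy import Arity, Operation, Pattern, ReplacementRule, Symbol, Wildcard

f = Operation.new('f', Arity.unary)
g = Operation.new('g', Arity.unary)
a = Symbol('a')
x_ = Wildcard.dot('x')

# Rewrite every f(...) into g(...), bottom-up:
rule = ReplacementRule(Pattern(f(x_)), lambda x: g(x))
result = replace_all_post_order(f(f(a)), [rule])  # -> g(g(a))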
24,253 |
def illumination(x, gamma=1., contrast=1., saturation=1., is_random=False):
if is_random:
if not (len(gamma) == len(contrast) == len(saturation) == 2):
raise AssertionError("if is_random = True, the arguments are (min, max)")
illum_settings = np.random.randint(0, 3)
if illum_settings == 0:
gamma = np.random.uniform(gamma[0], 1.0)
elif illum_settings == 1:
gamma = np.random.uniform(1.0, gamma[1])
else:
gamma = 1
im_ = brightness(x, gamma=gamma, gain=1, is_random=False)
image = PIL.Image.fromarray(im_)
contrast_adjust = PIL.ImageEnhance.Contrast(image)
image = contrast_adjust.enhance(np.random.uniform(contrast[0], contrast[1]))
saturation_adjust = PIL.ImageEnhance.Color(image)
image = saturation_adjust.enhance(np.random.uniform(saturation[0], saturation[1]))
im_ = np.array(image)
else:
im_ = brightness(x, gamma=gamma, gain=1, is_random=False)
image = PIL.Image.fromarray(im_)
contrast_adjust = PIL.ImageEnhance.Contrast(image)
image = contrast_adjust.enhance(contrast)
saturation_adjust = PIL.ImageEnhance.Color(image)
image = saturation_adjust.enhance(saturation)
im_ = np.array(image)
return np.asarray(im_)
|
Perform illumination augmentation for a single image, randomly or non-randomly.
Parameters
-----------
x : numpy.array
An image with dimension of [row, col, channel] (default).
gamma : float
Change brightness (the same as ``tl.prepro.brightness``)
- if is_random=False, one float number, smaller than one means brighter, greater than one means darker.
- if is_random=True, tuple of two float numbers, (min, max).
contrast : float
Change contrast.
- if is_random=False, one float number, smaller than one means lower contrast (more blurred).
- if is_random=True, tuple of two float numbers, (min, max).
saturation : float
Change saturation.
- if is_random=False, one float number, smaller than one means less saturated.
- if is_random=True, tuple of two float numbers, (min, max).
is_random : boolean
If True, randomly change illumination. Default is False.
Returns
-------
numpy.array
A processed image.
Examples
---------
Random
>>> x = tl.prepro.illumination(x, gamma=(0.5, 5.0), contrast=(0.3, 1.0), saturation=(0.7, 1.0), is_random=True)
Non-random
>>> x = tl.prepro.illumination(x, 0.5, 0.6, 0.8, is_random=False)
|
24,254 |
def adjust_brightness_contrast(image, brightness=0., contrast=0.):
beta = 0
return cv2.addWeighted(image,
1 + float(contrast) / 100.,
image,
beta,
float(brightness))
|
Adjust the brightness and/or contrast of an image
:param image: OpenCV BGR image
:param contrast: Float, contrast adjustment with 0 meaning no change
:param brightness: Float, brightness adjustment with 0 meaning no change
|
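Because `beta` is 0, the call reduces to `image * (1 + contrast/100) + brightness`; for example:

import numpy as np

# 128 * 1.1 + 5 = 145.8, which cv2.addWeighted rounds to 146 for uint8:
img = np.full((2, 2, 3), 128, dtype=np.uint8)
out = adjust_brightness_contrast(img, brightness=5., contrast=10.)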
24,255 |
def update_series(self, series):
url = make_series_url(series.key)
resp = self.session.put(url, series.to_json())
return resp
|
Update a series with new attributes. This does not change
any of the data written to this series. The recommended workflow for
series updates is to pull a Series object down using the
:meth:`get_series` method, change its attributes, then pass it into
this method.
:param series: the series to update
:type series: `tempodb.protocol.Series` object
:rtype: :class:`tempodb.response.Response` object with the updated
:class:`tempodb.protocol.objects.Series` as the data payload
|
24,256 |
def filter_empty_parameters(func):
@wraps(func)
def func_wrapper(self, *args, **kwargs):
my_kwargs = {key: value for key, value in kwargs.items()
if value not in EMPTIES}
args_is_empty = all(arg in EMPTIES for arg in args)
if (
{, }.issuperset(my_kwargs) or not my_kwargs
) and args_is_empty:
return
return func(self, *args, **my_kwargs)
return func_wrapper
|
Decorator that filters out empty parameters.
:param func: function that you want wrapping
:type func: function
|
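A hypothetical application, assuming `EMPTIES` covers values like `None` and `''`:

class Client:
    @filter_empty_parameters
    def search(self, **kwargs):
        return kwargs

Client().search(name='x', tag=None)  # -> {'name': 'x'}; tag is dropped
Client().search(tag=None)            # -> None; nothing meaningful to send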
24,257 |
def compress(x, y):
polarity = "02" if y % 2 == 0 else "03"
wrap = lambda x: x
if not is_py2:
wrap = lambda x: bytes(x, 'ascii')
return unhexlify(wrap("%s%0.64x" % (polarity, x)))
|
Given a x,y coordinate, encode in "compressed format"
Returned is always 33 bytes.
|
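An illustration of the layout: y's parity selects the 02/03 prefix byte and x fills 32 big-endian bytes, 33 bytes in total:

pub = compress(0xdeadbeef, 4)  # even y -> prefix 0x02
assert len(pub) == 33 and pub[:1] == b'\x02'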
24,258 |
def translate_path(self, path):
path = path.split('?', 1)[0]
path = path.split('#', 1)[0]
path = posixpath.normpath(urllib_parse.unquote(path))
words = path.split('/')
words = filter(None, words)
path = os.getcwd()
for word in words:
drive, word = os.path.splitdrive(word)
head, word = os.path.split(word)
if word in (os.curdir, os.pardir): continue
path = os.path.join(path, word)
return path
|
Translate a /-separated PATH to the local filename syntax.
Components that mean special things to the local file system
(e.g. drive or directory names) are ignored. (XXX They should
probably be diagnosed.)
|
24,259 |
def IterAssociatorInstancePaths(self, InstanceName, AssocClass=None,
ResultClass=None,
Role=None, ResultRole=None,
FilterQueryLanguage=None, FilterQuery=None,
OperationTimeout=None, ContinueOnError=None,
MaxObjectCount=DEFAULT_ITER_MAXOBJECTCOUNT,
**extra):
_validateIterCommonParams(MaxObjectCount, OperationTimeout)
pull_result = None
try:
if (self._use_assoc_path_pull_operations is None or
self._use_assoc_path_pull_operations):
try:
pull_result = self.OpenAssociatorInstancePaths(
InstanceName,
AssocClass=AssocClass,
ResultClass=ResultClass,
Role=Role,
ResultRole=ResultRole,
FilterQueryLanguage=FilterQueryLanguage,
FilterQuery=FilterQuery,
OperationTimeout=OperationTimeout,
ContinueOnError=ContinueOnError,
MaxObjectCount=MaxObjectCount, **extra)
self._use_assoc_path_pull_operations = True
for inst in pull_result.paths:
yield inst
while not pull_result.eos:
pull_result = self.PullInstancePaths(
pull_result.context, MaxObjectCount=MaxObjectCount)
for inst in pull_result.paths:
yield inst
pull_result = None
return
except CIMError as ce:
if (self._use_assoc_path_pull_operations is None and
ce.status_code == CIM_ERR_NOT_SUPPORTED):
self._use_assoc_path_pull_operations = False
else:
raise
assert self._use_assoc_path_pull_operations is False
if FilterQuery is not None or FilterQueryLanguage is not None:
    raise ValueError(
        'FilterQuery and FilterQueryLanguage are not supported with '
        'the traditional AssociatorNames operation')
if ContinueOnError is not None:
    raise ValueError(
        'ContinueOnError is not supported with the traditional '
        'AssociatorNames operation')
enum_rslt = self.AssociatorNames(
InstanceName,
AssocClass=AssocClass,
ResultClass=ResultClass,
Role=Role,
ResultRole=ResultRole, **extra)
for inst in enum_rslt:
yield inst
finally:
if pull_result is not None and not pull_result.eos:
self.CloseEnumeration(pull_result.context)
pull_result = None
|
Retrieve the instance paths of the instances associated to a source
instance, using the Python :term:`py:generator` idiom to return the
result.
*New in pywbem 0.10 as experimental and finalized in 0.12.*
This method uses the corresponding pull operations if supported by the
WBEM server or otherwise the corresponding traditional operation.
This method is an alternative to using the pull operations directly,
that frees the user of having to know whether the WBEM server supports
pull operations.
This method is a generator function that retrieves instance paths from
the WBEM server and returns them one by one (using :keyword:`yield`)
when the caller iterates through the returned generator object. The
number of instance paths that are retrieved from the WBEM server in one
request (and thus need to be materialized in this method) is up to the
`MaxObjectCount` parameter if the corresponding pull operations are
used, or the complete result set all at once if the corresponding
traditional operation is used.
By default, this method attempts to perform the corresponding pull
operations
(:meth:`~pywbem.WBEMConnection.OpenAssociatorInstancePaths` and
:meth:`~pywbem.WBEMConnection.PullInstancePaths`).
If these pull operations are not supported by the WBEM server, this
method falls back to using the corresponding traditional operation
(:meth:`~pywbem.WBEMConnection.AssociatorNames`).
Whether the WBEM server supports these pull operations is remembered
in the :class:`~pywbem.WBEMConnection` object (by operation type), and
avoids unnecessary attempts to try these pull operations on that
connection in the future.
The `use_pull_operations` init parameter of
:class:`~pywbem.WBEMConnection` can be used to control the preference
for always using pull operations, always using traditional operations,
or using pull operations if supported by the WBEM server (the default).
This method provides all of the controls of the corresponding pull
operations except for the ability to set different response sizes on
each request; the response size (defined by the `MaxObjectCount`
parameter) is the same for all pull operations in the enumeration
session.
In addition, some functionality is only available if the corresponding
pull operations are used by this method:
* Filtering is not supported for the corresponding traditional
operation so that setting the `FilterQuery` or `FilterQueryLanguage`
parameters will be rejected if the corresponding traditional
operation is used by this method.
Note that this limitation is not a disadvantage compared to using the
corresponding pull operations directly, because in both cases, the
WBEM server must support the pull operations and their filtering
capability in order for the filtering to work.
* Setting the `ContinueOnError` parameter to `True` will be rejected if
the corresponding traditional operation is used by this method.
The enumeration session that is opened with the WBEM server when using
pull operations is closed automatically when the returned generator
object is exhausted, or when the generator object is closed using its
:meth:`~py:generator.close` method (which may also be called before the
generator is exhausted).
Parameters:
InstanceName (:class:`~pywbem.CIMInstanceName`):
The instance path of the source instance.
If this object does not specify a namespace, the default namespace
of the connection is used.
Its `host` attribute will be ignored.
AssocClass (:term:`string` or :class:`~pywbem.CIMClassName`):
Class name of an association class (case independent),
to filter the result to include only traversals of that association
class (or subclasses).
`None` means that no such filtering is performed.
ResultClass (:term:`string` or :class:`~pywbem.CIMClassName`):
Class name of an associated class (case independent),
to filter the result to include only traversals to that associated
class (or subclasses).
`None` means that no such filtering is performed.
Role (:term:`string`):
Role name (= property name) of the source end (case independent),
to filter the result to include only traversals from that source
role.
`None` means that no such filtering is performed.
ResultRole (:term:`string`):
Role name (= property name) of the far end (case independent),
to filter the result to include only traversals to that far
role.
`None` means that no such filtering is performed.
FilterQueryLanguage (:term:`string`):
The name of the filter query language used for the `FilterQuery`
parameter. The DMTF-defined Filter Query Language (see
:term:`DSP0212`) is specified as "DMTF:FQL".
If this parameter is not `None` and the traditional operation is
used by this method, :exc:`~py:exceptions.ValueError` will be
raised.
Not all WBEM servers support filtering for this operation because
it returns instance paths and the act of the server filtering
requires that it generate instances just for that purpose and then
discard them.
FilterQuery (:term:`string`):
The filter query in the query language defined by the
`FilterQueryLanguage` parameter.
If this parameter is not `None` and the traditional operation is
used by this method, :exc:`~py:exceptions.ValueError` will be
raised.
OperationTimeout (:class:`~pywbem.Uint32`):
Minimum time in seconds the WBEM Server shall maintain an open
enumeration session after a previous Open or Pull request is
sent to the client. Once this timeout time has expired, the
WBEM server may close the enumeration session.
* If not `None`, this parameter is sent to the WBEM server as the
proposed timeout for the enumeration session. A value of 0
indicates that the server is expected to never time out. The
server may reject the proposed value, causing a
:class:`~pywbem.CIMError` to be raised with status code
:attr:`~pywbem.CIM_ERR_INVALID_OPERATION_TIMEOUT`.
* If `None`, this parameter is not passed to the WBEM server, and
causes the server-implemented default timeout to be used.
ContinueOnError (:class:`py:bool`):
Indicates to the WBEM server to continue sending responses
after an error response has been sent.
* If `True`, the server is to continue sending responses after
sending an error response. Not all servers support continuation
on error; a server that does not support it must send an error
response if `True` was specified, causing
:class:`~pywbem.CIMError` to be raised with status code
:attr:`~pywbem.CIM_ERR_CONTINUATION_ON_ERROR_NOT_SUPPORTED`.
If the corresponding traditional operation is used by this
method, :exc:`~py:exceptions.ValueError` will be raised.
* If `False`, the server is requested to close the enumeration
after sending an error response.
* If `None`, this parameter is not passed to the WBEM server, and
causes the server-implemented default behaviour to be used.
:term:`DSP0200` defines that the server-implemented default is
`False`.
MaxObjectCount (:class:`~pywbem.Uint32`)
Maximum number of instances the WBEM server may return for each of
the open and pull requests issued during the iterations over the
returned generator object.
* If positive, the WBEM server is to return no more than the
specified number of instances.
* Zero is not allowed; it would mean that zero instances
are to be returned for open and all pull requests issued to the
server.
* The default is defined as a system config variable.
* `None` is not allowed.
**extra :
Additional keyword arguments are passed as additional operation
parameters to the WBEM server.
Note that :term:`DSP0200` does not define any additional parameters
for this operation.
Returns:
:term:`py:generator` iterating :class:`~pywbem.CIMInstanceName`:
A generator object that iterates the resulting CIM instance paths.
These instance paths have their host and namespace components set.
Raises:
Exceptions described in :class:`~pywbem.WBEMConnection`.
Example::
paths_generator = conn.IterAssociatorInstancePaths('CIM_Blah')
for path in paths_generator:
print('path {0}'.format(path))
|
24,260 |
def add_chan(self, chan, color=None, values=None, limits_c=None,
colormap=CHAN_COLORMAP, alpha=None, colorbar=False):
if limits_c is None and self._chan_limits is not None:
limits_c = self._chan_limits
chan_colors, limits = _prepare_colors(color=color, values=values,
limits_c=limits_c,
colormap=colormap, alpha=alpha,
chan=chan)
self._chan_limits = limits
xyz = chan.return_xyz()
marker = Markers()
marker.set_data(pos=xyz, size=CHAN_SIZE, face_color=chan_colors)
self._add_mesh(marker)
if colorbar:
self._view.add(_colorbar_for_surf(colormap, limits))
|
Add channels to visualization
Parameters
----------
chan : instance of Channels
channels to plot
color : tuple
3-, 4-element tuple, representing RGB and alpha, between 0 and 1
values : ndarray
array with values for each channel
limits_c : tuple of 2 floats, optional
min and max values to normalize the color
colormap : str
one of the colormaps in vispy
alpha : float
transparency (0 = transparent, 1 = opaque)
colorbar : bool
add a colorbar at the back of the surface
|
24,261 |
def titlefy(subject):
def clean_word(word):
return _APOS_PATTERN.sub(lambda m: u % (m.group(1), m.group(2) if not m.group(2) == else u":-AandQ '.join(res)
|
Titlecases the provided subject but respects common abbreviations.
This function returns ``None`` if the provided `subject` is ``None``. It
returns an empty string if the provided subject is empty.
`subject`
A cable's subject.
|
24,262 |
def _process_output_source_directive(schema, current_schema_type, ast,
location, context, local_unique_directives):
output_source_directive = local_unique_directives.get('output_source', None)
if output_source_directive:
    if has_encountered_output_source(context):
        raise GraphQLCompilationError(u'Cannot have more than one output source!')
    if is_in_optional_scope(context):
        raise GraphQLCompilationError(u'Cannot have the output source in an optional block!')
    set_output_source_data(context, location)
    return blocks.OutputSource()
else:
return None
|
Process the output_source directive, modifying the context as appropriate.
Args:
schema: GraphQL schema object, obtained from the graphql library
current_schema_type: GraphQLType, the schema type at the current location
ast: GraphQL AST node, obtained from the graphql library
location: Location object representing the current location in the query
context: dict, various per-compilation data (e.g. declared tags, whether the current block
is optional, etc.). May be mutated in-place in this function!
local_unique_directives: dict, directive name string -> directive object, containing
unique directives present on the current AST node *only*
Returns:
an OutputSource block, if one should be emitted, or None otherwise
|
24,263 |
def integer(description, **kwargs) -> typing.Type:
kwargs['description'] = description
return type('Integer', (Integer,), kwargs)
|
Create a :class:`~doctor.types.Integer` type.
:param description: A description of the type.
:param kwargs: Can include any attribute defined in
:class:`~doctor.types.Integer`
|
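A hypothetical usage following the documented pattern (the attribute names are assumed from the Integer type):

Age = integer('The age of a user.', minimum=0, maximum=130)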
24,264 |
def toFilename(url):
urlp = urlparse(url)
path = urlp.path
if not path:
path = "file_{}".format(int(time.time()))
value = re.sub(r'[^\w\s-]', '', path).strip().lower()
return re.sub(r'[-\s]+', '-', value).strip("-")[-200:]
|
Takes a URL and returns a filesystem-safe filename derived from its path.
|
24,265 |
def map_constructor(self, loader, node, deep=False):
mapping = {}
for key_node, value_node in node.value:
key = loader.construct_object(key_node, deep=deep)
value = loader.construct_object(value_node, deep=deep)
if key in mapping:
raise ValueError(f"Duplicate key: \"{key}\"")
mapping[key] = value
return mapping
|
Walk the mapping, rejecting any duplicate keys.
|
24,266 |
def do_continue(self, arg):
if self.cmdprefix:
raise CmdError("prefix not allowed")
if arg:
raise CmdError("too many arguments")
if self.debug.get_debugee_count() > 0:
return True
|
continue - continue execution
g - continue execution
go - continue execution
|
24,267 |
def _check_holiday_structure(self, times):
if not isinstance(times, list):
raise TypeError("a list is required")
for time in times:
if not isinstance(time, tuple):
raise TypeError("a tuple is required")
if len(time) > 5:
raise TypeError("Target time takes at most 5 arguments"
                " (%d given)" % len(time))
if len(time) < 5:
    raise TypeError("Required argument '%s' (pos %d)"
                    " not found" % (TIME_LABEL[len(time)], len(time)))
self._check_time_format(TIME_LABEL, time)
|
To check the structure of the HolidayClass
:param list times: list of years, months, days, or week numbers
:rtype: None or Exception
:return: in the case of exception returns the exception
|
24,268 |
def _submit_request(self):
try:
self._response = urlopen(self._url)
except URLError as e:
msg = ('Could not submit request to the ACS Zeropoint Calculator: '
       '{}. {}'.format(str(e), self._msg_div))
LOG.error(msg)
self._failed = True
else:
self._failed = False
|
Submit a request to the ACS Zeropoint Calculator.
If an exception is raised during the request, an error message is
given. Otherwise, the response is saved in the corresponding
attribute.
|
24,269 |
def merge(self, elements):
from collections import Counter
from lltk.utils import list2tuple, tuple2list
merged = tuple2list([value for value, count in Counter(list2tuple(list(elements))).most_common()])
return merged
|
Merges all scraping results to a list sorted by frequency of occurrence.
|
24,270 |
def nice_display(item):
if hasattr(item, 'all'):
    return ', '.join(map(text_type, item.all()))
return item
|
Display a comma-separated list of models for M2M fields
|
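For a Django M2M manager this joins the related objects; anything else passes through unchanged (model and field names are illustrative):

nice_display(article.tags)  # -> 'python, django'
nice_display(42)            # -> 42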
24,271 |
def set_resolved_url(self, item=None, subtitles=None):
    if self._end_of_directory:
        raise Exception('Current XBMC handle has been removed. Either '
                        'set_resolved_url(), end_of_directory(), or '
                        'finish() has already been called.')
    self._end_of_directory = True
    succeeded = True
    if item is None:
        # None item means the url could not be resolved
        item = {}
        succeeded = False
    if isinstance(item, basestring):
        item = {'path': item}
item = self._listitemify(item)
item.set_played(True)
xbmcplugin.setResolvedUrl(self.handle, succeeded,
item.as_xbmc_listitem())
if subtitles:
self._add_subtitles(subtitles)
return [item]
|
Takes a url or a listitem to be played. Used in conjunction with a
playable list item with a path that calls back into your addon.
:param item: A playable list item or url. Pass None to alert XBMC of a
failure to resolve the item.
.. warning:: When using set_resolved_url you should ensure
the initial playable item (which calls back
into your addon) doesn't have a trailing
slash in the URL. Otherwise it won't work
reliably with XBMC's PlayMedia().
:param subtitles: A URL to a remote subtitles file or a local filename
for a subtitles file to be played along with the
item.
|
24,272 |
def add_row(self, key: str, default: str=None,
unit_label: str=None, enable: bool=None):
self.keys.append(ttk.Label(self, text=key))
self.defaults.append(default)
self.unit_labels.append(
ttk.Label(self, text=unit_label if unit_label else '')
)
self.enables.append(enable)
self.values.append(ttk.Entry(self))
row_offset = 1 if self.title is not None else 0
for i in range(len(self.keys)):
self.keys[i].grid_forget()
        self.keys[i].grid(row=row_offset, column=0, sticky='e')
self.values[i].grid(row=row_offset, column=1)
if self.unit_labels[i]:
            self.unit_labels[i].grid(row=row_offset, column=3, sticky='w')
if self.defaults[i]:
self.values[i].config(state=tk.NORMAL)
self.values[i].delete(0, tk.END)
self.values[i].insert(0, self.defaults[i])
if self.enables[i] in [True, None]:
self.values[i].config(state=tk.NORMAL)
elif self.enables[i] is False:
self.values[i].config(state=tk.DISABLED)
row_offset += 1
        self.values[i].unbind('<Return>')
        self.values[i].unbind('<Tab>')
        if self.callback is not None:
            def callback(event):
                self.callback()
            self.values[i].bind('<Return>', callback)
            self.values[i].bind('<Tab>', callback)
|
Add a single row and re-draw as necessary
:param key: the name and dict accessor
:param default: the default value
:param unit_label: the label that should be \
applied at the right of the entry
:param enable: the 'enabled' state (defaults to True)
:return:
|
24,273 |
def info(zone, show_all=False):
    ret = {}
    res = __salt__['cmd.run_all']('zonecfg -z {zone} info'.format(
        zone=zone,
    ))
    if res['retcode'] == 0:
        resname = None
        resdata = {}
        for line in res['stdout'].split("\n"):
            # skip lines that cannot contain key:value data
            if ':' not in line:
                continue
            # skip calculated values unless requested
            if line.startswith('['):
                if not show_all:
                    continue
                line = line.rstrip()[1:-1]
            # extract the key
            key = line.strip().split(':')[0]
            if '[' in key:
                key = key[1:]
            # parse calculated resource (if requested)
            if key in _zonecfg_info_resources_calculated:
                if resname:
                    ret[resname].append(resdata)
                if show_all:
                    resname = key
                    resdata = {}
                    if key not in ret:
                        ret[key] = []
                else:
                    resname = None
                    resdata = {}
            # parse resource
            elif key in _zonecfg_info_resources:
                if resname:
                    ret[resname].append(resdata)
                resname = key
                resdata = {}
                if key not in ret:
                    ret[key] = []
            # store resource property
            elif line.startswith("\t"):
                # skip calculated values unless requested
                if line.strip().startswith('['):
                    if not show_all:
                        continue
                    line = line.strip()[1:-1]
                if key == 'property':  # handle special 'property' keys
                    if 'property' not in resdata:
                        resdata[key] = {}
                    kv = _parse_value(line.strip()[line.strip().index(':')+1:])
                    if 'name' in kv and 'value' in kv:
                        resdata[key][kv['name']] = kv['value']
                    else:
                        log.warning('zonecfg.info - not sure how to deal with: %s', kv)
                else:
                    resdata[key] = _parse_value(line.strip()[line.strip().index(':')+1:])
            # store property
            else:
                if resname:
                    ret[resname].append(resdata)
                resname = None
                resdata = {}
                if key == 'property':  # handle special 'property' keys
                    if 'property' not in ret:
                        ret[key] = {}
                    kv = _parse_value(line.strip()[line.strip().index(':')+1:])
                    if 'name' in kv and 'value' in kv:
                        ret[key][kv['name']] = kv['value']
                    else:
                        log.warning('zonecfg.info - not sure how to deal with: %s', kv)
                else:
                    ret[key] = _parse_value(line.strip()[line.strip().index(':')+1:])
        if resname:
            ret[resname].append(resdata)
    return ret
|
Display the configuration from memory
zone : string
name of zone
show_all : boolean
also include calculated values like capped-cpu, cpu-shares, ...
CLI Example:
.. code-block:: bash
salt '*' zonecfg.info tallgeese
|
24,274 |
def print(self, indent=0):
    text = (
        '{indent}{magenta}{name}{none} '
        '({dim}{cls}, default {default}{none})'
    ).format(
        indent=' ' * indent,
        dim=Style.DIM,
        magenta=Fore.MAGENTA,
        none=Style.RESET_ALL,
        name=self.name,
        cls=self.cls,
        default=self.default
    )
    if self.description:
        text += '\n' + pretty_description(self.description,
                                          indent=indent + 2)
print(text)
|
Print self with optional indent.
|
24,275 |
def check_lon(self, dataset):
    results = []
    lon = util.get_lon_variable(dataset)
    if not lon:
        return Result(BaseCheck.HIGH, False, 'longitude',
                      ["longitude variable doesn't exist"])
    lon_var = dataset.variables[lon]
    # required attributes
    test_ctx = TestCtx(BaseCheck.HIGH,
                       'Required attributes for variable {}'.format(lon))
    test_ctx.assert_true(getattr(lon_var, 'standard_name', '') == 'longitude',
                         'standard_name attribute must be longitude')
    units = getattr(lon_var, 'units', '')
    test_ctx.assert_true(units == 'degrees_east',
                         'units are valid UDUNITS for longitude')
    test_ctx.assert_true(getattr(lon_var, 'axis', '') == 'X',
                         '{} axis attribute must be X'.format(lon))
    results.append(test_ctx.to_result())
    # recommended attributes
    test_ctx = TestCtx(BaseCheck.MEDIUM,
                       'Recommended attributes for variable {}'.format(lon))
    test_ctx.assert_true(getattr(lon_var, 'long_name', '') != '',
                         'long_name attribute should exist and not be empty')
    if hasattr(lon_var, 'comment'):
        test_ctx.assert_true(getattr(lon_var, 'comment', '') != '',
                             'comment attribute should not be empty if specified')
    test_ctx.assert_true(units == 'degrees_east',
                         '{} should have units degrees_east'.format(lon))
    results.append(test_ctx.to_result())
    return results
|
float lon(timeSeries) ; //........................................ Depending on the precision used for the variable, the data type could be int or double instead of float.
lon:long_name = "" ; //...................................... RECOMMENDED
lon:standard_name = "longitude" ; //......................... REQUIRED - This is fixed, do not change.
lon:units = "degrees_east" ; //.............................. REQUIRED - CF recommends degrees_east, but at least use UDUNITS.
lon:axis = "X" ; //.......................................... REQUIRED - Do not change.
lon:valid_min = 0.0f ; //.................................... RECOMMENDED - Replace this with correct value.
lon:valid_max = 0.0f ; //.................................... RECOMMENDED - Replace this with correct value.
lon:_FillValue = 0.0f;//..................................... REQUIRED if there could be missing values in the data.
lon:ancillary_variables = "" ; //............................ RECOMMENDED - List other variables providing information about this variable.
lon:comment = "" ; //........................................ RECOMMENDED - Add useful, additional information here.
|
24,276 |
def _find_only_column_of_type(sframe, target_type, type_name, col_name):
image_column_name = None
if type(target_type) != list:
target_type = [target_type]
for name, ctype in zip(sframe.column_names(), sframe.column_types()):
if ctype in target_type:
if image_column_name is not None:
                raise ToolkitError('No "{col_name}" column specified and more than one '
                                   '{type_name} column in "dataset". Can not infer correct '
                                   '{col_name} column.'.format(col_name=col_name, type_name=type_name))
image_column_name = name
if image_column_name is None:
        raise ToolkitError('No %s column in "dataset".' % type_name)
return image_column_name
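# Minimal usage sketch (assumes turicreate imported as tc; the column names
# are illustrative, not from the original source): with exactly one Image
# column, the helper returns its name, otherwise it raises ToolkitError.
#   sf = tc.SFrame({'img': [tc.Image()], 'label': ['cat']})
#   _find_only_column_of_type(sf, tc.Image, 'image', 'feature')  # -> 'img'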
|
Finds the only column in `SFrame` with a type specified by `target_type`.
If there are zero or more than one such columns, an exception will be
raised. The name and type of the target column should be provided as
strings for the purpose of error feedback.
|
24,277 |
async def do_authentication(sender):
    # endianness of the nonce doesn't matter since it's random
    nonce = int.from_bytes(os.urandom(16), 'big', signed=True)
    res_pq = await sender.send(ReqPqMultiRequest(nonce))
    assert isinstance(res_pq, ResPQ), 'Step 1 answer was %s' % res_pq
    if res_pq.nonce != nonce:
        raise SecurityError('Step 1 invalid nonce from server')
pq = get_int(res_pq.pq)
p, q = Factorization.factorize(pq)
p, q = rsa.get_byte_array(p), rsa.get_byte_array(q)
    new_nonce = int.from_bytes(os.urandom(32), 'little', signed=True)
pq_inner_data = bytes(PQInnerData(
pq=rsa.get_byte_array(pq), p=p, q=q,
nonce=res_pq.nonce,
server_nonce=res_pq.server_nonce,
new_nonce=new_nonce
))
cipher_text, target_fingerprint = None, None
for fingerprint in res_pq.server_public_key_fingerprints:
cipher_text = rsa.encrypt(fingerprint, pq_inner_data)
if cipher_text is not None:
target_fingerprint = fingerprint
break
    if cipher_text is None:
        raise SecurityError(
            'Step 2 could not find a valid key for fingerprints: {}'
            .format(', '.join(
                [str(f) for f in res_pq.server_public_key_fingerprints])
            )
        )
server_dh_params = await sender.send(ReqDHParamsRequest(
nonce=res_pq.nonce,
server_nonce=res_pq.server_nonce,
p=p, q=q,
public_key_fingerprint=target_fingerprint,
encrypted_data=cipher_text
))
    assert isinstance(
        server_dh_params, (ServerDHParamsOk, ServerDHParamsFail)),\
        'Step 2.1 answer was %s' % server_dh_params
    if server_dh_params.nonce != res_pq.nonce:
        raise SecurityError('Step 2 invalid nonce from server')
    if server_dh_params.server_nonce != res_pq.server_nonce:
        raise SecurityError('Step 2 invalid server nonce from server')
    if isinstance(server_dh_params, ServerDHParamsFail):
        nnh = int.from_bytes(
            sha1(new_nonce.to_bytes(32, 'little', signed=True)).digest()[4:20],
            'little', signed=True
        )
        if server_dh_params.new_nonce_hash != nnh:
            raise SecurityError('Step 2 invalid DH fail nonce from server')
    assert isinstance(server_dh_params, ServerDHParamsOk),\
        'Step 2.2 answer was %s' % server_dh_params
key, iv = helpers.generate_key_data_from_nonce(
res_pq.server_nonce, new_nonce
)
if len(server_dh_params.encrypted_answer) % 16 != 0:
        raise SecurityError('Step 3 AES block size mismatch')
plain_text_answer = AES.decrypt_ige(
server_dh_params.encrypted_answer, key, iv
)
    with BinaryReader(plain_text_answer) as reader:
        reader.read(20)  # hash sum
        server_dh_inner = reader.tgread_object()
        assert isinstance(server_dh_inner, ServerDHInnerData),\
            'Step 3 answer was %s' % server_dh_inner
    if server_dh_inner.nonce != res_pq.nonce:
        raise SecurityError('Step 3 invalid nonce in encrypted answer')
    if server_dh_inner.server_nonce != res_pq.server_nonce:
        raise SecurityError('Step 3 invalid server nonce in encrypted answer')
dh_prime = get_int(server_dh_inner.dh_prime, signed=False)
g_a = get_int(server_dh_inner.g_a, signed=False)
time_offset = server_dh_inner.server_time - int(time.time())
b = get_int(os.urandom(256), signed=False)
gb = pow(server_dh_inner.g, b, dh_prime)
gab = pow(g_a, b, dh_prime)
client_dh_inner = bytes(ClientDHInnerData(
nonce=res_pq.nonce,
server_nonce=res_pq.server_nonce,
retry_id=0,
g_b=rsa.get_byte_array(gb)
))
client_dh_inner_hashed = sha1(client_dh_inner).digest() + client_dh_inner
client_dh_encrypted = AES.encrypt_ige(client_dh_inner_hashed, key, iv)
dh_gen = await sender.send(SetClientDHParamsRequest(
nonce=res_pq.nonce,
server_nonce=res_pq.server_nonce,
encrypted_data=client_dh_encrypted,
))
nonce_types = (DhGenOk, DhGenRetry, DhGenFail)
    assert isinstance(dh_gen, nonce_types), 'Step 3.1 answer was %s' % dh_gen
    name = dh_gen.__class__.__name__
    if dh_gen.nonce != res_pq.nonce:
        raise SecurityError('Step 3 invalid {} nonce from server'.format(name))
    if dh_gen.server_nonce != res_pq.server_nonce:
        raise SecurityError(
            'Step 3 invalid {} server nonce from server'.format(name))
    auth_key = AuthKey(rsa.get_byte_array(gab))
    nonce_number = 1 + nonce_types.index(type(dh_gen))
    new_nonce_hash = auth_key.calc_new_nonce_hash(new_nonce, nonce_number)
    dh_hash = getattr(dh_gen, 'new_nonce_hash{}'.format(nonce_number))
    if dh_hash != new_nonce_hash:
        raise SecurityError('Step 3 invalid new nonce hash')
    if not isinstance(dh_gen, DhGenOk):
        raise AssertionError('Step 3.2 answer was %s' % dh_gen)
return auth_key, time_offset
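# Sanity sketch of the Diffie-Hellman relation the exchange above relies on,
# with toy numbers (the real dh_prime is 2048-bit): both sides derive the
# same shared secret g**(a*b) mod p.
#   g, p, a, b = 3, 23, 6, 15
#   assert pow(pow(g, a, p), b, p) == pow(pow(g, b, p), a, p)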
|
Executes the authentication process with the Telegram servers.
:param sender: a connected `MTProtoPlainSender`.
:return: returns a (authorization key, time offset) tuple.
|
24,278 |
def order_target(self) -> Optional[Union[int, Point2]]:
if self.orders:
if isinstance(self.orders[0].target, int):
return self.orders[0].target
else:
return Point2.from_proto(self.orders[0].target)
return None
|
Returns the target tag (if it is a Unit) or Point2 (if it is a Position)
from the first order, returns None if the unit is idle
|
24,279 |
def perform_command(self):
if len(self.actual_arguments) < 2:
return self.print_help()
source_url = self.actual_arguments[0]
output_file_path = self.actual_arguments[1]
download = not self.has_option("--list")
if self.has_option("--largest-audio"):
largest_audio = True
else:
largest_audio = not self.has_option("--smallest-audio")
download_format = self.has_option_with_value("--format")
try:
if download:
self.print_info(u"Downloading audio stream from ..." % source_url)
downloader = Downloader(logger=self.logger)
result = downloader.audio_from_youtube(
source_url,
download=True,
output_file_path=output_file_path,
download_format=download_format,
largest_audio=largest_audio,
)
self.print_info(u"Downloading audio stream from ... done" % source_url)
self.print_success(u"Downloaded file " % result)
else:
self.print_info(u"Downloading stream info from ..." % source_url)
downloader = Downloader(logger=self.logger)
result = downloader.audio_from_youtube(
source_url,
download=False
)
self.print_info(u"Downloading stream info from ... done" % source_url)
msg = []
msg.append(u"%s\t%s\t%s\t%s" % ("Format", "Extension", "Bitrate", "Size"))
for r in result:
filesize = gf.human_readable_number(r["filesize"])
msg.append(u"%s\t%s\t%s\t%s" % (r["format"], r["ext"], r["abr"], filesize))
self.print_generic(u"Available audio streams:")
self.print_generic(u"\n".join(msg))
return self.NO_ERROR_EXIT_CODE
except ImportError:
self.print_no_dependency_error()
except Exception as exc:
self.print_error(u"An unexpected error occurred while downloading audio from YouTube:")
self.print_error(u"%s" % exc)
return self.ERROR_EXIT_CODE
|
Perform command and return the appropriate exit code.
:rtype: int
|
24,280 |
def instance_attr_ancestors(self, name, context=None):
for astroid in self.ancestors(context=context):
if name in astroid.instance_attrs:
yield astroid
|
Iterate over the parents that define the given name as an attribute.
:param name: The name to find definitions for.
:type name: str
:returns: The parents that define the given name as
an instance attribute.
:rtype: iterable(NodeNG)
|
24,281 |
def host_domains(self, ip=None, limit=None, **kwargs):
    return self._results('host-domains', '/v1/{0}/host-domains'.format(ip), limit=limit, **kwargs)
|
Pass in an IP address.
|
24,282 |
def merge(objects, compat='no_conflicts', join='outer'):
from .dataarray import DataArray
from .dataset import Dataset
dict_like_objects = [
obj.to_dataset() if isinstance(obj, DataArray) else obj
for obj in objects]
variables, coord_names, dims = merge_core(dict_like_objects, compat, join)
merged = Dataset._construct_direct(
variables, coord_names, dims, indexes=None)
return merged
|
Merge any number of xarray objects into a single Dataset as variables.
Parameters
----------
objects : Iterable[Union[xarray.Dataset, xarray.DataArray, dict]]
Merge together all variables from these objects. If any of them are
DataArray objects, they must have a name.
compat : {'identical', 'equals', 'broadcast_equals', 'no_conflicts'}, optional
String indicating how to compare variables of the same name for
potential conflicts:
- 'broadcast_equals': all values must be equal when variables are
broadcast against each other to ensure common dimensions.
- 'equals': all values and dimensions must be the same.
- 'identical': all values, dimensions and attributes must be the
same.
- 'no_conflicts': only values which are not null in both datasets
must be equal. The returned dataset then contains the combination
of all non-null values.
join : {'outer', 'inner', 'left', 'right', 'exact'}, optional
How to combine objects with different indexes.
Returns
-------
Dataset
Dataset with combined variables from each object.
Examples
--------
>>> arrays = [xr.DataArray(n, name='var%d' % n) for n in range(5)]
>>> xr.merge(arrays)
<xarray.Dataset>
Dimensions: ()
Coordinates:
*empty*
Data variables:
var0 int64 0
var1 int64 1
var2 int64 2
var3 int64 3
var4 int64 4
Raises
------
xarray.MergeError
If any variables with the same name have conflicting values.
See also
--------
concat
|
24,283 |
def _print_message(self, flag_message=None, color=None, padding=None,
reverse=False):
if flag_message:
flag_message = stdout_encode(flag(flag_message,
color=color if self.pretty else None,
show=False))
if not reverse:
print(padd(flag_message, padding),
self.format_messages(self.message))
else:
print(self.format_messages(self.message),
padd(flag_message, padding))
else:
print(self.format_messages(self.message))
self.message = []
|
Outputs the message to the terminal
|
24,284 |
def add(self, pointer, value):
doc = deepcopy(self.document)
parent, obj = None, doc
    try:
        for token in Pointer(pointer):
            parent, obj = obj, token.extract(obj, bypass_ref=True)
        else:
            # the pointer resolved every token: the target already exists,
            # so signal the container type to the handlers below
            if isinstance(parent, MutableSequence):
                raise OutOfRange(parent)
            if isinstance(parent, Mapping):
                raise OutOfBounds(parent)
            raise Error()
    except (OutOfBounds, OutOfRange, LastElement) as error:
        if not token.last:
            raise NonexistentTarget(obj)
        value = deepcopy(value)
        if isinstance(error, OutOfBounds):
            error.obj[str(token)] = value
        elif isinstance(error, OutOfRange):
            error.obj.insert(int(token), value)
        elif isinstance(error, LastElement):
            error.obj.append(value)
return Target(doc)
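# Hedged usage sketch (assuming a jsonspec-style Target wrapper with a
# .document attribute):
#   Target({'foo': 'bar'}).add('/baz', 'qux').document
#   -> {'foo': 'bar', 'baz': 'qux'}
#   Target({'a': [1, 3]}).add('/a/1', 2).document  # -> {'a': [1, 2, 3]}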
|
Add element to sequence, member to mapping.
:param pointer: the path to add in it
:param value: the new value
:return: resolved document
:rtype: Target
The pointer must reference one of:
- The root of the target document - whereupon the specified value
becomes the entire content of the target document.
- A member to add to an existing mapping - whereupon the supplied
value is added to that mapping at the indicated location. If the
member already exists, it is replaced by the specified value.
- An element to add to an existing sequence - whereupon the supplied
value is added to the sequence at the indicated location.
Any elements at or above the specified index are shifted one
position to the right.
    The specified index must not be greater than the number of elements
in the sequence.
If the "-" character is used to index the end of the sequence, this
has the effect of appending the value to the sequence.
|
24,285 |
def is_catchup_needed_during_view_change(self) -> bool:
    if self.caught_up_for_current_view():
        logger.info('{} is caught up for the current view {}'.format(self, self.viewNo))
        return False
    logger.info('{} is not caught up for the current view {}'.format(self, self.viewNo))
    if self.num_txns_caught_up_in_last_catchup() == 0:
        if self.has_ordered_till_last_prepared_certificate():
            logger.info('{} has ordered till the last prepared certificate'.format(self))
return False
if self.is_catch_up_limit(self.config.MIN_TIMEOUT_CATCHUPS_DONE_DURING_VIEW_CHANGE):
self.master_replica.last_prepared_before_view_change = None
return False
return True
|
Check if received a quorum of view change done messages and if yes
check if caught up till the
Check if all requests ordered till last prepared certificate
Check if last catchup resulted in no txns
|
24,286 |
def _setting(self, key, default):
if key not in self._settings:
        value = self._settings_in.get(
            key, os.environ.get('{}'.format(key).upper(), default))
self._settings[key] = value
return self._settings[key]
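# Lookup-order sketch: with key='endpoint' and default='localhost',
# self._settings_in['endpoint'] wins, then os.environ['ENDPOINT'], then
# 'localhost'; the exact env-var naming scheme above is an assumption.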
|
Return the setting, checking config, then the appropriate
environment variable, falling back to the default, caching the
results.
:param str key: The key to get
:param any default: The default value if not set
:return: str
|
24,287 |
def coroutine(func):
def wrapper(*args, **kwargs):
gen = func(*args, **kwargs)
        val = next(gen)
        if val is not None:
            raise TypeError('the priming next() call yielded a value')
return gen
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
return wrapper
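# Hedged usage sketch (the 'echo' coroutine is illustrative): the decorator
# replaces the explicit priming call to next().
#   @coroutine
#   def echo():
#       while True:
#           print((yield))
#   gen = echo()       # already primed; no next(gen) needed
#   gen.send('hello')  # prints 'hello'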
|
Wraps a PEP-342 enhanced generator in a way that avoids boilerplate of the "priming" call to ``next``.
Args:
func (Callable): The function constructing a generator to decorate.
Returns:
Callable: The decorated generator.
|
24,288 |
def __openlib(self):
    if self.__getattribute__('_libloaded'):
        return
libpath_list = self.__get_libres()
for p in libpath_list:
try:
libres = resource_filename(self._module_name, p)
self.lib = self.ffi.dlopen(libres)
return
except:
continue
try:
libres = resource_filename(self._module_name, self._libpath)
self.lib = self.ffi.dlopen(libres)
except:
try:
self._libloaded = True
            libdir = ''
if self._module_name is not None:
mod = sys.modules.get(self._module_name, None)
if mod is not None:
libdir = os.path.dirname(mod.__file__) or os.getcwd()
libres = os.path.join(libdir, self._libpath)
self.lib = self.ffi.dlopen(libres)
except:
return None
|
Actual (lazy) dlopen() only when an attribute is accessed
|
24,289 |
def received_message(self, address, data):
self.value_cache.set(address, data)
if self.notify:
self.notify(address, data)
try:
listeners = self.address_listeners[address]
except KeyError:
listeners = []
for listener in listeners:
listener(address, data)
|
Process a message received from the KNX bus.
|
24,290 |
def get_event_loop():
ev = _state.event_loop
if not os.getenv(_EVENT_LOOP_KEY) and ev is not None:
ev.clear()
_state.event_loop = None
ev = None
if ev is None:
ev = EventLoop()
_state.event_loop = ev
        os.environ[_EVENT_LOOP_KEY] = '1'
return ev
|
Return a EventLoop instance.
A new instance is created for each new HTTP request. We determine
that we're in a new request by inspecting os.environ, which is reset
at the start of each request. Also, each thread gets its own loop.
|
24,291 |
def noEmptyNests(node):
if type(node)==list:
for i in node:
noEmptyNests(i)
if type(node)==dict:
for i in node.values():
noEmptyNests(i)
if node["children"] == []:
node.pop("children")
return node
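# Illustrative example (hypothetical input): empty 'children' lists are
# stripped in place on every nested dict.
#   noEmptyNests({"name": "root",
#                 "children": [{"name": "leaf", "children": []}]})
#   -> {"name": "root", "children": [{"name": "leaf"}]}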
|
recursively make sure that no dictionaries inside node contain empty children lists
|
24,292 |
def _remove(self, xer, primary):
if xer in primary:
notifier = primary.pop(xer)
notifier.shutdown()
|
Private method for removing a descriptor from the event loop.
    It does the inverse job of _add, and also adds a check in case the fd
    has already gone away.
|
24,293 |
def _create_user_posts_table(self):
with self._engine.begin() as conn:
user_posts_table_name = self._table_name("user_posts")
if not conn.dialect.has_table(conn, user_posts_table_name):
post_id_key = self._table_name("post") + ".id"
self._user_posts_table = sqla.Table(
user_posts_table_name, self._metadata,
sqla.Column("user_id", sqla.String(128), index=True),
sqla.Column("post_id", sqla.Integer,
sqla.ForeignKey(post_id_key,
onupdate="CASCADE",
ondelete="CASCADE"),
index=True),
                    sqla.UniqueConstraint('user_id', 'post_id', name='uix_user_posts'),
info=self._info
)
self._logger.debug("Created table with table name %s" %
user_posts_table_name)
else:
self._user_posts_table = \
self._metadata.tables[user_posts_table_name]
self._logger.debug("Reflecting to table with table name %s" %
user_posts_table_name)
|
Creates the table to store association info between user and blog
posts.
:return:
|
24,294 |
def clearRedisPools():
    global RedisPools
    global _redisManagedConnectionParams
    for pool in RedisPools.values():
        try:
            pool.disconnect()
        except Exception:
            pass
    for paramsList in _redisManagedConnectionParams.values():
        for params in paramsList:
            if 'connection_pool' in params:
                del params['connection_pool']
    RedisPools.clear()
    _redisManagedConnectionParams.clear()
|
clearRedisPools - Disconnect all managed connection pools,
    and clear the connection_pool attribute on all stored managed connection pools.
A "managed" connection pool is one where REDIS_CONNECTION_PARAMS does not define the "connection_pool" attribute.
If you define your own pools, IndexedRedis will use them and leave them alone.
This method will be called automatically after calling setDefaultRedisConnectionParams.
Otherwise, you shouldn't have to call it.. Maybe as some sort of disaster-recovery call..
|
24,295 |
def write(self, fname, append=True):
    mode = 'w'
    if append and os.path.isfile(fname):
        mode = 'a'
    header = '; '.join(['Template name', 'Detection time (UTC)',
                        'Number of channels', 'Channel list',
                        'Detection value', 'Threshold',
                        'Threshold type', 'Input threshold',
                        'Detection type'])
    print_str = "{0}; {1}; {2}; {3}; {4}; {5}; {6}; {7}; {8}\n".format(
        self.template_name, self.detect_time, self.no_chans,
        self.chans, self.detect_val, self.threshold,
        self.threshold_type, self.threshold_input, self.typeofdet)
    with open(fname, mode) as _f:
        if mode == 'w':
            # only write the header into newly created files
            _f.write(header + '\n')
        _f.write(print_str)
|
Write detection to csv formatted file.
Will append if append==True and file exists
:type fname: str
:param fname: Full path to file to open and write to.
:type append: bool
:param append: Set to true to append to an existing file, if True \
and file doesn't exist, will create new file and warn. If False
will overwrite old files.
|
24,296 |
def train(cls, data, isotonic=True):
boundaries, predictions = callMLlibFunc("trainIsotonicRegressionModel",
data.map(_convert_to_vector), bool(isotonic))
return IsotonicRegressionModel(boundaries.toArray(), predictions.toArray(), isotonic)
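# Hedged usage sketch (assumes an active SparkContext `sc`):
#   data = sc.parallelize([(1.0, 1.0, 1.0), (2.0, 2.0, 1.0),
#                          (3.0, 3.0, 1.0)])
#   model = IsotonicRegression.train(data)
#   model.predict(2.5)  # interpolates between fitted boundaries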
|
Train an isotonic regression model on the given data.
:param data:
RDD of (label, feature, weight) tuples.
:param isotonic:
Whether this is isotonic (which is default) or antitonic.
(default: True)
|
24,297 |
def current_frame(self, n):
self.sound.seek(n)
self._current_frame = n
|
Sets current frame to ``n``
:param integer n: Frame to set to ``current_frame``
|
24,298 |
def pprint(object, stream=None, indent=1, width=80, depth=None):
printer = PrettyPrinter(
stream=stream, indent=indent, width=width, depth=depth)
printer.pprint(object)
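# e.g. pprint({'b': [3, 1, 2], 'a': {'nested': True}}, width=40)
# wraps long containers at 40 columns instead of the default 80.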
|
Pretty-print a Python object to a stream [default is sys.stdout].
|
24,299 |
def network_create(auth=None, **kwargs):
**{"network_type": "vlan",\
"segmentation_id": "4010",\
"physical_network": "provider"}
cloud = get_operator_cloud(auth)
kwargs = _clean_kwargs(keep_name=True, **kwargs)
return cloud.create_network(**kwargs)
|
Create a network
name
Name of the network being created
shared : False
If ``True``, set the network as shared
admin_state_up : True
If ``True``, Set the network administrative state to "up"
external : False
Control whether or not this network is externally accessible
provider
An optional Python dictionary of network provider options
project_id
The project ID on which this network will be created
CLI Example:
.. code-block:: bash
salt '*' neutronng.network_create name=network2 \
shared=True admin_state_up=True external=True
salt '*' neutronng.network_create name=network3 \
provider='{"network_type": "vlan",\
"segmentation_id": "4010",\
"physical_network": "provider"}' \
project_id=1dcac318a83b4610b7a7f7ba01465548
|