Unnamed: 0 (int64, 0–389k) | code (stringlengths 26–79.6k) | docstring (stringlengths 1–46.9k)
---|---|---|
14,400 | def get_worksheet(self, id_or_name):
url = self.build_url(self._endpoints.get().format(id=quote(id_or_name)))
response = self.session.get(url)
if not response:
return None
return self.worksheet_constructor(parent=self, **{self._cloud_data_key: response.json()}) | Gets a specific worksheet by id or name |
14,401 | def remove_record(self, record):
    if not self.has_record(record):
        current_app.logger.warning(
            'Community removal: record {uuid} not in community "{comm}"'.format(uuid=record.id, comm=self.id))
    else:
        key = current_app.config['COMMUNITIES_RECORD_KEY']
        record[key] = [c for c in record[key] if c != self.id]
        if current_app.config['COMMUNITIES_OAI_ENABLED']:
            if self.oaiset.has_record(record):
                self.oaiset.remove_record(record) | Remove an already accepted record from the community.
:param record: Record object.
:type record: `invenio_records.api.Record` |
14,402 | def fetch_command(self, global_options, subcommand):
    commands = self.get_commands(global_options)
    try:
        klass = commands[subcommand]
    except KeyError:
        sys.stderr.write("Unknown command: %r\nType '%s help' for usage.\nMany commands will only run at project directory, maybe the directory is not right.\n" % \
            (subcommand, self.prog_name))
        sys.exit(1)
    return klass | Tries to fetch the given subcommand, printing a message with the
appropriate command called from the command line (usually
"uliweb") if it can't be found. |
14,403 | def _sanity_check_coerce_type_outside_of_fold(ir_blocks):
    is_in_fold = False
    for first_block, second_block in pairwise(ir_blocks):
        if isinstance(first_block, Fold):
            is_in_fold = True
        if not is_in_fold and isinstance(first_block, CoerceType):
            if not isinstance(second_block, (MarkLocation, Filter)):
                raise AssertionError(u'Found a CoerceType block outside of a fold that is not '
                                     u'followed by a MarkLocation or Filter block: {}'.format(ir_blocks))
        if isinstance(second_block, Unfold):
            is_in_fold = False | Ensure that CoerceType blocks not in a @fold are followed by a MarkLocation or Filter block. |
14,404 | def main(argv):
input_file = ""
output_file = ""
monitor = None
formula = None
trace = None
iformula = None
itrace = None
isys = None
online = False
fuzzer = False
l2m = False
debug = False
rounds = 1
server_port = 8080
webservice = False
help_str_extended = "fodtlmon V 0.1 .\n" + \
"For more information see fodtlmon home page\n Usage : mon.py [OPTIONS] formula trace" + \
"\n -h \t--help " + "\t display this help and exit" + \
"\n -i \t--input= [file] " + "\t the input file" + \
"\n -o \t--output= [path]" + "\t the output file" + \
"\n -f \t--formula " + "\t the formula" + \
"\n \t--iformula " + "\t path to file that contains the formula" + \
"\n -t \t--trace " + "\t the trace" + \
"\n \t--itrace " + "\t path to file that contains the trace" + \
"\n -1 \t--ltl " + "\t use LTL monitor" + \
"\n \t--l2m " + "\t call ltl2mon also" + \
"\n -2 \t--fotl " + "\t use FOTL monitor" + \
"\n -3 \t--dtl " + "\t use DTL monitor" + \
"\n -4 \t--fodtl " + "\t use FODTL monitor" + \
"\n \t--sys= [file] " + "\t Run a system from json file" + \
"\n \t--rounds= int " + "\t Number of rounds to run in the system" + \
"\n -z \t--fuzzer " + "\t run fuzzing tester" + \
"\n -d \t--debug " + "\t enable debug mode" + \
"\n \t--server " + "\t start web service" + \
"\n \t--port= int " + "\t server port number" + \
"\n\nReport fodtlmon bugs to [email protected]" + \
"\nfodtlmon home page: <https://github.com/hkff/fodtlmon>" + \
"\nfodtlmon is a free software released under GPL 3"
try:
opts, args = getopt.getopt(argv[1:], "hi:o:f:t:1234zd",
["help", "input=", "output=", "trace=", "formula=" "ltl", "fotl", "dtl",
"fodtl", "sys=", "fuzzer", "itrace=", "iformula=", "rounds=", "l2m", "debug",
"server", "port="])
except getopt.GetoptError:
print(help_str_extended)
sys.exit(2)
if len(opts) == 0:
print(help_str_extended)
for opt, arg in opts:
if opt in ("-h", "--help"):
print(help_str_extended)
sys.exit()
elif opt in ("-i", "--input"):
input_file = arg
elif opt in ("-o", "--output"):
output_file = arg
elif opt in ("-1", "--ltl"):
monitor = Ltlmon
elif opt in ("-2", "--fotl"):
monitor = Fotlmon
elif opt in ("-3", "--dtl"):
monitor = Dtlmon
elif opt in ("-4", "--fodtl"):
monitor = Fodtlmon
elif opt in ("-f", "--formula"):
formula = arg
elif opt in ("-t", "--trace"):
trace = arg
elif opt in "--sys":
isys = arg
elif opt in "--rounds":
rounds = int(arg)
elif opt in ("-z", "--fuzzer"):
fuzzer = True
elif opt in "--iformula":
iformula = arg
elif opt in "--itrace":
itrace = arg
elif opt in "--l2m":
l2m = True
elif opt in ("-d", "--debug"):
debug = True
elif opt in "--server":
webservice = True
elif opt in "--port":
server_port = int(arg)
if webservice:
Webservice.start(server_port)
return
if fuzzer:
if monitor is Ltlmon:
run_ltl_tests(monitor="ltl", alphabet=["P"], constants=["a", "b", "c"], trace_lenght=10000, formula_depth=5,
formula_nbr=10000, debug=debug)
elif monitor is Dtlmon:
run_dtl_tests()
return
if itrace is not None:
with open(itrace, "r") as f:
trace = f.read()
if iformula is not None:
with open(iformula, "r") as f:
formula = f.read()
if isys is not None:
with open(isys, "r") as f:
js = f.read()
s = System.parseJSON(js)
for x in range(rounds):
s.run()
return
if None not in (monitor, trace, formula):
tr = Trace().parse(trace)
fl = eval(formula[1:]) if formula.startswith(":") else FodtlParser.parse(formula)
mon = monitor(fl, tr)
res = mon.monitor(debug=debug)
print("")
print("Trace : %s" % tr)
print("Formula : %s" % fl)
print("Code : %s" % fl.toCODE())
print("PPrint : %s" % fl.prefix_print())
print("TSPASS : %s" % fl.toTSPASS())
print("LTLFO : %s" % fl.toLTLFO())
print("Result : %s" % res)
if l2m:
print(fl.toLTLFO())
res = ltlfo2mon(fl.toLTLFO(), tr.toLTLFO())
print("ltl2mon : %s" % res) | Main mon
:param argv: console arguments
:return: |
14,405 | def setData(self, data, setName=None):
if not isinstance(data, DataFrame):
if pd is not None and isinstance(data, pd.DataFrame):
data = DataFrame.fromPandas(data)
if setName is None:
lock_and_call(
lambda: self._impl.setData(data._impl),
self._lock
)
else:
lock_and_call(
lambda: self._impl.setData(data._impl, setName),
self._lock
) | Assign the data in the dataframe to the AMPL entities with the names
corresponding to the column names.
Args:
data: The dataframe containing the data to be assigned.
setName: The name of the set to which the indices values of the
DataFrame are to be assigned.
Raises:
AMPLException: if the data assignment procedure was not successful. |
14,406 | def getConfig(self, key):
if hasattr(self, key):
return getattr(self, key)
else:
return False | Get a Config Value |
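Since the row above is just a hasattr/getattr fallback, a self-contained sketch is easy to run; the `Config` class and its attributes below are illustrative, not from the source:

```python
class Config:
    host = "localhost"   # illustrative attributes, not from the source
    port = 8080

    def getConfig(self, key):
        if hasattr(self, key):
            return getattr(self, key)
        return False

cfg = Config()
print(cfg.getConfig("host"))   # localhost
print(cfg.getConfig("debug"))  # False
```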
14,407 | def get_instance(self, payload):
    return RoleInstance(self._version, payload, service_sid=self._solution['service_sid'], ) | Build an instance of RoleInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.chat.v2.service.role.RoleInstance
:rtype: twilio.rest.chat.v2.service.role.RoleInstance |
14,408 | def all_sharded_cluster_links(cluster_id, shard_id=None,
router_id=None, rel_to=None):
return [
sharded_cluster_link(rel, cluster_id, shard_id, router_id,
self_rel=(rel == rel_to))
for rel in (
, ,
, ,
, , ,
,
)
] | Get a list of all links to be included with ShardedClusters. |
14,409 | def parts(self, *args, **kwargs):
return self._client.parts(*args, activity=self.id, **kwargs) | Retrieve parts belonging to this activity.
Without any arguments it retrieves the Instances related to this task only.
This call only returns the configured properties in an activity. So properties that are not configured
are not in the returned parts.
See :class:`pykechain.Client.parts` for additional available parameters.
Example
-------
>>> task = project.activity('Specify Wheel Diameter')
>>> parts = task.parts()
To retrieve the models only.
>>> parts = task.parts(category=Category.MODEL) |
14,410 | def _filter_desc(self, indexing):
if len(indexing) > 0:
desc_tmp = np.zeros((len(indexing),len(self.header_desc)),dtype=)
data_tmp = np.zeros((len(indexing),len(self.header_data)))
style_tmp= np.zeros((len(indexing),len(self.header_style)),dtype=)
for i in range(len(indexing)):
for j in range(len(self.header_desc)):
desc_tmp[i][j] = self.desc[indexing[i]][j]
for k in range(len(self.header_data)):
data_tmp[i][k] = self.data[indexing[i]][k]
for l in range(len(self.header_style)):
style_tmp[i][l]= self.style[indexing[i]][l]
self.desc = desc_tmp
self.data = data_tmp
self.style= style_tmp
else:
print() | Private function to filter data, goes with filter_desc |
14,411 | def update_group_states_for_vifs(self, vifs, ack):
vif_keys = [self.vif_key(vif.device_id, vif.mac_address)
for vif in vifs]
self.set_fields(vif_keys, SECURITY_GROUP_ACK, ack) | Updates security groups by setting the ack field |
14,412 | def customchain(**kwargsChain):
def wrap(f):
@click.pass_context
@verbose
def new_func(ctx, *args, **kwargs):
newoptions = ctx.obj
newoptions.update(kwargsChain)
ctx.bitshares = BitShares(**newoptions)
ctx.blockchain = ctx.bitshares
set_shared_bitshares_instance(ctx.bitshares)
return ctx.invoke(f, *args, **kwargs)
return update_wrapper(new_func, f)
return wrap | This decorator allows you to access ``ctx.bitshares`` which is
an instance of BitShares. But in contrast to @chain, this is a
decorator that expects parameters that are directed right to
``BitShares()``.
... code-block::python
@main.command()
@click.option("--worker", default=None)
@click.pass_context
@customchain(foo="bar")
@unlock
def list(ctx, worker):
print(ctx.obj) |
14,413 | def stop(self):
if not self._running:
logging.warning('Sensor is not running; nothing to stop.')
return False
if self._cap:
self._cap.release()
self._cap = None
self._running = False
return True | Stop the sensor. |
14,414 | def release(input_dict, environment_dict):
allow_ssl_insecure = _get_user_argument(input_dict, ) is not None
groupname = environment_dict[]
nodelist = seash_global_variables.targets[groupname]
retdict = seash_helper.contact_targets(nodelist, _get_clearinghouse_vessel_handle)
clearinghouse_vesselhandles = []
faillist = []
for nodename in retdict:
if retdict[nodename][0]:
clearinghouse_vesselhandles.append(retdict[nodename][1])
else:
faillist.append(nodename)
client = _connect_to_clearinghouse(environment_dict[],
allow_ssl_insecure)
client.release_resources(clearinghouse_vesselhandles)
removed_nodehandles = seash_global_variables.targets[groupname][:]
for handle in removed_nodehandles:
for target in seash_global_variables.targets:
if handle in seash_global_variables.targets[target]:
seash_global_variables.targets[target].remove(handle) | <Purpose>
Releases the specified vessels.
<Arguments>
input_dict: The commanddict representing the user's input.
environment_dict: The dictionary representing the current seash
environment.
<Side Effects>
Connects to the Clearinghouse and releases vessels.
Removes the released vessels from the list of valid targets.
Does not guarantee that all vessels specified are released!
<Exceptions>
None
<Returns>
None |
14,415 | def _onSize(self, evt):
DEBUG_MSG("_onSize()", 2, self)
self._width, self._height = self.GetClientSize()
self.bitmap = wx.EmptyBitmap(self._width, self._height)
self._isDrawn = False
if self._width <= 1 or self._height <= 1: return
dpival = self.figure.dpi
winch = self._width/dpival
hinch = self._height/dpival
self.figure.set_size_inches(winch, hinch)
self.Refresh(eraseBackground=False)
FigureCanvasBase.resize_event(self) | Called when wxEventSize is generated.
In this application we attempt to resize to fit the window, so it
is better to take the performance hit and redraw the whole window. |
14,416 | def new(partname, content_type):
    xml = '<ct:Override xmlns:ct="%s"/>' % nsmap['ct']
    override = parse_xml(xml)
    override.set('PartName', partname)
    override.set('ContentType', content_type)
    return override | Return a new ``<Override>`` element with attributes set to parameter
values. |
14,417 | def add_multiple_to_queue(self, items, container=None):
    if container is not None:
        container_uri = container.resources[0].uri
        container_metadata = to_didl_string(container)
    else:
        container_uri = ''
        container_metadata = ''
    chunk_size = 16
    item_list = list(items)
    for index in range(0, len(item_list), chunk_size):
        chunk = item_list[index:index + chunk_size]
        uris = ' '.join([item.resources[0].uri for item in chunk])
        uri_metadata = ' '.join([to_didl_string(item) for item in chunk])
        self.avTransport.AddMultipleURIsToQueue([
            ('InstanceID', 0),
            ('UpdateID', 0),
            ('NumberOfURIs', len(chunk)),
            ('EnqueuedURIs', uris),
            ('EnqueuedURIsMetaData', uri_metadata),
            ('ContainerURI', container_uri),
            ('ContainerMetaData', container_metadata),
            ('DesiredFirstTrackNumberEnqueued', 0),
            ('EnqueueAsNext', 0)
        ]) | Add a sequence of items to the queue.
Args:
items (list): A sequence of items to the be added to the queue
container (DidlObject, optional): A container object which
includes the items. |
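The 16-at-a-time slicing in this row is a generic chunking pattern; a standalone sketch (the helper name is mine):

```python
def chunks(items, size=16):
    # Yield successive fixed-size slices, mirroring the loop in the row above
    items = list(items)
    for index in range(0, len(items), size):
        yield items[index:index + size]

print([len(c) for c in chunks(range(40))])  # [16, 16, 8]
```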
14,418 | def _get_sd(file_descr):
for stream_descr in NonBlockingStreamReader._streams:
if file_descr == stream_descr.stream.fileno():
return stream_descr
return None | Get streamdescriptor matching file_descr fileno.
:param file_descr: file object
:return: StreamDescriptor or None |
14,419 | def init_cas_a (year):
year = float (year)
models['cas-a'] = lambda f: cas_a(f, year) | Insert an entry for Cas A into the table of models. Need to specify the
year of the observations to account for the time variation of Cas A's
emission. |
14,420 | def consume(iterator, n):
"Advance the iterator n-steps ahead. If n is none, consume entirely."
if n is None:
collections.deque(iterator, maxlen=0)
else:
next(islice(iterator, n, n), None) | Advance the iterator n-steps ahead. If n is none, consume entirely. |
14,421 | def _legion_state(self, inputs, t, argv):
index = argv;
x = inputs[0];
y = inputs[1];
p = inputs[2];
potential_influence = heaviside(p + math.exp(-self._params.alpha * t) - self._params.teta);
dx = 3.0 * x - x ** 3.0 + 2.0 - y + self._stimulus[index] * potential_influence + self._coupling_term[index] + self._noise[index];
dy = self._params.eps * (self._params.gamma * (1.0 + math.tanh(x / self._params.betta)) - y);
neighbors = self.get_neighbors(index);
potential = 0.0;
for index_neighbor in neighbors:
potential += self._params.T * heaviside(self._excitatory[index_neighbor] - self._params.teta_x);
dp = self._params.lamda * (1.0 - p) * heaviside(potential - self._params.teta_p) - self._params.mu * p;
return [dx, dy, dp]; | !
@brief Returns new values of excitatory and inhibitory parts of oscillator and potential of oscillator.
@param[in] inputs (list): Initial values (current) of oscillator [excitatory, inhibitory, potential].
@param[in] t (double): Current time of simulation.
@param[in] argv (uint): Extra arguments that are not used for integration - index of oscillator.
@return (list) New values of excitatory and inhibitory part of oscillator and new value of potential (not assign). |
14,422 | def fix(self):
fill_layout = None
fill_height = y = 0
for _ in range(2):
if self._has_border:
x = y = start_y = 1
height = self._canvas.height - 2
width = self._canvas.width - 2
else:
x = y = start_y = 0
height = self._canvas.height
width = self._canvas.width
for layout in self._layouts:
if layout.fill_frame:
if fill_layout is None:
fill_layout = layout
elif fill_layout == layout:
y = layout.fix(x, y, width, fill_height)
else:
raise Highlander("Too many Layouts filling Frame")
else:
y = layout.fix(x, y, width, height)
if fill_layout is None:
break
else:
fill_height = max(1, start_y + height - y)
self._max_height = y
while self._focus < len(self._layouts):
try:
self._layouts[self._focus].focus(force_first=True)
break
except IndexError:
self._focus += 1
self._clear() | Fix the layouts and calculate the locations of all the widgets.
This function should be called once all Layouts have been added to the Frame and all
widgets added to the Layouts. |
14,423 | def _getgrnam(name, root=None):
    root = root or '/'
    passwd = os.path.join(root, 'etc/group')
    with salt.utils.files.fopen(passwd) as fp_:
        for line in fp_:
            line = salt.utils.stringutils.to_unicode(line)
            comps = line.strip().split(':')
            if len(comps) < 4:
                log.debug('Ignoring group line: %s', line)
                continue
            if comps[0] == name:
                comps[2] = int(comps[2])
                comps[3] = comps[3].split(',') if comps[3] else []
                return grp.struct_group(comps)
    raise KeyError('getgrnam(): name not found: {}'.format(name)) | Alternative implementation for getgrnam, that uses only /etc/group |
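A pure-stdlib sketch of the same /etc/group parsing with the Salt utilities stripped out (the helper name is mine; assumes a Unix host):

```python
import grp

def getgrnam_from_file(name, path="/etc/group"):
    with open(path) as fp_:
        for line in fp_:
            comps = line.strip().split(":")
            if len(comps) < 4 or comps[0] != name:
                continue
            comps[2] = int(comps[2])                             # gid as int
            comps[3] = comps[3].split(",") if comps[3] else []   # member list
            return grp.struct_group(comps)
    raise KeyError("getgrnam(): name not found: {}".format(name))

print(getgrnam_from_file("root"))
```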
14,424 | def hosts(self, **kwargs):
kwargs['channelID'] = self.id
return self.connection.listHosts(**kwargs) | Convenience wrapper around listHosts(...) for this channel ID.
:param **kwargs: keyword arguments to the listHosts RPC.
:returns: deferred that when fired returns a list of hosts (dicts). |
14,425 | def es_query_proto(path, selects, wheres, schema):
output = None
last_where = MATCH_ALL
for p in reversed(sorted( wheres.keys() | set(selects.keys()))):
where = wheres.get(p)
select = selects.get(p)
if where:
where = AndOp(where).partial_eval().to_esfilter(schema)
if output:
where = es_or([es_and([output, where]), where])
else:
if output:
if last_where is MATCH_ALL:
where = es_or([output, MATCH_ALL])
else:
where = output
else:
where = MATCH_ALL
if p == ".":
output = set_default(
{
"from": 0,
"size": 0,
"sort": [],
"query": where
},
select.to_es()
)
else:
output = {"nested": {
"path": p,
"inner_hits": set_default({"size": 100000}, select.to_es()) if select else None,
"query": where
}}
last_where = where
return output | RETURN TEMPLATE AND PATH-TO-FILTER AS A 2-TUPLE
:param path: THE NESTED PATH (NOT INCLUDING TABLE NAME)
:param wheres: MAP FROM path TO LIST OF WHERE CONDITIONS
:return: (es_query, filters_map) TUPLE |
14,426 | def retrieveVals(self):
file_stats = self._fileInfo.getContainerStats()
for contname in self._fileContList:
stats = file_stats.get(contname)
if stats is not None:
if self.hasGraph():
self.setGraphVal(, contname,
stats.get())
if self.hasGraph():
self.setGraphVal(, contname,
stats.get()) | Retrieve values for graphs. |
14,427 | def _add_access_token_to_response(self, response, access_token):
response['access_token'] = access_token.value
response['token_type'] = access_token.type
response['expires_in'] = access_token.expires_in | Adds the Access Token and the associated parameters to the Token Response. |
14,428 | def Create(self, *args, **kwargs):
    if not self.writable:
        raise IOError('Dataset is not writable')
    options = kwargs.pop('options', {})
    kwargs['options'] = driverdict_tolist(options or self.settings)
    return self._driver.Create(*args, **kwargs) | Calls Driver.Create() with optionally provided creation options as
dict, or falls back to driver specific defaults. |
14,429 | def wrap(self, sock):
    EMPTY_RESULT = None, {}
    try:
        s = self.context.wrap_socket(
            sock, do_handshake_on_connect=True, server_side=True,
        )
    except ssl.SSLError as ex:
        if ex.errno == ssl.SSL_ERROR_EOF:
            return EMPTY_RESULT
        elif ex.errno == ssl.SSL_ERROR_SSL:
            if _assert_ssl_exc_contains(ex, 'http request'):
                # The client is speaking HTTP to an HTTPS server.
                raise errors.NoSSLError
            return EMPTY_RESULT
        elif _assert_ssl_exc_contains(ex, 'unknown protocol'):
            return EMPTY_RESULT
        raise
    except generic_socket_error as exc:
        is_error0 = exc.args == (0, 'Error')
        if is_error0 and IS_ABOVE_OPENSSL10:
            return EMPTY_RESULT
        raise
    return s, self.get_environ(s) | Wrap and return the given socket, plus WSGI environ entries. |
14,430 | def _replace_coerce(self, to_replace, value, inplace=True, regex=False,
convert=False, mask=None):
if mask.any():
block = super()._replace_coerce(
to_replace=to_replace, value=value, inplace=inplace,
regex=regex, convert=convert, mask=mask)
if convert:
block = [b.convert(by_item=True, numeric=False, copy=True)
for b in block]
return block
return self | Replace value corresponding to the given boolean array with another
value.
Parameters
----------
to_replace : object or pattern
Scalar to replace or regular expression to match.
value : object
Replacement object.
inplace : bool, default False
Perform inplace modification.
regex : bool, default False
If true, perform regular expression substitution.
convert : bool, default True
If true, try to coerce any object types to better types.
mask : array-like of bool, optional
True indicate corresponding element is ignored.
Returns
-------
A new block if there is anything to replace or the original block. |
14,431 | def assume_role_credentials(self, arn):
    log.info("Assuming role as %s", arn)
    for name in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN']:
        if name in os.environ and not os.environ[name]:
            del os.environ[name]
    sts = self.amazon.session.client("sts")
    with self.catch_boto_400("Couldn't assume role", arn=arn):
        creds = sts.assume_role(RoleArn=arn, RoleSessionName="assume_role")  # session name not recoverable from source
    return {
        'AWS_ACCESS_KEY_ID': creds["Credentials"]["AccessKeyId"],
        'AWS_SECRET_ACCESS_KEY': creds["Credentials"]["SecretAccessKey"],
        'AWS_SESSION_TOKEN': creds["Credentials"]["SessionToken"]
    } | Return the environment variables for an assumed role |
14,432 | def _start(self):
self._recv_lock = coros.Semaphore(0)
self._send_lock = coros.Semaphore(0)
self._recv_thread = gevent.spawn(self._recv)
self._send_thread = gevent.spawn(self._send)
self._recv_thread.link(self._thread_error)
self._send_thread.link(self._thread_error) | Starts the underlying send and receive threads. |
14,433 | def get_memfree(memory, parallel):
number = int(memory.rstrip(string.ascii_letters))
memtype = memory.lstrip(string.digits)
if not memtype:
memtype = "G"
return "%d%s" % (number*parallel, memtype) | Computes the memory required for the memfree field. |
14,434 | def select_by_index(self, index):
match = str(index)
for opt in self.options:
if opt.get_attribute("index") == match:
self._setSelected(opt)
return
raise NoSuchElementException("Could not locate element with index %d" % index) | Select the option at the given index. This is done by examining the "index" attribute of an
element, and not merely by counting.
:Args:
- index - The option at this index will be selected
throws NoSuchElementException If there is no option with specified index in SELECT |
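A hedged usage sketch for Selenium's `Select` wrapper; the URL and element id are hypothetical and a running driver is required:

```python
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import Select

driver = webdriver.Chrome()
driver.get("https://example.com/form")                 # hypothetical page
select = Select(driver.find_element(By.ID, "colors"))  # hypothetical element id
select.select_by_index(2)  # selects the <option> whose index attribute is "2"
```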
14,435 | def clearOldCalibrations(self, date=None):
self.coeffs[] = [self.coeffs[][-1]]
self.coeffs[] = [self.coeffs[][-1]]
for light in self.coeffs[]:
self.coeffs[][light] = [
self.coeffs[][light][-1]]
for light in self.coeffs[]:
self.coeffs[][light] = [self.coeffs[][light][-1]] | if not only a specific date than remove all except of the youngest calibration |
14,436 | def buscar_timeout_opcvip(self, id_ambiente_vip):
    if not is_valid_int_param(id_ambiente_vip):
        raise InvalidParameterError(
            u'The identifier of environment-vip is invalid or was not informed.')
    url = 'environment-vip/get/timeout/' + str(id_ambiente_vip) + '/'
    code, xml = self.submit(None, 'GET', url)
    return self.response(code, xml) | Fetch nome_opcao_txt of the VIP Options where tipo_opcao = 'Timeout', looked up by environmentvip_id
:return: Dictionary with the following structure:
::
{‘timeout_opt’: ‘timeout_opt’: <'nome_opcao_txt'>}
:raise InvalidParameterError: Environment VIP identifier is null and invalid.
:raise EnvironmentVipNotFoundError: Environment VIP not registered.
:raise InvalidParameterError: finalidade_txt and cliente_txt is null and invalid.
:raise DataBaseError: Networkapi failed to access the database.
:raise XMLError: Networkapi failed to generate the XML response. |
14,437 | def _shrink_list(self, shrink):
res = []
if len(shrink) == 1:
return self.shrink(shrink[0])
else:
for a in shrink:
temp = self.shrink(a)
if temp:
res.append(temp)
return res | Shrink list down to essentials
:param shrink: List to shrink
:type shrink: list
:return: Shrunk list
:rtype: list |
14,438 | def updateTerms(self, data:list, LIMIT:int=20, _print:bool=True, crawl:bool=False,) -> list:
url_base = self.base_url +
merged_data = []
old_data = self.identifierSearches(
[d[] for d in data],
LIMIT = LIMIT,
_print = _print,
crawl = crawl,
)
for d in data:
url = url_base.format(id=str(d[]))
if d[] != old_data[int(d[])][]:
print(d[], old_data[int(d[])][])
exit()
merged = scicrunch_client_helper.merge(new=d, old=old_data[int(d[])])
merged = scicrunch_client_helper.superclasses_bug_fix(merged)
merged_data.append((url, merged))
resp = self.post(
merged_data,
LIMIT = LIMIT,
action = ,
_print = _print,
crawl = crawl,
)
return resp | Updates existing entities
Args:
data:
needs:
id <str>
ilx_id <str>
options:
definition <str> #bug with qutations
superclasses [{'id':<int>}]
type term, cde, anntation, or relationship <str>
synonyms {'literal':<str>}
existing_ids {'iri':<str>,'curie':<str>','change':<bool>, 'delete':<bool>}
LIMIT:
limit of concurrent
_print:
prints label of data presented
crawl:
True: Uses linear requests.
False: Uses concurrent requests from the asyncio and aiohttp modules
Returns:
List of filled in data parallel with the input data. If any entity failed with an
ignorable reason, it will return empty for the item in the list returned. |
14,439 | def read(self):
    if self._is_initialized:
        return
    self._is_initialized = True
    if not isinstance(self._file_or_files, (tuple, list)):
        files_to_read = [self._file_or_files]
    else:
        files_to_read = list(self._file_or_files)
    seen = set(files_to_read)
    num_read_include_files = 0
    while files_to_read:
        file_path = files_to_read.pop(0)
        fp = file_path
        file_ok = False
        if hasattr(fp, "seek"):
            self._read(fp, fp.name)
        else:
            try:
                with open(file_path, 'rb') as fp:
                    file_ok = True
                    self._read(fp, fp.name)
            except IOError:
                continue
        if self._has_includes():  # assumed: the loop header over include paths was dropped in extraction
            for _, include_path in self.items('include'):
                if include_path.startswith('~'):
                    include_path = osp.expanduser(include_path)
                if not osp.isabs(include_path):
                    if not file_ok:
                        continue
                    assert osp.isabs(file_path), "Need absolute paths to be sure our cycle checks will work"
                    include_path = osp.join(osp.dirname(file_path), include_path)
                include_path = osp.normpath(include_path)
                if include_path in seen or not os.access(include_path, os.R_OK):
                    continue
                seen.add(include_path)
                files_to_read.insert(0, include_path)
                num_read_include_files += 1
    if num_read_include_files == 0:
        self._merge_includes = False | Reads the data stored in the files we have been initialized with. It will
ignore files that cannot be read, possibly leaving an empty configuration
:return: Nothing
:raise IOError: if a file cannot be handled |
14,440 | def spaceout_and_resize_panels(self):
ncol = self.ncol
nrow = self.nrow
figure = self.figure
theme = self.theme
get_property = theme.themeables.property
left = figure.subplotpars.left
right = figure.subplotpars.right
top = figure.subplotpars.top
bottom = figure.subplotpars.bottom
top_strip_height = self.strip_size()
W, H = figure.get_size_inches()
    try:
        spacing_x = get_property('panel_spacing_x')
    except KeyError:
        spacing_x = 0.1
    try:
        spacing_y = get_property('panel_spacing_y')
    except KeyError:
        spacing_y = 0.1
    try:
        aspect_ratio = get_property('aspect_ratio')
    except KeyError:
        if not self.free['x'] and not self.free['y']:
            aspect_ratio = self.coordinates.aspect(
                self.layout.panel_params[0])
        else:
            aspect_ratio = None
    if theme.themeables.is_blank('strip_text_x'):
        top_strip_height = 0
    with suppress(KeyError):
        strip_margin_x = get_property('strip_margin_x')
        top_strip_height *= (1 + strip_margin_x)
w = ((right-left)*W - spacing_x*(ncol-1)) / ncol
h = ((top-bottom)*H - (spacing_y+top_strip_height)*(nrow-1)) / nrow
if aspect_ratio is not None:
h = w*aspect_ratio
H = (h*nrow + (spacing_y+top_strip_height)*(nrow-1)) / \
(top-bottom)
figure.set_figheight(H)
wspace = spacing_x/w
hspace = (spacing_y + top_strip_height) / h
figure.subplots_adjust(wspace=wspace, hspace=hspace) | Adjust the spacing between the panels and resize them
to meet the aspect ratio |
14,441 | def close(self):
assert self._opened, "RPC System is not opened"
logger.debug("Closing rpc system. Stopping ping loop")
self._ping_loop.stop()
if self._ping_current_iteration:
self._ping_current_iteration.cancel()
return self._connectionpool.close() | Stop listing for new connections and close all open connections.
:returns: Deferred that calls back once everything is closed. |
14,442 | def _inferSchemaFromList(self, data, names=None):
if not data:
raise ValueError("can not infer schema from empty dataset")
first = data[0]
if type(first) is dict:
warnings.warn("inferring schema from dict is deprecated,"
"please use pyspark.sql.Row instead")
schema = reduce(_merge_type, (_infer_schema(row, names) for row in data))
if _has_nulltype(schema):
raise ValueError("Some of types cannot be determined after inferring")
return schema | Infer schema from list of Row or tuple.
:param data: list of Row or tuple
:param names: list of column names
:return: :class:`pyspark.sql.types.StructType` |
14,443 | def distance_centimeters_continuous(self):
self._ensure_mode(self.MODE_US_DIST_CM)
    return self.value(0) * self._scale('US_DIST_CM') | Measurement of the distance detected by the sensor,
in centimeters.
The sensor will continue to take measurements so
they are available for future reads.
Prefer using the equivalent :meth:`UltrasonicSensor.distance_centimeters` property. |
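A hedged usage sketch, assuming an EV3 brick running ev3dev2 with the sensor plugged in:

```python
from ev3dev2.sensor.lego import UltrasonicSensor

us = UltrasonicSensor()
print(us.distance_centimeters_continuous)  # sensor keeps measuring between reads
```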
14,444 | def build(self, X, Y, w=None, edges=None):
self.reset()
if X is None or Y is None:
return
self.__set_data(X, Y, w)
if self.debug:
sys.stdout.write("Graph Preparation: ")
start = time.clock()
self.graph_rep = nglpy.Graph(
self.Xnorm,
self.graph,
self.max_neighbors,
self.beta,
connect=self.connect,
)
if self.debug:
end = time.clock()
sys.stdout.write("%f s\n" % (end - start)) | Assigns data to this object and builds the requested topological
structure
@ In, X, an m-by-n array of values specifying m
n-dimensional samples
@ In, Y, a m vector of values specifying the output
responses corresponding to the m samples specified by X
@ In, w, an optional m vector of values specifying the
weights associated to each of the m samples used. Default of
None means all points will be equally weighted
@ In, edges, an optional list of custom edges to use as a
starting point for pruning, or in place of a computed graph. |
14,445 | def adsSyncWriteControlReqEx(
port, address, ads_state, device_state, data, plc_data_type
):
sync_write_control_request = _adsDLL.AdsSyncWriteControlReqEx
ams_address_pointer = ctypes.pointer(address.amsAddrStruct())
ads_state_c = ctypes.c_ulong(ads_state)
device_state_c = ctypes.c_ulong(device_state)
if plc_data_type == PLCTYPE_STRING:
data = ctypes.c_char_p(data.encode("utf-8"))
data_pointer = data
data_length = len(data_pointer.value) + 1
else:
data = plc_data_type(data)
data_pointer = ctypes.pointer(data)
data_length = ctypes.sizeof(data)
error_code = sync_write_control_request(
port,
ams_address_pointer,
ads_state_c,
device_state_c,
data_length,
data_pointer,
)
if error_code:
raise ADSError(error_code) | Change the ADS state and the machine-state of the ADS-server.
:param int port: local AMS port as returned by adsPortOpenEx()
:param pyads.structs.AmsAddr adr: local or remote AmsAddr
:param int ads_state: new ADS-state, according to ADSTATE constants
:param int device_state: new machine-state
:param data: additional data
:param int plc_data_type: plc datatype, according to PLCTYPE constants |
14,446 | def getTableAsCsv(self, networkId, tableType, verbose=None):
    response = api(url=self.___url + 'networks/' + str(networkId) + '/tables/' + str(tableType) + '.csv', method="GET", verbose=verbose, parse_params=False)
return response | Returns a CSV representation of the table specified by the `networkId` and `tableType` parameters. All column names are included in the first row.
:param networkId: SUID of the network containing the table
:param tableType: Table type
:param verbose: print more
:returns: 200: successful operation |
14,447 | def start_resolver(finder=None, wheel_cache=None):
pip_command = get_pip_command()
pip_options = get_pip_options(pip_command=pip_command)
if not finder:
finder = get_finder(pip_command=pip_command, pip_options=pip_options)
if not wheel_cache:
wheel_cache = WHEEL_CACHE
_ensure_dir(fs_str(os.path.join(wheel_cache.cache_dir, "wheels")))
download_dir = PKGS_DOWNLOAD_DIR
_ensure_dir(download_dir)
_build_dir = create_tracked_tempdir(fs_str("build"))
_source_dir = create_tracked_tempdir(fs_str("source"))
preparer = partialclass(
pip_shims.shims.RequirementPreparer,
build_dir=_build_dir,
src_dir=_source_dir,
download_dir=download_dir,
wheel_download_dir=WHEEL_DOWNLOAD_DIR,
progress_bar="off",
build_isolation=False,
)
resolver = partialclass(
pip_shims.shims.Resolver,
finder=finder,
session=finder.session,
upgrade_strategy="to-satisfy-only",
force_reinstall=True,
ignore_dependencies=False,
ignore_requires_python=True,
ignore_installed=True,
isolated=False,
wheel_cache=wheel_cache,
use_user_site=False,
)
try:
if packaging.version.parse(pip_shims.shims.pip_version) >= packaging.version.parse("18.0"):
with pip_shims.shims.RequirementTracker() as req_tracker:
preparer = preparer(req_tracker=req_tracker)
yield resolver(preparer=preparer)
else:
preparer = preparer()
yield resolver(preparer=preparer)
finally:
finder.session.close() | Context manager to produce a resolver.
:param finder: A package finder to use for searching the index
:type finder: :class:`~pip._internal.index.PackageFinder`
:return: A 3-tuple of finder, preparer, resolver
:rtype: (:class:`~pip._internal.operations.prepare.RequirementPreparer`, :class:`~pip._internal.resolve.Resolver`) |
14,448 | def find_by_id(self, organization_export, params={}, **options):
path = "/organization_exports/%s" % (organization_export)
return self.client.get(path, params, **options) | Returns details of a previously-requested Organization export.
Parameters
----------
organization_export : {Id} Globally unique identifier for the Organization export.
[params] : {Object} Parameters for the request |
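A hedged usage sketch for this resource with the Asana Python client; the token, export GID, and the `state` field are assumptions:

```python
import asana

client = asana.Client.access_token("PERSONAL_ACCESS_TOKEN")  # hypothetical token
export = client.organization_exports.find_by_id("12345")     # hypothetical export id
print(export.get("state"))                                   # field name assumed
```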
14,449 | def format_modes(modes, full_modes=False, current_mode=None):
t = table.Table(((
if mode == current_mode else ,
str(Q.CGDisplayModeGetWidth(mode)),
str(Q.CGDisplayModeGetHeight(mode)),
+shorter_float_str(Q.CGDisplayModeGetRefreshRate(mode)),
format_pixelEncoding(
Q.CGDisplayModeCopyPixelEncoding(mode)))
for mode in modes))
t.set_key(2, )
t.set_key(3, )
t.set_key(4, )
t.set_alignment(, )
t.set_alignment(, )
t.set_separator(, )
created_flags_col = False
if full_modes:
t.append_col(tuple((.join(get_flags_of_mode(mode))
for mode in modes)), key=)
created_flags_col = True
else:
if len(frozenset(t.get_col())) == 1:
t.del_col()
if len(frozenset(t.get_col())) == 1:
t.del_col()
lut = {}
for i, row in enumerate(t):
row = tuple(row)
if row not in lut:
lut[row] = []
elif not created_flags_col:
t.append_col((,) * len(modes), key=)
lut[row].append(i)
for rw, indices in lut.iteritems():
if len(indices) == 1:
continue
flags = {}
for i in indices:
flags[i] = get_flags_of_mode(modes[i])
common_flags = reduce(lambda x, y: x.intersection(y),
map(frozenset, flags.itervalues()))
for i in indices:
t[i, ] = .join(frozenset(flags[i])
- common_flags)
if created_flags_col:
t.set_alignment(, )
return t | Creates a nice readily printable Table for a list of modes.
Used in `displays list' and the candidates list
in `displays set'. |
14,450 | def __step1(self):
C = self.C
n = self.n
for i in range(n):
minval = min(self.C[i])
for j in range(n):
self.C[i][j] -= minval
return 2 | For each row of the matrix, find the smallest element and
subtract it from every element in its row. Go to Step 2. |
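This is step 1 of the Hungarian (Munkres) algorithm: row-reduce the cost matrix. A standalone run of the same reduction:

```python
cost = [[4, 1, 3],
        [2, 0, 5],
        [3, 2, 2]]
reduced = [[c - min(row) for c in row] for row in cost]
print(reduced)  # [[3, 0, 2], [2, 0, 5], [1, 0, 0]]
```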
14,451 | def _handle_double_click(self, event):
if event.get_button()[1] == 1:
path_info = self.tree_view.get_path_at_pos(int(event.x), int(event.y))
if path_info:
path = path_info[0]
iter = self.list_store.get_iter(path)
model = self.list_store.get_value(iter, self.MODEL_STORAGE_ID)
selection = self.model.get_state_machine_m().selection
selection.focus = model | Double click with left mouse button focuses the element |
14,452 | def transform(self, X):
    if self.mode_ == 'target':
        return np.apply_along_axis(self._target, 1, np.reshape(X, (X.shape[0], X.shape[1] * X.shape[2])))
    if self.mode_ == 'majority':
        return np.apply_along_axis(self._majority, 1, np.reshape(X, (X.shape[0], X.shape[1] * X.shape[2])))
    print('Unknown mode; returning input unchanged.')
    return X | Parameters
----------
X : array-like, shape [n x m]
The mask in form of n x m array. |
14,453 | async def _async_connect(self):
try:
self.conn_coro = self.client.connected()
aenter = type(self.conn_coro).__aenter__(self.conn_coro)
self.stream = await aenter
logger.info(f"Agent {str(self.jid)} connected and authenticated.")
except aiosasl.AuthenticationFailure:
raise AuthenticationFailure(
"Could not authenticate the agent. Check user and password or use auto_register=True") | connect and authenticate to the XMPP server. Async mode. |
14,454 | def rytov_sc(radius=5e-6, sphere_index=1.339, medium_index=1.333,
wavelength=550e-9, pixel_size=1e-7, grid_size=(80, 80),
center=(39.5, 39.5), radius_sampling=42):
r_ryt, n_ryt = correct_rytov_sc_input(radius_sc=radius,
sphere_index_sc=sphere_index,
medium_index=medium_index,
radius_sampling=radius_sampling)
qpi = mod_rytov.rytov(radius=r_ryt,
sphere_index=n_ryt,
medium_index=medium_index,
wavelength=wavelength,
pixel_size=pixel_size,
grid_size=grid_size,
center=center,
radius_sampling=radius_sampling)
qpi["sim radius"] = radius
qpi["sim index"] = sphere_index
qpi["sim model"] = "rytov-sc"
return qpi | r"""Field behind a dielectric sphere, systematically corrected Rytov
This method implements a correction of
:func:`qpsphere.models.rytov`, where the
`radius` :math:`r_\text{Ryt}` and the `sphere_index`
:math:`n_\text{Ryt}` are corrected using
the approach described in :cite:`Mueller2018` (eqns. 3,4, and 5).
.. math::
n_\text{Ryt-SC} &= n_\text{Ryt} + n_\text{med} \cdot
\left( a_n x^2 + b_n x + c_n \right)
r_\text{Ryt-SC} &= r_\text{Ryt} \cdot
\left( a_r x^2 +b_r x + c_r \right)
&\text{with} x = \frac{n_\text{Ryt}}{n_\text{med}} - 1
The correction factors are given in
:data:`qpsphere.models.mod_rytov_sc.RSC_PARAMS`.
Parameters
----------
radius: float
Radius of the sphere [m]
sphere_index: float
Refractive index of the sphere
medium_index: float
Refractive index of the surrounding medium
wavelength: float
Vacuum wavelength of the imaging light [m]
pixel_size: float
Pixel size [m]
grid_size: tuple of floats
Resulting image size in x and y [px]
center: tuple of floats
Center position in image coordinates [px]
radius_sampling: int
Number of pixels used to sample the sphere radius when
computing the Rytov field. The default value of 42
pixels is a reasonable number for single-cell analysis.
Returns
-------
qpi: qpimage.QPImage
Quantitative phase data set |
14,455 | def keys(name, basepath='/etc/pki', **kwargs):
    ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
    pillar_kwargs = {}
    for key, value in six.iteritems(kwargs):
        pillar_kwargs['ext_pillar_virt.{0}'.format(key)] = value
    pillar = __salt__['pillar.ext']({'libvirt': '_'}, pillar_kwargs)
    paths = {
        'serverkey': os.path.join(basepath, 'libvirt',
                                  'private', 'serverkey.pem'),
        'servercert': os.path.join(basepath, 'libvirt',
                                   'servercert.pem'),
        'clientkey': os.path.join(basepath, 'libvirt',
                                  'private', 'clientkey.pem'),
        'clientcert': os.path.join(basepath, 'libvirt',
                                   'clientcert.pem'),
        'cacert': os.path.join(basepath, 'CA', 'cacert.pem')
    }
    for key in paths:
        p_key = 'libvirt.{0}.pem'.format(key)
        if p_key not in pillar:
            continue
        if not os.path.exists(os.path.dirname(paths[key])):
            os.makedirs(os.path.dirname(paths[key]))
        if os.path.isfile(paths[key]):
            with salt.utils.files.fopen(paths[key], 'r') as fp_:
                if salt.utils.stringutils.to_unicode(fp_.read()) != pillar[p_key]:
                    ret['changes'][key] = 'update'
        else:
            ret['changes'][key] = 'new'
    if not ret['changes']:
        ret['comment'] = 'All keys are correct'
    elif __opts__['test']:
        ret['result'] = None
        ret['comment'] = 'Libvirt keys are set to be updated'
        ret['changes'] = {}
    else:
        for key in ret['changes']:
            with salt.utils.files.fopen(paths[key], 'w+') as fp_:
                fp_.write(
                    salt.utils.stringutils.to_str(
                        pillar['libvirt.{0}.pem'.format(key)]
                    )
                )
        ret['comment'] = 'Updated libvirt certs and keys'
    return ret | Manage libvirt keys.
name
The name variable used to track the execution
basepath
Defaults to ``/etc/pki``, this is the root location used for libvirt
keys on the hypervisor
The following parameters are optional:
country
The country that the certificate should use. Defaults to US.
.. versionadded:: 2018.3.0
state
The state that the certificate should use. Defaults to Utah.
.. versionadded:: 2018.3.0
locality
The locality that the certificate should use.
Defaults to Salt Lake City.
.. versionadded:: 2018.3.0
organization
The organization that the certificate should use.
Defaults to Salted.
.. versionadded:: 2018.3.0
expiration_days
The number of days that the certificate should be valid for.
Defaults to 365 days (1 year)
.. versionadded:: 2018.3.0 |
14,456 | def shapely_formatter(_, vertices, codes=None):
elements = []
if codes is None:
for vertices_ in vertices:
if np.all(vertices_[0, :] == vertices_[-1, :]):
if len(vertices_) < 3:
elements.append(Point(vertices_[0, :]))
else:
elements.append(LinearRing(vertices_))
else:
elements.append(LineString(vertices_))
else:
for vertices_, codes_ in zip(vertices, codes):
starts = np.nonzero(codes_ == MPLPATHCODE.MOVETO)[0]
stops = np.nonzero(codes_ == MPLPATHCODE.CLOSEPOLY)[0]
try:
rings = [LinearRing(vertices_[start:stop+1, :])
for start, stop in zip(starts, stops)]
elements.append(Polygon(rings[0], rings[1:]))
except ValueError as err:
if np.any(stop - start - 1 == 0):
if stops[0] < starts[0]+2:
pass
else:
rings = [
LinearRing(vertices_[start:stop+1, :])
for start, stop in zip(starts, stops)
if stop >= start+2]
elements.append(Polygon(rings[0], rings[1:]))
else:
raise(err)
return elements | `Shapely`_ style contour formatter.
Contours are returned as a list of :class:`shapely.geometry.LineString`,
:class:`shapely.geometry.LinearRing`, and :class:`shapely.geometry.Point`
geometry elements.
Filled contours return a list of :class:`shapely.geometry.Polygon`
elements instead.
.. note:: If possible, `Shapely speedups`_ will be enabled.
.. _Shapely: http://toblerity.org/shapely/manual.html
.. _Shapely speedups: http://toblerity.org/shapely/manual.html#performance
See Also
--------
`descartes <https://bitbucket.org/sgillies/descartes/>`_ : Use `Shapely`_
or GeoJSON-like geometric objects as matplotlib paths and patches. |
14,457 | def plot_data():
var = [, , , , , , , ]
lims = np.array([[26, 33], [0, 10], [0, 36], [0, 6], [1005, 1025], [0, 0.6], [0, 2], [0, 9]])
for fname in fnames:
fig, axes = plt.subplots(nrows=4, ncols=2)
fig.set_size_inches(20, 10)
fig.subplots_adjust(top=0.95, bottom=0.01, left=0.2, right=0.99, wspace=0.0, hspace=0.07)
i = 0
for ax, Var, cmap in zip(axes.flat, var, cmaps):
lat, lon, z, data = test.read(Var, fname)
map1 = ax.scatter(lat, -z, c=data, cmap=cmap, s=10, linewidths=0., vmin=lims[i, 0], vmax=lims[i, 1])
y_formatter = mpl.ticker.ScalarFormatter(useOffset=False)
ax.xaxis.set_major_formatter(y_formatter)
if i == 6:
ax.set_xlabel()
ax.set_ylabel()
else:
ax.set_xticklabels([])
ax.set_yticklabels([])
ax.set_ylim(-z.max(), 0)
ax.set_xlim(lat.min(), lat.max())
cb = plt.colorbar(map1, ax=ax, pad=0.02)
cb.set_label(cmap.name + + + cmap.units + )
i += 1
fig.savefig( + fname.split()[0] + , bbox_inches=) | Plot sample data up with the fancy colormaps. |
14,458 | def config_acl(args):
r = fapi.get_repository_config_acl(args.namespace, args.config,
args.snapshot_id)
fapi._check_response_code(r, 200)
    acls = sorted(r.json(), key=lambda k: k['user'])
    return map(lambda acl: '{0}\t{1}'.format(acl['user'], acl['role']), acls) | Retrieve access control list for a method configuration |
14,459 | def set_current_limit(self, value, channel=1):
cmd = "I%d %f" % (channel, value)
self.write(cmd) | channel: 1=OP1, 2=OP2, AUX is not supported |
14,460 | def get_type_hints(obj, globalns=None, localns=None):
if getattr(obj, '__no_type_check__', None):
return {}
if isinstance(obj, type):
hints = {}
for base in reversed(obj.__mro__):
if globalns is None:
base_globals = sys.modules[base.__module__].__dict__
else:
base_globals = globalns
ann = base.__dict__.get('__annotations__', {})
for name, value in ann.items():
if value is None:
value = type(None)
if isinstance(value, str):
value = _ForwardRef(value)
value = _eval_type(value, base_globals, localns)
hints[name] = value
return hints
if globalns is None:
if isinstance(obj, types.ModuleType):
globalns = obj.__dict__
else:
globalns = getattr(obj, '__globals__', {})
if localns is None:
localns = globalns
elif localns is None:
localns = globalns
hints = getattr(obj, '__annotations__', None)
if hints is None:
if isinstance(obj, _allowed_types):
return {}
else:
raise TypeError('{!r} is not a module, class, method, '
                'or function.'.format(obj))
defaults = _get_defaults(obj)
hints = dict(hints)
for name, value in hints.items():
if value is None:
value = type(None)
if isinstance(value, str):
value = _ForwardRef(value)
value = _eval_type(value, globalns, localns)
if name in defaults and defaults[name] is None:
value = Optional[value]
hints[name] = value
return hints | Return type hints for an object.
This is often the same as obj.__annotations__, but it handles
forward references encoded as string literals, and if necessary
adds Optional[t] if a default value equal to None is set.
The argument may be a module, class, method, or function. The annotations
are returned as a dictionary. For classes, annotations include also
inherited members.
TypeError is raised if the argument is not of a type that can contain
annotations, and an empty dictionary is returned if no annotations are
present.
BEWARE -- the behavior of globalns and localns is counterintuitive
(unless you are familiar with how eval() and exec() work). The
search order is locals first, then globals.
- If no dict arguments are passed, an attempt is made to use the
globals from obj (or the respective module's globals for classes),
and these are also used as the locals. If the object does not appear
to have globals, an empty dictionary is used.
- If one dict argument is passed, it is used for both globals and
locals.
- If two dict arguments are passed, they specify globals and
locals, respectively. |
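A minimal demonstration of the forward-reference resolution the docstring describes (the class is illustrative):

```python
from typing import get_type_hints

class Node:
    value: int
    next: "Node"  # forward reference encoded as a string literal

print(get_type_hints(Node))
# {'value': <class 'int'>, 'next': <class '__main__.Node'>}
```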
14,461 | def average_data(self,ranges=[[None,None]],percentile=None):
ranges=copy.deepcopy(ranges)
for i in range(len(ranges)):
if ranges[i][0] is None:
ranges[i][0] = 0
else:
ranges[i][0] = int(ranges[i][0]*self.rate)
if ranges[i][1] is None:
ranges[i][1] = -1
else:
ranges[i][1] = int(ranges[i][1]*self.rate)
datas=np.empty((self.sweeps,len(ranges),2))
for iSweep in range(self.sweeps):
self.setSweep(iSweep)
for iRange in range(len(ranges)):
I1=ranges[iRange][0]
I2=ranges[iRange][1]
if percentile:
datas[iSweep][iRange][0]=np.percentile(self.dataY[I1:I2],percentile)
else:
datas[iSweep][iRange][0]=np.average(self.dataY[I1:I2])
datas[iSweep][iRange][1]=np.std(self.dataY[I1:I2])
return datas | given a list of ranges, return single point averages for every sweep.
Units are in seconds. Expects something like:
ranges=[[1,2],[4,5],[7,7.5]]
None values will be replaced with maximum/minimum bounds.
For baseline subtraction, make a range baseline then sub it youtself.
returns datas[iSweep][iRange][AVorSD]
if a percentile is given, return that percentile rather than average.
percentile=50 is the median, but requires sorting, and is slower. |
14,462 | def expanding_stdize(obj, **kwargs):
return (obj - obj.expanding(**kwargs).mean()) / (
obj.expanding(**kwargs).std()
) | Standardize a pandas object column-wise on expanding window.
**kwargs -> passed to `obj.expanding`
Example
-------
df = pd.DataFrame(np.random.randn(10, 3))
print(expanding_stdize(df, min_periods=5))
0 1 2
0 NaN NaN NaN
1 NaN NaN NaN
2 NaN NaN NaN
3 NaN NaN NaN
4 0.67639 -1.03507 0.96610
5 0.95008 -0.26067 0.27761
6 1.67793 -0.50816 0.19293
7 1.50364 -1.10035 -0.87859
8 -0.64949 0.08028 -0.51354
9 0.15280 -0.73283 -0.84907 |
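The docstring's example reproduced as a runnable script (a seed is added here so the output is reproducible):

```python
import numpy as np
import pandas as pd

def expanding_stdize(obj, **kwargs):
    # Subtract the expanding mean and divide by the expanding std, column-wise
    return (obj - obj.expanding(**kwargs).mean()) / obj.expanding(**kwargs).std()

np.random.seed(0)  # seed added for reproducibility
df = pd.DataFrame(np.random.randn(10, 3))
print(expanding_stdize(df, min_periods=5))
```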
14,463 | def _all_escape(self):
self.unesc = not self.unesc
self.urls, self.urls_unesc = self.urls_unesc, self.urls
urls = iter(self.urls)
for item in self.items:
if isinstance(item, urwid.Columns):
item[1].set_label(shorten_url(next(urls),
self.size[0],
self.shorten)) | Toggle URL escaping and refresh the shortened labels accordingly. |
14,464 | def usable_ids(cls, id, accept_multi=True):
try:
qry_id = [int(id)]
except ValueError:
try:
qry_id = cls.from_cn(id)
except Exception:
qry_id = None
if not qry_id or not accept_multi and len(qry_id) != 1:
msg = 'unable to resolve %r into a usable id' % id
cls.error(msg)
return qry_id if accept_multi else qry_id[0] | Retrieve id from input which can be an id or a cn. |
14,465 | def installed(name,
              pkgs=None,
              dir=None,
              user=None,
              force_reinstall=False,
              registry=None,
              env=None):
    ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}
    pkg_list = pkgs if pkgs else [name]
    try:
        installed_pkgs = __salt__['npm.list'](dir=dir, runas=user, env=env, depth=0)
    except (CommandNotFoundError, CommandExecutionError) as err:
        ret['result'] = False
        ret['comment'] = 'Error looking up \'{0}\': {1}'.format(name, err)
        return ret
    else:
        installed_pkgs = dict((p, info)
                              for p, info in six.iteritems(installed_pkgs))
    pkgs_satisfied = []
    pkgs_to_install = []
    def _pkg_is_installed(pkg, installed_pkgs):
        if (pkg_name in installed_pkgs and
                'version' in installed_pkgs[pkg_name]):
            return True
        elif '://' in pkg_name:
            for pkg_details in installed_pkgs.values():
                try:
                    pkg_from = pkg_details.get('from', '').split('://')[1]
                    if not pkg_from.endswith('.git') and pkg_name.startswith('git+'):
                        pkg_from += '.git'
                    if pkg_name.split('://')[1] == pkg_from:
                        return True
                except IndexError:
                    pass
        return False
    for pkg in pkg_list:
        matches = re.search(r'^(@?[^@\s]+)(?:@(\S+))?', pkg)
        pkg_name, pkg_ver = matches.group(1), matches.group(2) or None
        if force_reinstall is True:
            pkgs_to_install.append(pkg)
            continue
        if not _pkg_is_installed(pkg, installed_pkgs):
            pkgs_to_install.append(pkg)
            continue
        installed_name_ver = '{0}@{1}'.format(pkg_name,
                                              installed_pkgs[pkg_name]['version'])
        if pkg_ver:
            if installed_pkgs[pkg_name].get('version') != pkg_ver:
                pkgs_to_install.append(pkg)
            else:
                pkgs_satisfied.append(installed_name_ver)
            continue
        else:
            pkgs_satisfied.append(installed_name_ver)
            continue
    if __opts__['test']:
        ret['result'] = None
        comment_msg = []
        if pkgs_to_install:
            comment_msg.append('NPM package(s) \'{0}\' are set to be installed'
                               .format(', '.join(pkgs_to_install)))
            ret['changes'] = {'old': [], 'new': pkgs_to_install}
        if pkgs_satisfied:
            comment_msg.append('Package(s) \'{0}\' satisfied by {1}'
                               .format(', '.join(pkg_list), ', '.join(pkgs_satisfied)))
            ret['result'] = True
        ret['comment'] = '. '.join(comment_msg)
        return ret
    if not pkgs_to_install:
        ret['result'] = True
        ret['comment'] = ('Package(s) \'{0}\' satisfied by {1}'
                          .format(', '.join(pkg_list), ', '.join(pkgs_satisfied)))
        return ret
    try:
        cmd_args = {
            'dir': dir,
            'runas': user,
            'registry': registry,
            'env': env,
            'pkgs': pkg_list,
        }
        call = __salt__['npm.install'](**cmd_args)
    except (CommandNotFoundError, CommandExecutionError) as err:
        ret['result'] = False
        ret['comment'] = 'Error installing \'{0}\': {1}'.format(
            ', '.join(pkg_list), err)
        return ret
    if call and (isinstance(call, list) or isinstance(call, dict)):
        ret['result'] = True
        ret['changes'] = {'old': [], 'new': pkgs_to_install}
        ret['comment'] = 'Package(s) \'{0}\' successfully installed'.format(
            ', '.join(pkgs_to_install))
    else:
        ret['result'] = False
        ret['comment'] = 'Could not install package(s) \'{0}\''.format(
            ', '.join(pkg_list))
    return ret | Verify that the given package is installed and is at the correct version
(if specified).
.. code-block:: yaml
coffee-script:
npm.installed:
- user: someuser
[email protected]:
npm.installed: []
name
The package to install
.. versionchanged:: 2014.7.2
This parameter is no longer lowercased by salt so that
case-sensitive NPM package names will work.
pkgs
A list of packages to install with a single npm invocation; specifying
this argument will ignore the ``name`` argument
.. versionadded:: 2014.7.0
dir
The target directory in which to install the package, or None for
global installation
user
The user to run NPM with
.. versionadded:: 0.17.0
registry
The NPM registry from which to install the package
.. versionadded:: 2014.7.0
env
A list of environment variables to be set prior to execution. The
format is the same as the :py:func:`cmd.run <salt.states.cmd.run>`.
state function.
.. versionadded:: 2014.7.0
force_reinstall
Install the package even if it is already installed |
14,466 | def print_summary(self):
for input in self.form.find_all(
("input", "textarea", "select", "button")):
input_copy = copy.copy(input)
for subtag in input_copy.find_all() + [input_copy]:
if subtag.string:
subtag.string = subtag.string.strip()
print(input_copy) | Print a summary of the form.
May help with finding which fields need to be filled in. |
14,467 | def preferred_format(incomplete_format, preferred_formats):
    incomplete_format = long_form_one_format(incomplete_format)
    if 'format_name' in incomplete_format:
        return incomplete_format
    for fmt in long_form_multiple_formats(preferred_formats):
        if ((incomplete_format['extension'] == fmt['extension'] or (
                fmt['extension'] == '.auto' and
                incomplete_format['extension'] not in ['.md', '.Rmd', '.ipynb'])) and
                incomplete_format.get('suffix') == fmt.get('suffix', incomplete_format.get('suffix')) and
                incomplete_format.get('prefix') == fmt.get('prefix', incomplete_format.get('prefix'))):
            fmt.update(incomplete_format)
            return fmt
    return incomplete_format | Return the preferred format for the given extension |
14,468 | def extract_fields(lines, delim, searches, match_lineno=1, **kwargs):
keep_idx = []
for lineno, line in lines:
if lineno < match_lineno or delim not in line:
if lineno == match_lineno:
raise WcutError('No delimiter found on match line {}.'.format(
    match_lineno))
yield [line]
continue
fields = line.split(delim)
if lineno == match_lineno:
keep_idx = list(match_fields(fields, searches, **kwargs))
keep_fields = [fields[i] for i in keep_idx]
if keep_fields:
yield keep_fields | Return generator of fields matching `searches`.
Parameters
----------
lines : iterable
Provides line number (1-based) and line (str)
delim : str
Delimiter to split line by to produce fields
searches : iterable
Returns search (str) to match against line fields.
match_lineno : int
Line number of line to split and search fields
Remaining keyword arguments are passed to `match_fields`. |
14,469 | def completeness(self, catalogue, config, saveplot=False, filetype='png',
                 timeout=120):
    if saveplot and not isinstance(saveplot, str):
        raise ValueError('To save the figures enter a filename for saveplot')
    magnitude_bins = self._get_magnitudes_from_spacing(
        catalogue.data['magnitude'],
        config['magnitude_bin'])
    dec_time = catalogue.get_decimal_time()
    completeness_table = np.zeros([len(magnitude_bins) - 1, 2],
                                  dtype=float)
    min_year = float(np.min(catalogue.data['year']))
    max_year = float(np.max(catalogue.data['year'])) + 1.0
    has_completeness = np.zeros(len(magnitude_bins) - 1, dtype=bool)
    for iloc in range(0, len(magnitude_bins) - 1):
        lower_mag = magnitude_bins[iloc]
        upper_mag = magnitude_bins[iloc + 1]
        idx = np.logical_and(catalogue.data['magnitude'] >= lower_mag,
                             catalogue.data['magnitude'] < upper_mag)
        cumvals = np.cumsum(np.ones(np.sum(idx)))
        plt.plot(dec_time[idx], cumvals, '.')
        plt.xlim(min_year, max_year + 5)
        title_string = 'Magnitude %4.1f to %4.1f' % (lower_mag, upper_mag)
        plt.title(title_string)
        pts = pylab.ginput(1, timeout=timeout)[0]
        if pts[0] <= max_year:
            has_completeness[iloc] = True
            completeness_table[iloc, 0] = np.floor(pts[0])
            completeness_table[iloc, 1] = magnitude_bins[iloc]
            print(completeness_table[iloc, :], has_completeness[iloc])
        if config['increment_lock'] and (iloc > 0) and \
                (completeness_table[iloc, 0] > completeness_table[iloc - 1, 0]):
            completeness_table[iloc, 0] = \
                completeness_table[iloc - 1, 0]
        marker_line = np.array([
            [0., completeness_table[iloc, 0]],
            [cumvals[-1], completeness_table[iloc, 0]]])
        plt.plot(marker_line[:, 0], marker_line[:, 1], '-r')
        if saveplot:
            filename = saveplot + '_' + ('%.2f' % lower_mag) + (
                '_%.2f' % upper_mag) + '.' + filetype
            plt.savefig(filename, format=filetype)
        plt.close()
    return completeness_table[has_completeness, :] | :param catalogue:
Earthquake catalogue as instance of
:class:`openquake.hmtk.seismicity.catalogue.Catalogue`
:param dict config:
Configuration parameters of the algorithm, containing the
following information:
'magnitude_bin' Size of magnitude bin (non-negative float)
'time_bin' Size (in dec. years) of the time window (non-negative
float)
'increment_lock' Boolean to indicate whether to ensure
completeness magnitudes always decrease with more
recent bins
:returns:
2-column table indicating year of completeness and corresponding
magnitude numpy.ndarray |
14,470 | def get_local_annotations(
cls, target, exclude=None, ctx=None, select=lambda *p: True
):
result = []
exclude = () if exclude is None else exclude
try:
local_annotations = get_local_property(
target, Annotation.__ANNOTATIONS_KEY__, result, ctx=ctx
)
if not local_annotations:
if ismethod(target):
func = get_method_function(target)
local_annotations = get_local_property(
func, Annotation.__ANNOTATIONS_KEY__,
result, ctx=ctx
)
if not local_annotations:
local_annotations = get_local_property(
func, Annotation.__ANNOTATIONS_KEY__,
result
)
elif isfunction(target):
local_annotations = get_local_property(
target, Annotation.__ANNOTATIONS_KEY__,
result
)
except TypeError:
raise TypeError('Unhandled target type: {0}'.format(target))
for local_annotation in local_annotations:
inherited = isinstance(local_annotation, cls)
not_excluded = not isinstance(local_annotation, exclude)
selected = select(target, ctx, local_annotation)
if inherited and not_excluded and selected:
result.append(local_annotation)
return result | Get a list of local target annotations in the order of their
definition.
:param type cls: type of annotation to get from target.
:param target: target from where get annotations.
:param tuple/type exclude: annotation types to exclude from selection.
:param ctx: target ctx.
:param select: selection function which takes in parameters a target,
a ctx and an annotation and returns True if the annotation has to
be selected. True by default.
:return: target local annotations.
:rtype: list |
14,471 | def _split_tidy(self, string, maxsplit=None):
if maxsplit is None:
return string.rstrip("\n").split("\t")
else:
return string.rstrip("\n").split("\t", maxsplit) | Rstrips string for \n and splits string for \t |
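The same rstrip-then-split behavior on a sample TSV line:

```python
line = "a\tb\tc\td\n"
print(line.rstrip("\n").split("\t"))     # ['a', 'b', 'c', 'd']
print(line.rstrip("\n").split("\t", 2))  # ['a', 'b', 'c\td']
```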
14,472 | def ae_latent_softmax(latents_pred, latents_discrete, hparams):
vocab_size = 2 ** hparams.z_size
if hparams.num_decode_blocks < 2:
latents_logits = tf.layers.dense(latents_pred, vocab_size,
name="extra_logits")
if hparams.logit_normalization:
latents_logits *= tf.rsqrt(1e-8 +
tf.reduce_mean(tf.square(latents_logits)))
loss = None
if latents_discrete is not None:
if hparams.soft_em:
assert hparams.num_decode_blocks == 1
loss = tf.nn.softmax_cross_entropy_with_logits_v2(
labels=latents_discrete, logits=latents_logits)
else:
loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=latents_discrete, logits=latents_logits)
sample = multinomial_sample(
latents_logits, vocab_size, hparams.sampling_temp)
return sample, loss
vocab_bits = int(math.log(vocab_size, 2))
assert vocab_size == 2**vocab_bits
assert vocab_bits % hparams.num_decode_blocks == 0
block_vocab_size = 2**(vocab_bits // hparams.num_decode_blocks)
latents_logits = [
tf.layers.dense(
latents_pred, block_vocab_size, name="extra_logits_%d" % i)
for i in range(hparams.num_decode_blocks)
]
loss = None
if latents_discrete is not None:
losses = []
for i in range(hparams.num_decode_blocks):
d = tf.floormod(tf.floordiv(latents_discrete,
block_vocab_size**i), block_vocab_size)
losses.append(tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=d, logits=latents_logits[i]))
loss = sum(losses)
samples = [multinomial_sample(l, block_vocab_size, hparams.sampling_temp)
for l in latents_logits]
sample = sum([s * block_vocab_size**i for i, s in enumerate(samples)])
return sample, loss | Latent prediction and loss. |
14,473 | def get_all_groups(region=None, key=None, keyid=None, profile=None):
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
retries = 30
while True:
try:
            next_token = ''
asgs = []
while next_token is not None:
ret = conn.get_all_groups(next_token=next_token)
asgs += [a for a in ret]
next_token = ret.next_token
return asgs
except boto.exception.BotoServerError as e:
            if retries and e.code == 'Throttling':
                log.debug('Throttled by AWS API, will retry in 5 seconds.')
time.sleep(5)
retries -= 1
continue
log.error(e)
return [] | Return all AutoScale Groups visible in the account
(as a list of boto.ec2.autoscale.group.AutoScalingGroup).
.. versionadded:: 2016.11.0
CLI example:
.. code-block:: bash
salt-call boto_asg.get_all_groups region=us-east-1 --output yaml |
14,474 | def hessian(self, x, y, grid_interp_x=None, grid_interp_y=None, f_=None, f_x=None, f_y=None, f_xx=None, f_yy=None, f_xy=None):
n = len(np.atleast_1d(x))
if n <= 1 and np.shape(x) == ():
f_xx_out = self.f_xx_interp(x, y, grid_interp_x, grid_interp_y, f_xx)
f_yy_out = self.f_yy_interp(x, y, grid_interp_x, grid_interp_y, f_yy)
f_xy_out = self.f_xy_interp(x, y, grid_interp_x, grid_interp_y, f_xy)
return f_xx_out[0][0], f_yy_out[0][0], f_xy_out[0][0]
else:
if self._grid and n >= self._min_grid_number:
x_, y_ = util.get_axes(x, y)
f_xx_out = self.f_xx_interp(x_, y_, grid_interp_x, grid_interp_y, f_xx)
f_yy_out = self.f_yy_interp(x_, y_, grid_interp_x, grid_interp_y, f_yy)
f_xy_out = self.f_xy_interp(x_, y_, grid_interp_x, grid_interp_y, f_xy)
f_xx_out = util.image2array(f_xx_out)
f_yy_out = util.image2array(f_yy_out)
f_xy_out = util.image2array(f_xy_out)
else:
f_xx_out, f_yy_out, f_xy_out = np.zeros(n), np.zeros(n), np.zeros(n)
for i in range(n):
f_xx_out[i] = self.f_xx_interp(x[i], y[i], grid_interp_x, grid_interp_y, f_xx)
f_yy_out[i] = self.f_yy_interp(x[i], y[i], grid_interp_x, grid_interp_y, f_yy)
f_xy_out[i] = self.f_xy_interp(x[i], y[i], grid_interp_x, grid_interp_y, f_xy)
return f_xx_out, f_yy_out, f_xy_out | returns Hessian matrix of function d^2f/dx^2, d^f/dy^2, d^2/dxdy |
14,475 | def add_interface_to_router(self, segment_id,
router_name, gip, router_ip, mask, server):
if not segment_id:
segment_id = DEFAULT_VLAN
cmds = []
        for c in self._interfaceDict['add']:  # 'add' key assumed; literal lost in extraction
if self._mlag_configured:
ip = router_ip
else:
                ip = gip + '/' + mask
cmds.append(c.format(segment_id, router_name, ip))
if self._mlag_configured:
            for c in self._additionalInterfaceCmdsDict['add']:  # 'add' key assumed; literal lost in extraction
cmds.append(c.format(gip))
self._run_config_cmds(cmds, server) | Adds an interface to existing HW router on Arista HW device.
:param segment_id: VLAN Id associated with interface that is added
:param router_name: globally unique identifier for router/VRF
:param gip: Gateway IP associated with the subnet
:param router_ip: IP address of the router
:param mask: subnet mask to be used
:param server: Server endpoint on the Arista switch to be configured |
14,476 | def issues(self):
for board in self.get_boards():
        for lst in self.get_lists(board['id']):
            listextra = dict(boardname=board['name'], listname=lst['name'])
            for card in self.get_cards(lst['id']):
issue = self.get_issue_for_record(card, extra=listextra)
issue.update_extra({"annotations": self.annotations(card)})
                yield issue | Yields issues built from the boards, lists and cards of a remote service. |
14,477 | def get_analysis_question(hazard, exposure):
question = specific_analysis_question(hazard, exposure)
if question:
return question
if hazard == hazard_generic:
return question | Construct analysis question based on hazard and exposure.
:param hazard: A hazard definition.
:type hazard: dict
:param exposure: An exposure definition.
:type exposure: dict
:returns: Analysis question based on reporting standards.
:rtype: str |
14,478 | def increment(self, key, value=1):
data, time_ = self._get_payload(key)
integer = int(data) + value
self.put(key, integer, int(time_))
return integer | Increment the value of an item in the cache.
:param key: The cache key
:type key: str
:param value: The increment value
:type value: int
:rtype: int or bool |
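A hedged usage sketch for the increment helper above; the cache object and the put() argument semantics (key, value, expiry) are assumed, not from the source:

cache.put('hits', 0, 60)                  # key, value, expiry time (assumed semantics)
assert cache.increment('hits') == 1
assert cache.increment('hits', value=5) == 6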
14,479 | def update(self, response):
data = response.splitlines()
        _LOGGER.debug('Received data %s from %s', data, self.host)
while data:
line = data.pop(0)
            if 'RTSP/1.0' in line:
                self.rtsp_version = int(line.split()[0][5])
                self.status_code = int(line.split()[1])
                self.status_text = line.split()[2]
            elif 'CSeq' in line:
                self.sequence_ack = int(line.split()[1])
            elif 'Date' in line:
                self.date = line.split(': ')[1]
            elif 'Public' in line:
                self.methods_ack = line.split(': ')[1].split(', ')
            elif "WWW-Authenticate: Basic" in line:
                self.basic = True
                self.realm = line.split('"')[1]
            elif "WWW-Authenticate: Digest" in line:
                self.digest = True
                self.realm = line.split('"')[1]
                self.nonce = line.split('"')[3]
                self.stale = (line.split('stale=')[1] == 'TRUE')
            elif 'Content-Type' in line:
                self.content_type = line.split(': ')[1]
            elif 'Content-Base' in line:
                self.content_base = line.split(': ')[1]
            elif 'Content-Length' in line:
                self.content_length = int(line.split(': ')[1])
            elif 'Session' in line:
                self.session_id = line.split(': ')[1].split(";")[0]
                if 'timeout' in line:
                    self.session_timeout = int(line.split(';')[1].split('=')[1])
            elif 'Transport' in line:
                self.transport_ack = line.split(': ')[1]
            elif 'Range' in line:
                self.range = line.split(': ')[1]
            elif 'RTP-Info' in line:
                self.rtp_info = line.split(': ')[1]
            elif not line:
                if data:
                    self.sdp = data
                break
        if self.sdp:
            stream_found = False
            for param in self.sdp:
                if not stream_found and 'm=' in param:
                    stream_found = True
                elif stream_found and 'control:' in param:
                    self.control_url = param.split(':', 1)[1]
                    break
if self.status_code == 200:
if self.state == STATE_STARTING:
self.sequence += 1
elif self.status_code == 401:
pass
else:
_LOGGER.debug(
"%s RTSP %s %s", self.host, self.status_code, self.status_text) | Update session information from device response.
Increment sequence number when starting stream, not when playing.
If device requires authentication resend previous message with auth. |
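A hedged illustration of feeding a minimal RTSP reply into update() above; the header layout is assumed from the parser, and `session` stands for an instance of this class:

sample = (
    "RTSP/1.0 200 OK\n"
    "CSeq: 1\n"
    "Public: OPTIONS, DESCRIBE, SETUP, PLAY\n"
    "\n"
)
session.update(sample)
assert session.status_code == 200 and session.sequence_ack == 1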
14,480 | def from_string(string, _or=False):
    # Default and branch literals were lost in extraction; the boolean flag and
    # 'or'/'and' values are assumed from the `and_or` keyword below.
    if _or:
        and_or = 'or'
    else:
        and_or = 'and'
return Input(string, and_or=and_or) | Parse a given string and turn it into an input token. |
14,481 | def eval(self, expression):
expression_wrapped = wrap_script.format(expression)
self._libeng.engEvalString(self._ep, expression_wrapped)
        mxresult = self._libeng.engGetVariable(self._ep, 'ERRSTR__')  # variable name assumed from wrap_script
error_string = self._libmx.mxArrayToString(mxresult)
self._libmx.mxDestroyArray(mxresult)
if error_string != "":
raise RuntimeError("Error from MATLAB\n{0}".format(error_string)) | Evaluate `expression` in MATLAB engine.
Parameters
----------
expression : str
Expression is passed to MATLAB engine and evaluated. |
14,482 | def uninstall_pgpm_from_db(self):
        drop_schema_cascade_script = 'DROP SCHEMA {schema_name} CASCADE;'
if self._conn.closed:
self._conn = psycopg2.connect(self._connection_string, connection_factory=pgpm.lib.utils.db.MegaConnection)
cur = self._conn.cursor()
cur.execute(pgpm.lib.utils.db.SqlScriptsHelper.current_user_sql)
current_user = cur.fetchone()[0]
cur.execute(pgpm.lib.utils.db.SqlScriptsHelper.is_superuser_sql)
is_cur_superuser = cur.fetchone()[0]
if not is_cur_superuser:
            self._logger.debug('Current user {0} is not a superuser; '
                               'only a superuser can uninstall pgpm.'.format(current_user))
sys.exit(1)
        self._logger.debug('Dropping schema {0} and all its objects.'.format(self._pgpm_schema_name))
cur.execute(drop_schema_cascade_script.format(schema_name=self._pgpm_schema_name))
self._conn.commit()
self._conn.close()
return 0 | Removes pgpm from db and all related metadata (_pgpm schema). Install packages are left as they are
:return: 0 if successful and error otherwise |
14,483 | def binary_gas_search(state: BaseState, transaction: BaseTransaction, tolerance: int=1) -> int:
    if not hasattr(transaction, 'sender'):
raise TypeError(
"Transaction is missing attribute sender.",
"If sending an unsigned transaction, use SpoofTransaction and provide the",
"sender using the parameter")
minimum_transaction = SpoofTransaction(
transaction,
gas=transaction.intrinsic_gas,
gas_price=0,
)
if _get_computation_error(state, minimum_transaction) is None:
return transaction.intrinsic_gas
maximum_transaction = SpoofTransaction(
transaction,
gas=state.gas_limit,
gas_price=0,
)
error = _get_computation_error(state, maximum_transaction)
if error is not None:
raise error
minimum_viable = state.gas_limit
maximum_out_of_gas = transaction.intrinsic_gas
while minimum_viable - maximum_out_of_gas > tolerance:
midpoint = (minimum_viable + maximum_out_of_gas) // 2
test_transaction = SpoofTransaction(transaction, gas=midpoint)
if _get_computation_error(state, test_transaction) is None:
minimum_viable = midpoint
else:
maximum_out_of_gas = midpoint
return minimum_viable | Run the transaction with various gas limits, progressively
approaching the minimum needed to succeed without an OutOfGas exception.
The starting range of possible estimates is:
[transaction.intrinsic_gas, state.gas_limit].
After the first OutOfGas exception, the range is: (largest_limit_out_of_gas, state.gas_limit].
After the first run not out of gas, the range is: (largest_limit_out_of_gas, smallest_success].
:param int tolerance: When the range of estimates is less than tolerance,
return the top of the range.
:returns int: The smallest confirmed gas to not throw an OutOfGas exception,
subject to tolerance. If OutOfGas is thrown at block limit, return block limit.
:raises VMError: if the computation fails even when given the block gas_limit to complete |
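The estimator above is ordinary bisection on a monotone success predicate; a standalone sketch with hypothetical numbers, not tied to any EVM:

def bisect_min_success(succeeds, lo, hi, tolerance=1):
    # Invariant: lo fails, hi succeeds; shrink the interval to within tolerance.
    while hi - lo > tolerance:
        mid = (lo + hi) // 2
        if succeeds(mid):
            hi = mid
        else:
            lo = mid
    return hi

assert bisect_min_success(lambda gas: gas >= 21000, 0, 8_000_000) == 21000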
14,484 | def product_data_request(self):
msg = StandardSend(self._address,
COMMAND_PRODUCT_DATA_REQUEST_0X03_0X00)
self._send_msg(msg) | Request product data from a device.
Not supported by all devices.
Required after 01-Feb-2007. |
14,485 | def chunks(seq, chunk_size):
return (seq[i:i + chunk_size] for i in range(0, len(seq), chunk_size)) | Split seq into chunk_size-sized chunks.
:param seq: A sequence to chunk.
:param chunk_size: The size of chunk. |
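Usage note: chunks() is lazy, so materialize with list() when a list is needed; the last chunk may be shorter:

assert list(chunks([1, 2, 3, 4, 5], 2)) == [[1, 2], [3, 4], [5]]
assert list(chunks('abcdef', 3)) == ['abc', 'def']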
14,486 | def command(execute=None):
if connexion.request.is_json:
execute = Execute.from_dict(connexion.request.get_json())
if(not hasAccess()):
return redirectUnauthorized()
try:
connector = None
parameters = {}
if (execute.command.parameters):
parameters = execute.command.parameters
credentials = Credentials()
        options = Options(debug=execute.command.options['debug'], sensitive=execute.command.options['sensitive'])
if (execute.auth):
credentials = mapUserAuthToCredentials(execute.auth, credentials)
if (not execute.auth.api_token):
options.sensitive = True
connector = Connector(options=options, credentials=credentials, command=execute.command.command,
parameters=parameters)
commandHandler = connector.execute()
response = Response(status=commandHandler.getRequest().getResponseStatusCode(),
body=json.loads(commandHandler.getRequest().getResponseBody()))
        if (execute.command.options['log']):  # 'log' key assumed; literal lost in extraction
response.log = connector.logBuffer
return response
except:
State.log.error(traceback.format_exc())
        if ('debug' in execute.command.options and execute.command.options['debug']):
return ErrorResponse(status=500,
message="Uncaught exception occured during processing. To get a larger stack trace, visit the logs.",
state=traceback.format_exc(3))
else:
return ErrorResponse(status=500, message="") | Execute a Command
Execute a command # noqa: E501
:param execute: The data needed to execute this command
:type execute: dict | bytes
:rtype: Response |
14,487 | def get_pickled_ontology(filename):
pickledfile = os.path.join(ONTOSPY_LOCAL_CACHE, filename + ".pickle")
if GLOBAL_DISABLE_CACHE:
printDebug(
"WARNING: DEMO MODE cache has been disabled in __init__.py ==============",
"red")
if os.path.isfile(pickledfile) and not GLOBAL_DISABLE_CACHE:
try:
return cPickle.load(open(pickledfile, "rb"))
except:
print(Style.DIM +
"** WARNING: Cache is out of date ** ...recreating it... " +
Style.RESET_ALL)
return None
else:
return None | try to retrieve a cached ontology |
14,488 | def health_node(consul_url=None, token=None, node=None, **kwargs):
ret = {}
query_params = {}
if not consul_url:
consul_url = _get_config()
if not consul_url:
            log.error('No Consul URL found.')
            ret['message'] = 'No Consul URL found.'
            ret['res'] = False
return ret
if not node:
        raise SaltInvocationError('Required argument "node" is missing.')
    if 'dc' in kwargs:
        query_params['dc'] = kwargs['dc']
    function = 'health/node/{0}'.format(node)
ret = _query(consul_url=consul_url,
function=function,
token=token,
query_params=query_params)
return ret | Health information about the registered node.
:param consul_url: The Consul server URL.
:param node: The node to request health information about.
:param dc: By default, the datacenter of the agent is queried;
however, the dc can be provided using the "dc" parameter.
:return: Health information about the requested node.
CLI Example:
.. code-block:: bash
salt '*' consul.health_node node='node1' |
14,489 | def _extend_support_with_default_value(self, x, f, default_value):
with tf.name_scope("extend_support_with_default_value"):
x = tf.convert_to_tensor(value=x, dtype=self.dtype, name="x")
loc = self.loc + tf.zeros_like(self.scale) + tf.zeros_like(x)
x = x + tf.zeros_like(loc)
y = f(tf.where(x < loc, self._inv_z(0.5) + tf.zeros_like(x), x))
if default_value == 0.:
default_value = tf.zeros_like(y)
elif default_value == 1.:
default_value = tf.ones_like(y)
else:
default_value = tf.fill(
dims=tf.shape(input=y),
value=dtype_util.as_numpy_dtype(self.dtype)(default_value))
return tf.where(x < loc, default_value, y) | Returns `f(x)` if x is in the support, and `default_value` otherwise.
Given `f` which is defined on the support of this distribution
(`x >= loc`), extend the function definition to the real line
by defining `f(x) = default_value` for `x < loc`.
Args:
x: Floating-point `Tensor` to evaluate `f` at.
f: Callable that takes in a `Tensor` and returns a `Tensor`. This
represents the function whose domain of definition we want to extend.
default_value: Python or numpy literal representing the value to use for
extending the domain.
Returns:
`Tensor` representing an extension of `f(x)`. |
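A NumPy analogue of the masking trick above, as a sketch (f = log, loc = 0, default_value = 0; illustrative, not the TF code):

import numpy as np
x = np.array([-2.0, -1.0, 1.0, 3.0])
loc = 0.0
safe_x = np.where(x < loc, loc + 1.0, x)    # dummy in-support value, like _inv_z(0.5)
y = np.where(x < loc, 0.0, np.log(safe_x))  # f applied safely, default used below loc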
14,490 | def fetch_entity_cls_from_registry(entity):
if isinstance(entity, str):
try:
return repo_factory.get_entity(entity)
except AssertionError:
raise
else:
return entity | Util Method to fetch an Entity class from an entity's name |
14,491 | def add_object(self, start, obj, object_size):
self._store(start, obj, object_size, overwrite=False) | Add/Store an object to this region at the given offset.
:param start:
:param obj:
:param int object_size: Size of the object
:return: |
14,492 | def text_input(self, window, allow_resize=False):
        window.clear()
        try:
            # Edit loop missing from the source; a minimal curses.textpad
            # loop is assumed here as a placeholder reconstruction.
            import curses.textpad
            textbox = curses.textpad.Textbox(window)
            out = textbox.edit()
        except exceptions.EscapeInterrupt:
out = None
self.curs_set(0)
return self.strip_textpad(out) | Transform a window into a text box that will accept user input and loop
until an escape sequence is entered.
If the escape key (27) is pressed, cancel the textbox and return None.
Otherwise, the textbox will wait until it is full (^j, or a new line is
entered on the bottom line) or the BEL key (^g) is pressed. |
14,493 | def _merge(self, value):
if not value:
return []
if value is not None and not isinstance(value, list):
return value
item_spec = self._nested_validator
return [x if x is None else item_spec.get_default_for(x) for x in value] | Returns a list based on `value`:
* missing required value is converted to an empty list;
* missing required items are never created;
* nested items are merged recursively. |
14,494 | def getRegionsByType(self, regionClass):
regions = []
for region in self.regions.values():
if type(region.getSelf()) is regionClass:
regions.append(region)
return regions | Gets all region instances of a given class
(for example, nupic.regions.sp_region.SPRegion). |
14,495 | def get(self, name, default=None):
session_object = super(NotificationManager, self).get(name, default)
if session_object is not None:
self.delete(name)
return session_object | Retrieves the object with "name", like with SessionManager.get(), but
removes the object from the database after retrieval, so that it can be
retrieved only once |
14,496 | def tag_name(cls, tag):
while isinstance(tag, etree._Element):
tag = tag.tag
        return tag.split('}')[-1] | return the name of the tag, with the namespace removed |
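Example of the Clark-notation stripping this relies on:

assert '{http://www.w3.org/1999/xhtml}body'.split('}')[-1] == 'body'
assert 'body'.split('}')[-1] == 'body'   # names without a namespace pass through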
14,497 | def get_levenshtein(first, second):
if not first:
return len(second)
if not second:
return len(first)
prev_distances = range(0, len(second) + 1)
curr_distances = None
for first_idx, first_char in enumerate(first, start=1):
curr_distances = [first_idx]
        for second_idx, second_char in enumerate(second, start=1):
            cost = 0 if first_char == second_char else 1
            distance = min(
                prev_distances[second_idx - 1] + cost,  # substitution (free on match)
                prev_distances[second_idx] + 1,         # deletion
                curr_distances[second_idx - 1] + 1,     # insertion
            )
            curr_distances.append(distance)
prev_distances = curr_distances
return curr_distances[-1] | \
Get the Levenshtein distance between two strings.
:param first: the first string
:param second: the second string |
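Sanity checks for the edit distance above:

assert get_levenshtein('kitten', 'sitting') == 3
assert get_levenshtein('', 'abc') == 3
assert get_levenshtein('flaw', 'lawn') == 2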
14,498 | def log(arg, base=None):
op = ops.Log(arg, base)
return op.to_expr() | Perform the logarithm using a specified base
Parameters
----------
base : number, default None
If None, base e is used
Returns
-------
logarithm : double type |
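For reference, the optional base reduces to the change-of-base identity log_b(x) = ln(x) / ln(b):

import math
assert math.isclose(math.log(8, 2), math.log(8) / math.log(2))   # both 3.0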
14,499 | def _complete_path(path=None):
if not path:
return _listdir()
dirname, rest = os.path.split(path)
    tmp = dirname if dirname else '.'
res = [p for p in _listdir(tmp) if p.startswith(rest)]
if len(res) > 1 or not os.path.exists(path):
return res
if os.path.isdir(path):
return [p for p in _listdir(path)]
    return [path + ' '] | Perform completion of filesystem path.
https://stackoverflow.com/questions/5637124/tab-completion-in-pythons-raw-input |
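How a completer like this is typically wired into the stdlib readline module (a sketch; the delimiter set is an assumption):

import readline

def completer(text, state):
    # readline calls this repeatedly with state = 0, 1, 2, ... per candidate.
    options = _complete_path(text)
    return options[state] if state < len(options) else None

readline.set_completer_delims(' \t\n;')
readline.parse_and_bind('tab: complete')
readline.set_completer(completer)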