| code | docstring |
---|---|---|
12,800 | def _get_referenced_services(specs):
    active_services = set()
    for app_spec in specs['apps'].values():
        for service in app_spec['depends']['services']:
            active_services.add(service)
    for bundle_spec in specs['bundles'].values():
        for service in bundle_spec['services']:
            active_services.add(service)
    return active_services | Returns all services that are referenced in specs.apps.depends.services,
or in specs.bundles.services |
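A quick usage sketch for the row above; the nested `specs` layout (`apps -> depends -> services` and `bundles -> services`) follows the docstring:

```python
specs = {
    'apps': {'web': {'depends': {'services': ['db', 'cache']}}},
    'bundles': {'core': {'services': ['db', 'queue']}},
}
print(_get_referenced_services(specs))  # {'db', 'cache', 'queue'}
```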
12,801 | def wrap(self, cause):
if isinstance(cause, ApplicationException):
return cause
self.with_cause(cause)
return self | Wraps another exception into an application exception object.
If the original exception is already an ApplicationException, it is returned unchanged.
Otherwise a new ApplicationException is created and the original error is set as its cause.
:param cause: an original error object
:return: an original or newly created ApplicationException |
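A minimal sketch of how `wrap` is used, with a stripped-down stand-in for the surrounding ApplicationException class (the real class carries more state):

```python
class ApplicationException(Exception):
    def __init__(self):
        self.cause = None

    def with_cause(self, cause):
        self.cause = cause

    def wrap(self, cause):
        if isinstance(cause, ApplicationException):
            return cause
        self.with_cause(cause)
        return self

try:
    raise ValueError('bad input')
except ValueError as exc:
    err = ApplicationException().wrap(exc)
    print(err.cause)  # bad input
```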
12,802 | def _sample_oat(problem, N, num_levels=4):
    # 'num_vars' key reconstructed from the SALib problem-dict convention
    group_membership = np.asmatrix(np.identity(problem['num_vars'],
                                               dtype=int))
    num_params = group_membership.shape[0]
    sample = np.zeros((N * (num_params + 1), num_params))
    sample = np.array([generate_trajectory(group_membership,
                                           num_levels)
                       for n in range(N)])
    return sample.reshape((N * (num_params + 1), num_params)) | Generate trajectories without groups
Arguments
---------
problem : dict
The problem definition
N : int
The number of samples to generate
num_levels : int, default=4
The number of grid levels |
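Assuming a SALib-style problem definition and the `generate_trajectory` helper from the same module, a call would look like:

```python
problem = {'num_vars': 3, 'names': ['x1', 'x2', 'x3'],
           'bounds': [[0.0, 1.0]] * 3}
sample = _sample_oat(problem, N=10, num_levels=4)
print(sample.shape)  # (40, 3): N * (num_params + 1) rows
```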
12,803 | def get_dict(self, only_attributes=None, exclude_attributes=None, df_format=False):
    # Excluded attribute names and the 'top_*' result-key prefixes are
    # reconstructed from the COACH attribute names used below.
    to_exclude = ['coach_bsites', 'coach_ec', 'coach_go_mf',
                  'coach_go_bp', 'coach_go_cc']
    if not exclude_attributes:
        excluder = to_exclude
    else:
        excluder = ssbio.utils.force_list(exclude_attributes)
        excluder.extend(to_exclude)
    summary_dict = StructProp.get_dict(self, only_attributes=only_attributes,
                                       exclude_attributes=excluder,
                                       df_format=df_format)
    if self.coach_bsites:
        tmp = {'top_bsite_' + k: v for k, v in self.coach_bsites[0].items()}
        summary_dict.update(tmp)
    if self.coach_ec:
        tmp = {'top_ec_' + k: v for k, v in self.coach_ec[0].items()}
        summary_dict.update(tmp)
    if self.coach_go_mf:
        tmp = {'top_go_mf_' + k: v for k, v in self.coach_go_mf[0].items()}
        summary_dict.update(tmp)
    if self.coach_go_bp:
        tmp = {'top_go_bp_' + k: v for k, v in self.coach_go_bp[0].items()}
        summary_dict.update(tmp)
    if self.coach_go_cc:
        tmp = {'top_go_cc_' + k: v for k, v in self.coach_go_cc[0].items()}
        summary_dict.update(tmp)
    return summary_dict | Summarize the I-TASSER run in a dictionary containing modeling results and top predictions from COACH
Args:
only_attributes (str, list): Attributes that should be returned. If not provided, all are returned.
exclude_attributes (str, list): Attributes that should be excluded.
df_format (bool): If dictionary values should be formatted for a dataframe
(everything possible is transformed into strings, int, or float -
if something can't be transformed it is excluded)
Returns:
dict: Dictionary of attributes |
12,804 | def provide_session(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        arg_session = 'session'
        func_params = func.__code__.co_varnames
        session_in_args = arg_session in func_params and \
            func_params.index(arg_session) < len(args)
        session_in_kwargs = arg_session in kwargs
        if session_in_kwargs or session_in_args:
            return func(*args, **kwargs)
        else:
            with create_session() as session:
                kwargs[arg_session] = session
                return func(*args, **kwargs)
    return wrapper | Function decorator that provides a session if it isn't provided.
If you want to reuse a session or run the function as part of a
database transaction, you pass it to the function, if not this wrapper
will create one and close it for you. |
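Typical use of the decorator; `query_count`, `MyTable`, and `my_session` are hypothetical names for illustration:

```python
@provide_session
def query_count(table, session=None):
    # `session` is injected by the wrapper when the caller omits it
    return session.query(table).count()

query_count(MyTable)                      # decorator opens and closes a session
query_count(MyTable, session=my_session)  # a caller-supplied session is reused
```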
12,805 | def write_monitor_keyring(keyring, monitor_keyring, uid=-1, gid=-1):
write_file(keyring, monitor_keyring, 0o600, None, uid, gid) | create the monitor keyring file |
12,806 | def open(self):
return self.workspace._rest.open_intermediate_dataset_contents(
self.workspace.workspace_id,
self.experiment.experiment_id,
self.node_id,
self.port_name
) | Open and return a stream for the dataset contents. |
12,807 | def state_fidelity(state0: State, state1: State) -> bk.BKTensor:
assert state0.qubits == state1.qubits
tensor = bk.absolute(bk.inner(state0.tensor, state1.tensor))**bk.fcast(2)
return tensor | Return the quantum fidelity between pure states. |
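The same quantity computed directly with NumPy, F = |&lt;psi0|psi1&gt;|^2 for normalized state vectors:

```python
import numpy as np

psi0 = np.array([1.0, 0.0])
psi1 = np.array([1.0, 1.0]) / np.sqrt(2)
fidelity = np.abs(np.vdot(psi0, psi1)) ** 2
print(fidelity)  # 0.5
```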
12,808 | def render(self):
    "Re-render Jupyter cell for batch of images."
    # user-facing message strings reconstructed; originals lost in extraction
    clear_output()
    self.write_csv()
    if self.empty() and self._skipped > 0:
        return display(f'No images to show :). {self._skipped} pairs were '
                       f'skipped since at least one of the images was deleted by the user.')
    elif self.empty():
        return display('No images to show :)')
    if self.batch_contains_deleted():
        self.next_batch(None)
        self._skipped += 1
    else:
        display(self.make_horizontal_box(self.get_widgets(self._duplicates)))
        display(self.make_button_widget('Next Batch', handler=self.next_batch, style="primary")) | Re-render Jupyter cell for batch of images. |
12,809 | def var_explained(y_true, y_pred):
var_resid = K.var(y_true - y_pred)
var_y_true = K.var(y_true)
return 1 - var_resid / var_y_true | Fraction of variance explained. |
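An equivalent NumPy computation, useful for sanity-checking the Keras-backend version above:

```python
import numpy as np

y_true = np.array([1.0, 2.0, 3.0, 4.0])
y_pred = np.array([1.1, 1.9, 3.2, 3.8])
print(1 - np.var(y_true - y_pred) / np.var(y_true))  # 0.98
```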
12,810 | def _allocate_address_nova(self, instance, network_ids):
    self._init_os_api()
    with OpenStackCloudProvider.__node_start_lock:
        free_ips = [ip for ip in self.nova_client.floating_ips.list() if not ip.fixed_ip]
        if not free_ips:
            log.debug("Trying to allocate a new floating IP ...")
            free_ips.append(self.nova_client.floating_ips.create())
        if free_ips:
            ip = free_ips.pop()
        else:
            # `instance_id` was undefined; use the instance's own id
            raise RuntimeError(
                "Could not allocate floating IP for VM {0}"
                .format(instance.id))
        instance.add_floating_ip(ip)
    return ip.ip | Allocates a floating/public ip address to the given instance,
using the OpenStack Compute ('Nova') API.
:param instance: instance to assign address to
:param list network_ids: List of IDs (as strings) of networks
where to request allocation the floating IP. **Ignored**
(only used by the corresponding Neutron API function).
:return: public ip address |
12,811 | def RR_calc(classes, TOP):
try:
class_number = len(classes)
result = sum(list(TOP.values()))
return result / class_number
except Exception:
return "None" | Calculate RR (Global performance index).
:param classes: classes
:type classes : list
:param TOP: test outcome positive
:type TOP : dict
:return: RR as float |
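A worked example: RR is simply the mean of the test-outcome-positive counts over all classes:

```python
classes = ['cat', 'dog', 'bird']
TOP = {'cat': 30, 'dog': 50, 'bird': 20}
print(RR_calc(classes, TOP))  # (30 + 50 + 20) / 3 = 33.33...
```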
12,812 | def latex(self, force=False):
    # string literals reconstructed from the pandas doc builder this row
    # appears to originate from
    if sys.platform == 'win32':
        sys.stderr.write('latex build has not been tested on windows\n')
    else:
        ret_code = self._sphinx_build('latex')
        os.chdir(os.path.join(BUILD_PATH, 'latex'))
        if force:
            for i in range(3):
                self._run_os('pdflatex',
                             '-interaction=nonstopmode',
                             'pandas.tex')
            raise SystemExit('You should check the file '
                             '"build/latex/pandas.pdf" for problems.')
        else:
            self._run_os('make')
        return ret_code | Build PDF documentation. |
12,813 | def avl_join(t1, t2, node):
    # DEBUG messages below are reconstructed placeholders; the original
    # string literals were lost in extraction.
    if DEBUG_JOIN:
        print('JOIN node=%r' % (node,))
    if t1 is None and t2 is None:
        if DEBUG_JOIN:
            print('JOIN case 1: both trees are empty')
        top = node
    elif t1 is None:
        if DEBUG_JOIN:
            print('JOIN case 2: left tree is empty')
        top = avl_insert_dir(t2, node, 0)
    elif t2 is None:
        if DEBUG_JOIN:
            print('JOIN case 3: right tree is empty')
        top = avl_insert_dir(t1, node, 1)
    else:
        h1 = height(t1)
        h2 = height(t2)
        if h1 > h2 + 1:
            if DEBUG_JOIN:
                print('JOIN case 4: left tree is taller')
            top = avl_join_dir_recursive(t1, t2, node, 1)
            if DEBUG_JOIN:
                ascii_tree(t1)
        elif h2 > h1 + 1:
            if DEBUG_JOIN:
                print('JOIN case 5: right tree is taller')
                ascii_tree(t1)
                ascii_tree(t2)
            top = avl_join_dir_recursive(t1, t2, node, 0)
            if DEBUG_JOIN:
                ascii_tree(top)
        else:
            if DEBUG_JOIN:
                print('JOIN case 6: balanced join')
            top = avl_new_top(t1, t2, node, 0)
    return top | Joins two trees `t1` and `t2` with an intermediate key-value pair
CommandLine:
python -m utool.experimental.euler_tour_tree_avl avl_join
Example:
>>> # DISABLE_DOCTEST
>>> from utool.experimental.euler_tour_tree_avl import * # NOQA
>>> self = EulerTourTree(['a', 'b', 'c', 'b', 'd', 'b', 'a'])
>>> other = EulerTourTree(['E', 'F', 'G', 'F', 'E'])
>>> node = Node(value='Q')
>>> root = avl_join(self.root, other.root, node)
>>> new = EulerTourTree(root=root)
>>> print('new = %r' % (new,))
>>> ut.quit_if_noshow()
>>> self.print_tree()
>>> other.print_tree()
>>> new.print_tree()
Example:
>>> # DISABLE_DOCTEST
>>> from utool.experimental.euler_tour_tree_avl import * # NOQA
>>> self = EulerTourTree(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K'])
>>> other = EulerTourTree(['X'])
>>> node = Node(value='Q')
>>> root = avl_join(self.root, other.root, node)
>>> new = EulerTourTree(root=root)
>>> print('new = %r' % (new,))
>>> ut.quit_if_noshow()
>>> ut.qtensure()
>>> #self.show_nx(fnum=1)
>>> #other.show_nx(fnum=2)
>>> new.show_nx()
Running Time:
O(abs(r(t1) - r(t2)))
O(abs(height(t1) - height(t2))) |
12,814 | def filter_query(self, query, field, value):
return query.where(field ** "%{}%".format(value.lower())) | Filter a query. |
12,815 | def get_column_keys_and_names(table):
ins = inspect(table)
return ((k, c.name) for k, c in ins.mapper.c.items()) | Return a generator of tuples k, c such that k is the name of the python attribute for
the column and c is the name of the column in the sql table. |
12,816 | def md5(filename:str)->str:
hash_md5 = hashlib.md5()
with open(filename, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest() | Given a filename produce an md5 hash of the contents.
>>> import tempfile, os
>>> f = tempfile.NamedTemporaryFile(delete=False)
>>> f.write(b'Hello Wirld!')
12
>>> f.close()
>>> md5(f.name)
'997c62b6afe9712cad3baffb49cb8c8a'
>>> os.unlink(f.name) |
12,817 | def load_lang_conf():
    if osp.isfile(LANG_FILE):
        with open(LANG_FILE, 'r') as f:
            lang = f.read()
    else:
        lang = get_interface_language()
        save_lang_conf(lang)
    # Fall back to the default when no translation is provided; this
    # condition was lost in extraction and is reconstructed from the
    # docstring (LANGUAGE_CODES is a stand-in for the module's set of
    # supported translations).
    if lang not in LANGUAGE_CODES:
        lang = DEFAULT_LANGUAGE
        save_lang_conf(lang)
    return lang | Load language setting from language config file if it exists, otherwise
try to use the local settings if Spyder provides a translation, or
return the default if no translation provided. |
12,818 | def hmget(self, key, *fields):
    def format_response(val_array):
        return dict(zip(fields, val_array))
    command = [b'HMGET', key]
    command.extend(fields)
    return self._execute(command, format_callback=format_response) | Returns the values associated with the specified `fields` in a hash.
For every ``field`` that does not exist in the hash, :data:`None`
is returned. Because non-existing keys are treated as empty
hashes, calling :meth:`hmget` against a non-existing key will
return a list of :data:`None` values.
.. note::
*Time complexity*: ``O(N)`` where ``N`` is the number of fields
being requested.
:param key: The key of the hash
:type key: :class:`str`, :class:`bytes`
:param fields: iterable of field names to retrieve
:returns: a :class:`dict` of field name to value mappings for
each of the requested fields
:rtype: dict |
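A usage sketch, assuming `client` is an instance of the async Redis client this method belongs to and the coroutine is driven by its event loop:

```python
async def show_user(client):
    values = await client.hmget('user:1', 'name', 'email', 'missing')
    # e.g. {'name': b'alice', 'email': b'a@example.com', 'missing': None}
    print(values)
```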
12,819 | def GetEntries(self, parser_mediator, match=None, **unused_kwargs):
    # plist key names ('Devices', 'Connected') reconstructed from the
    # iPod plist layout this parser targets
    devices = match.get('Devices', {})
    for device_identifier, device_information in iter(devices.items()):
        datetime_value = device_information.get('Connected', None)
        if not datetime_value:
            continue
        event_data = IPodPlistEventData()
        event_data.device_id = device_identifier
        for key, value in iter(device_information.items()):
            if key == 'Connected':
                continue
            attribute_name = key.lower().replace(' ', '_')
            setattr(event_data, attribute_name, value)
        event = time_events.PythonDatetimeEvent(
            datetime_value, definitions.TIME_DESCRIPTION_LAST_CONNECTED)
        parser_mediator.ProduceEventWithEventData(event, event_data) | Extract device information from the iPod plist.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
match (Optional[dict[str: object]]): keys extracted from PLIST_KEYS. |
12,820 | def add_fields(self, log_record, record, message_dict):
    for field in self._required_fields:
        log_record[field] = record.__dict__.get(field)
    log_record.update(message_dict)
    merge_record_extra(record, log_record, reserved=self._skip_fields)
    if self.timestamp:
        key = self.timestamp if type(self.timestamp) == str else 'timestamp'
        log_record[key] = datetime.utcnow() | Override this method to implement custom logic for adding fields. |
12,821 | def rgb(self, **kwargs):
    # assert/error messages and the 'dra' option key are reconstructed
    if "bands" in kwargs:
        use_bands = kwargs["bands"]
        assert len(use_bands) == 3, 'rgb requires exactly 3 bands'
        del kwargs["bands"]
    else:
        use_bands = self._rgb_bands
    if kwargs.get('histogram') == True:
        return self.histogram_match(use_bands, **kwargs)
    elif kwargs["histogram"] == "ignore" or self.options.get('dra'):
        data = self._read(self[use_bands, ...], **kwargs)
        return np.rollaxis(data, 0, 3)
    else:
        raise KeyError('histogram parameter not recognized') | Convert the image to a 3 band RGB for plotting
This method shares the same arguments as plot(). It will perform visual adjustment on the
image and prepare the data for plotting in MatplotLib. Values are converted to an
appropriate precision and the axis order is changed to put the band axis last. |
12,822 | def ttl(self, key):
value = self.pttl(key)
if value is None or value < 0:
return value
return value // 1000 | Emulate ttl
Even though the official redis commands documentation at http://redis.io/commands/ttl
states "Return value: Integer reply: TTL in seconds, -2 when key does not exist or -1
when key does not have a timeout." the redis-py lib returns None for both these cases.
The lib behavior has been emulated here.
:param key: key for which ttl is requested.
:returns: the number of seconds till timeout, None if the key does not exist or if the
key has no timeout(as per the redis-py lib behavior). |
12,823 | def api_key(value=None):
    if value is None:
        return PyGraphistry._config['api_key']
    if value is not PyGraphistry._config['api_key']:
        PyGraphistry._config['api_key'] = value.strip()
        PyGraphistry._is_authenticated = False | Set or get the API key.
Also set via environment variable GRAPHISTRY_API_KEY. |
12,824 | def add_uppercase(table):
orig = table.copy()
orig.update(
dict((k.capitalize(), v.capitalize()) for k, v in table.items()))
return orig | Extend the table with uppercase options
>>> print("а" in add_uppercase({"а": "a"}))
True
>>> print(add_uppercase({"а": "a"})["а"] == "a")
True
>>> print("А" in add_uppercase({"а": "a"}))
True
>>> print(add_uppercase({"а": "a"})["А"] == "A")
True
>>> print(len(add_uppercase({"а": "a"}).keys()))
2
>>> print("Аа" in add_uppercase({"аа": "aa"}))
True
>>> print(add_uppercase({"аа": "aa"})["Аа"] == "Aa")
True |
12,825 | def _get_controllers(self):
    controllers = dict()
    for pkg in pkg_resources.iter_entry_points(group=self.CONTROLLERS):
        LOGGER.debug('Loading controller %s', pkg.name)  # message reconstructed
        controllers[pkg.name] = importlib.import_module(pkg.module_name)
    return controllers | Iterate through the installed controller entry points and import
the module and assign the handle to the CLI._controllers dict.
:return: dict |
12,826 | def object2code(key, code):
if key in ["xscale", "yscale"]:
if code == "log":
code = True
else:
code = False
else:
code = unicode(code)
return code | Returns code for widget from dict object |
12,827 | def _init_go_sets(self, go_fins):
    go_sets = []
    assert go_fins, "EXPECTED FILES CONTAINING GO IDs"
    assert len(go_fins) >= 2, "EXPECTED 2+ GO LISTS. FOUND: {L}".format(
        L=' '.join(go_fins))  # join separator reconstructed
    obj = GetGOs(self.godag)
    for fin in go_fins:
        assert os.path.exists(fin), "GO FILE({F}) DOES NOT EXIST".format(F=fin)
        go_sets.append(obj.get_usrgos(fin, sys.stdout))
    return go_sets | Get lists of GO IDs. |
12,828 | def atom_fractions(atoms):
count = sum(atoms.values())
afracs = {}
for i in atoms:
afracs[i] = atoms[i]/count
return afracs | r'''Calculates the atomic fractions of each element in a compound,
given a dictionary of its atoms and their counts, in the format
{symbol: count}.
.. math::
a_i = \frac{n_i}{\sum_i n_i}
Parameters
----------
atoms : dict
dictionary of counts of individual atoms, indexed by symbol with
proper capitalization, [-]
Returns
-------
afracs : dict
dictionary of atomic fractions of individual atoms, indexed by symbol
with proper capitalization, [-]
Notes
-----
No actual data on the elements is used, so incorrect or custom compounds
would not raise an error.
Examples
--------
>>> atom_fractions({'H': 12, 'C': 20, 'O': 5})
{'H': 0.32432432432432434, 'C': 0.5405405405405406, 'O': 0.13513513513513514}
References
----------
.. [1] RDKit: Open-source cheminformatics; http://www.rdkit.org |
12,829 | def V_vertical_conical_concave(D, a, h):
if h < abs(a):
Vf = pi*D**2/12.*(3*h + a - (a+h)**3/a**2)
else:
Vf = pi*D**2/12.*(3*h + a)
return Vf | r'''Calculates volume of a vertical tank with a concave conical bottom,
according to [1]_. No provision for the top of the tank is made here.
.. math::
V = \frac{\pi D^2}{12} \left(3h + a - \frac{(a+h)^3}{a^2}\right)
,\;\; 0 \le h < |a|
.. math::
V = \frac{\pi D^2}{12} (3h + a ),\;\; h \ge |a|
Parameters
----------
D : float
Diameter of the main cylindrical section, [m]
a : float
Negative distance the cone head extends inside the main cylinder, [m]
h : float
Height, as measured up to where the fluid ends, [m]
Returns
-------
V : float
Volume [m^3]
Examples
--------
Matching example from [1]_, with inputs in inches and volume in gallons.
>>> V_vertical_conical_concave(D=113., a=-33, h=15)/231
251.15825565795188
References
----------
.. [1] Jones, D. "Compute Fluid Volumes in Vertical Tanks." Chemical
Processing. December 18, 2003.
http://www.chemicalprocessing.com/articles/2003/193/ |
12,830 | def get_lang_array(self):
r = self.yandex_translate_request("getLangs", "")
self.handle_errors(r)
return r.json()["dirs"] | gets supported langs as an array |
12,831 | def format_timedelta(td_object):
    def get_total_seconds(td):
        return (td.microseconds +
                (td.seconds + td.days * 24 * 3600) * 1e6) / 1e6
    seconds = int(get_total_seconds(td_object))
    periods = [('year', 60*60*24*365),
               ('month', 60*60*24*30),
               ('day', 60*60*24),
               ('hour', 60*60),
               ('minute', 60),
               ('second', 1)]
    strings = []
    for period_name, period_seconds in periods:
        if seconds > period_seconds:
            period_value, seconds = divmod(seconds, period_seconds)
            if period_value == 1:
                strings.append("%s %s" % (period_value, period_name))
            else:
                strings.append("%s %ss" % (period_value, period_name))
    return ", ".join(strings) | Format a timedelta object for display to users
Returns
-------
str |
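Example output (note the 365-day year and 30-day month approximations in the period table):

```python
from datetime import timedelta

td = timedelta(days=400, seconds=3700)
print(format_timedelta(td))
# 1 year, 1 month, 5 days, 1 hour, 1 minute, 40 seconds
```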
12,832 | def get_context_json(self, context):
    # Key names reconstructed from the private helper names; the three
    # remaining table keys are hypothetical placeholders for literals
    # lost in extraction.
    answer = {}
    answer['metadata'] = self.__jcontext_metadata(context)
    answer['filter'] = self.__jcontext_filter(context)
    answer['table'] = {}
    answer['table']['head'] = self.__jcontext_tablehead(context)
    answer['table']['body'] = None
    answer['table']['foot'] = None
    answer['table']['extra'] = None
    return answer | Return a base answer for a json answer |
12,833 | def action_logging(f):
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        with create_session() as session:
            if g.user.is_anonymous:
                user = 'anonymous'
            else:
                user = g.user.username
            log = Log(
                event=f.__name__,
                task_instance=None,
                owner=user,
                extra=str(list(request.args.items())),
                task_id=request.args.get('task_id'),
                dag_id=request.args.get('dag_id'))
            if 'execution_date' in request.args:
                log.execution_date = pendulum.parse(
                    request.args.get('execution_date'))
            session.add(log)
        return f(*args, **kwargs)
    return wrapper | Decorator to log user actions |
12,834 | def _edge_mapping(G):
edge_mapping = {edge: idx for idx, edge in enumerate(G.edges)}
edge_mapping.update({(e1, e0): idx for (e0, e1), idx in edge_mapping.items()})
return edge_mapping | Assigns a variable for each edge in G.
(u, v) and (v, u) map to the same variable. |
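A small check with networkx showing that both orientations of an edge share one variable index:

```python
import networkx as nx

G = nx.Graph([(0, 1), (1, 2)])
mapping = _edge_mapping(G)
assert mapping[(0, 1)] == mapping[(1, 0)]
print(mapping)  # e.g. {(0, 1): 0, (1, 2): 1, (1, 0): 0, (2, 1): 1}
```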
12,835 | def ParseRow(header,
row):
precondition.AssertDictType(row, Text, Text)
result = rdf_osquery.OsqueryRow()
for column in header.columns:
result.values.append(row[column.name])
return result | Parses a single row of osquery output.
Args:
header: A parsed header describing the row format.
row: A row in a "parsed JSON" representation.
Returns:
A parsed `rdf_osquery.OsqueryRow` instance. |
12,836 | def repo_data(PACKAGES_TXT, repo, flag):
(name, location, size, unsize,
rname, rlocation, rsize, runsize) = ([] for i in range(8))
for line in PACKAGES_TXT.splitlines():
if _meta_.rsl_deps in ["on", "ON"] and "--resolve-off" not in flag:
status(0.000005)
if line.startswith("PACKAGE NAME:"):
name.append(line[15:].strip())
if line.startswith("PACKAGE LOCATION:"):
location.append(line[21:].strip())
if line.startswith("PACKAGE SIZE (compressed):"):
size.append(line[28:-2].strip())
if line.startswith("PACKAGE SIZE (uncompressed):"):
unsize.append(line[30:-2].strip())
if repo == "rlw":
(rname,
rlocation,
rsize,
runsize
) = rlw_filter(name, location, size, unsize)
elif repo == "alien":
(rname,
rlocation,
rsize,
runsize
) = alien_filter(name, location, size, unsize)
elif repo == "rested":
(rname,
rlocation,
rsize,
runsize
) = rested_filter(name, location, size, unsize)
elif repo == "ktown":
(rname,
rlocation,
rsize,
runsize
) = ktown_filter(name, location, size, unsize)
else:
rname, rlocation, rsize, runsize = name, location, size, unsize
return [rname, rlocation, rsize, runsize] | Grab data packages |
12,837 | def lal(self):
lal_data = None
if self._data.dtype == float32:
lal_data = _lal.CreateREAL4Vector(len(self))
elif self._data.dtype == float64:
lal_data = _lal.CreateREAL8Vector(len(self))
elif self._data.dtype == complex64:
lal_data = _lal.CreateCOMPLEX8Vector(len(self))
elif self._data.dtype == complex128:
lal_data = _lal.CreateCOMPLEX16Vector(len(self))
lal_data.data[:] = self.numpy()
return lal_data | Returns a LAL Object that contains this data |
12,838 | def correct_rates(rates, opt_qes, combs):
corrected_rates = np.array([
rate / opt_qes[comb[0]] / opt_qes[comb[1]]
for rate, comb in zip(rates, combs)
])
return corrected_rates | Applies optimal qes to rates.
Should be closer to fitted_rates afterwards.
Parameters
----------
rates: numpy array of rates of all PMT combinations
opt_qes: numpy array of optimal qe values for all PMTs
combs: pmt combinations used to correct
Returns
-------
corrected_rates: numpy array of corrected rates for all PMT combinations |
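A worked example with two PMT pairs; each rate is divided by the optimal QE of both PMTs in its pair:

```python
import numpy as np

rates = np.array([100.0, 80.0])
opt_qes = np.array([1.0, 0.8, 1.25])  # one QE value per PMT
combs = [(0, 1), (1, 2)]              # PMT index pairs
print(correct_rates(rates, opt_qes, combs))  # [125.  80.]
```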
12,839 | def create(args):
    with _catalog(args) as cat:
        for fname, created, obj in cat.create(args.args[0], {}):
            # log message reconstructed
            args.log.info('{0} -> {1} object {2}'.format(
                fname, 'created' if created else 'existing', obj)) | cdstarcat create PATH
Create objects in CDSTAR specified by PATH.
When PATH is a file, a single object (possibly with multiple bitstreams) is created;
When PATH is a directory, an object will be created for each file in the directory
(recursing into subdirectories). |
12,840 | def extract_package_name(line):
    # asserted characters reconstructed
    assert '\\' not in line
    assert '(' not in line
    assert ')' not in line
    assert ';' not in line
    if line.lstrip().startswith(('import', 'from')):
        word = line.split()[1]
    else:
        return None
    package = word.split('.')[0]
    assert ' ' not in package
    return package | Return package name in import statement. |
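Behaviour on a few sample lines:

```python
print(extract_package_name('import os.path'))            # 'os'
print(extract_package_name('from requests import get'))  # 'requests'
print(extract_package_name('x = 1'))                     # None
```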
12,841 | def getSimilarTermsForTerm(self, term, contextId=None, posType=None, getFingerprint=None, startIndex=0, maxResults=10):
return self._terms.getSimilarTerms(self._retina, term, contextId, posType, getFingerprint, startIndex, maxResults) | Get the similar terms of a given term
Args:
term, str: A term in the retina (required)
contextId, int: The identifier of a context (optional)
posType, str: Part of speech (optional)
getFingerprint, bool: Configure if the fingerprint should be returned as part of the results (optional)
startIndex, int: The start-index for pagination (optional)
maxResults, int: Max results per page (optional)
Returns:
list of Term
Raises:
CorticalioException: if the request was not successful |
12,842 | def decrypt_from(self, f, mac_bytes=10):
ctx = DecryptionContext(self.curve, f, self, mac_bytes)
yield ctx
ctx.read() | Decrypts a message from f. |
12,843 | def create_aside(self, block_type, keys):
aside_cls = XBlockAside.load_class(block_type)
return aside_cls(runtime=self, scope_ids=keys) | The aside version of construct_xblock: take a type and key. Return an instance |
12,844 | def _make_string_formatter(f, offset=None):
format = f
delta = offset
return lambda v: time.strftime(format, (_date(v, delta)).timetuple()) | A closure-izer for string arguments that include a format and possibly an offset. |
12,845 | def com_google_fonts_check_ttx_roundtrip(font):
from fontTools import ttx
import sys
ttFont = ttx.TTFont(font)
failed = False
class TTXLogger:
msgs = []
def __init__(self):
self.original_stderr = sys.stderr
self.original_stdout = sys.stdout
sys.stderr = self
sys.stdout = self
def write(self, data):
if data not in self.msgs:
self.msgs.append(data)
def restore(self):
sys.stderr = self.original_stderr
sys.stdout = self.original_stdout
from xml.parsers.expat import ExpatError
try:
logger = TTXLogger()
xml_file = font + ".xml"
ttFont.saveXML(xml_file)
export_error_msgs = logger.msgs
if len(export_error_msgs):
failed = True
yield INFO, ("While converting TTF into an XML file,"
" ttx emited the messages listed below.")
for msg in export_error_msgs:
yield FAIL, msg.strip()
f = ttx.TTFont()
f.importXML(font + ".xml")
import_error_msgs = [msg for msg in logger.msgs if msg not in export_error_msgs]
if len(import_error_msgs):
failed = True
yield INFO, ("While importing an XML file and converting"
" it back to TTF, ttx emited the messages"
" listed below.")
for msg in import_error_msgs:
yield FAIL, msg.strip()
logger.restore()
except ExpatError as e:
failed = True
yield FAIL, ("TTX had some problem parsing the generated XML file."
" This most likely mean there's some problem in the font."
" Please inspect the output of ttx in order to find more"
" on what went wrong. A common problem is the presence of"
" control characteres outside the accepted character range"
" as defined in the XML spec. FontTools has got a bug which"
" causes TTX to generate corrupt XML files in those cases."
" So, check the entries of the name table and remove any"
" control chars that you find there."
" The full ttx error message was:\n"
"======\n{}\n======".format(e))
if not failed:
yield PASS, "Hey! It all looks good!"
if os.path.exists(xml_file):
os.remove(xml_file) | Checking with fontTools.ttx |
12,846 | def flattenTrees(root, nodeSelector: Callable[[LNode], bool]):
for ch in root.children:
if ch.children:
flattenTrees(ch, nodeSelector)
reducibleChildren = set()
for ch in root.children:
if nodeSelector(ch):
reducibleChildren.add(ch)
while reducibleChildren:
_treeRoot = reducibleChildren.pop()
reducibleChildren.add(_treeRoot)
treeRoot = searchRootOfTree(reducibleChildren, _treeRoot)
reducedNodes, inputEdges = collectNodesInTree(treeRoot, reducibleChildren)
if len(reducedNodes) > 1:
newName = reducedNodes[0].name
newNode = root.addNode(newName)
o = newNode.addPort("", PortType.OUTPUT, PortSide.EAST)
oEdges = treeRoot.east[0].outgoingEdges
for outputedge in list(oEdges):
dsts = list(outputedge.dsts)
assert len(dsts) > 0
outputedge.remove()
root.addHyperEdge([o, ], dsts, originObj=outputedge.originObj)
for i, (iN, iP, iE) in enumerate(inputEdges):
name = None
index = len(inputEdges) - i - 1
if hasattr(iE.originObj, "_dtype"):
w = iE.originObj._dtype.bit_length()
if w > 1:
name = "[%d:%d]" % ((index + 1) * w, index * w)
else:
name = None
if name is None:
name = "[%d]" % (index)
inp = newNode.addPort(name,
PortType.INPUT, PortSide.WEST)
iE.removeTarget(iP)
iE.addTarget(inp)
for n in reducedNodes:
root.children.remove(n)
reducibleChildren.remove(n)
else:
reducibleChildren.remove(reducedNodes[0]) | Walk all nodes and discover trees of nodes (usually operators)
and reduce them to single node with multiple outputs
:attention: selected nodes have to have a single output
and have to be connected to nets with a single driver |
12,847 | def woodbury_inv(self):
if self._woodbury_inv is None:
if self._woodbury_chol is not None:
self._woodbury_inv, _ = dpotri(self._woodbury_chol, lower=1)
symmetrify(self._woodbury_inv)
elif self._covariance is not None:
B = np.atleast_3d(self._K) - np.atleast_3d(self._covariance)
self._woodbury_inv = np.empty_like(B)
for i in range(B.shape[-1]):
tmp, _ = dpotrs(self.K_chol, B[:, :, i])
self._woodbury_inv[:, :, i], _ = dpotrs(self.K_chol, tmp.T)
return self._woodbury_inv | The inverse of the woodbury matrix, in the gaussian likelihood case it is defined as
$$
(K_{xx} + \Sigma_{xx})^{-1}
\Sigma_{xx} := \texttt{Likelihood.variance / Approximate likelihood covariance}
$$ |
12,848 | def shape(self):
    bf = self.copy()
    content = requests.get(bf.dataset_url).json()
    # 'rowCount'/'valueCount' come from the docstring; the outer 'status'
    # key is a hypothetical placeholder for a literal lost in extraction
    rowCount = content['status']['rowCount']
    valueCount = content['status']['valueCount']
    return (rowCount, valueCount) | Returns (rowCount, valueCount) |
12,849 | def start_stack(awsclient, stack_name, use_suspend=False):
    # Log-message literals reconstructed; resource-type strings and
    # response keys follow the boto3/CloudFormation API conventions.
    exit_code = 0
    if not stack_exists(awsclient, stack_name):
        log.warn('Stack \'%s\' does not exist', stack_name)
    else:
        client_cfn = awsclient.get_client('cloudformation')
        client_autoscaling = awsclient.get_client('autoscaling')
        client_rds = awsclient.get_client('rds')
        resources = all_pages(
            client_cfn.list_stack_resources,
            {'StackName': stack_name},
            lambda r: r['StackResourceSummaries']
        )
        autoscaling_groups = [
            r for r in resources
            if r['ResourceType'] == 'AWS::AutoScaling::AutoScalingGroup'
        ]
        response = client_autoscaling.describe_scaling_process_types()
        scaling_process_types = [t['ProcessName'] for t in response.get('Processes', [])]
        db_instances = [
            r['PhysicalResourceId'] for r in resources
            if r['ResourceType'] == 'AWS::RDS::DBInstance'
        ]
        stopped_db_instances = _filter_db_instances_by_status(
            awsclient, db_instances, ['stopped']
        )
        for db in stopped_db_instances:
            log.info('Starting RDS instance \'%s\'', db)
            client_rds.start_db_instance(DBInstanceIdentifier=db)
        for db in stopped_db_instances:
            waiter_db_available = client_rds.get_waiter('db_instance_available')
            waiter_db_available.wait(DBInstanceIdentifier=db)
        instances = [
            r['PhysicalResourceId'] for r in resources
            if r['ResourceType'] == 'AWS::EC2::Instance'
        ]
        _start_ec2_instances(awsclient, instances)
        services = [
            r for r in resources
            if r['ResourceType'] == 'AWS::ECS::Service'
        ]
        if (autoscaling_groups and not use_suspend) or services:
            template, parameters = _get_template_parameters(awsclient, stack_name)
            if services:
                _start_ecs_services(awsclient, services, template, parameters)
        for asg in autoscaling_groups:
            if use_suspend:
                instances = all_pages(
                    client_autoscaling.describe_auto_scaling_instances,
                    {},
                    lambda r: [i['InstanceId'] for i in r.get('AutoScalingInstances', [])
                               if i['AutoScalingGroupName'] == asg['PhysicalResourceId']],
                )
                _start_ec2_instances(awsclient, instances)
                log.info('Resuming autoscaling group \'%s\'',
                         asg['PhysicalResourceId'])
                response = client_autoscaling.resume_processes(
                    AutoScalingGroupName=asg['PhysicalResourceId'],
                    ScalingProcesses=scaling_process_types
                )
            else:
                log.info('Starting autoscaling group \'%s\'',
                         asg['PhysicalResourceId'])
                min, max = _get_autoscaling_min_max(
                    template, parameters, asg['LogicalResourceId'])
                response = client_autoscaling.update_auto_scaling_group(
                    AutoScalingGroupName=asg['PhysicalResourceId'],
                    MinSize=min,
                    MaxSize=max
                )
    return exit_code | Start an existing stack on AWS cloud.
:param awsclient:
:param stack_name:
:param use_suspend: use suspend and resume on the autoscaling group
:return: exit_code |
12,850 | def _derive_charge(self, config):
    # Dict keys and the event payload are reconstructed from the docstring
    if self._roast.get('charge'):  # avoid overwriting an existing charge
        return None
    self._window.append(config)
    time, temp = list(), list()
    for x in list(self._window):
        time.append(x['time'])
        temp.append(x['temperature'])
    slope, intercept, r_value, p_value, std_err = linregress(time, temp)
    if slope < 0:
        self._roast['charge'] = self._roast['time']
        self.add_roast_event({'event': 'Charge'})
        return config
    return None | Use a temperature window to identify the roast charge.
The charge will manifest as a sudden downward trend on the temperature.
Once found, we save it and avoid overwriting. The charge is needed in
order to derive the turning point.
:param config: Current snapshot of the configuration
:type config: dict
:returns: None |
12,851 | def get_configured_provider():
return config.is_provider_configured(
opts=__opts__,
provider=__active_provider_name__ or __virtualname__,
aliases=__virtual_aliases__,
required_keys=(,)
) | Return the first configured instance. |
12,852 | def addresses(self):
import ns1.rest.ipam
return ns1.rest.ipam.Addresses(self.config) | Return a new raw REST interface to address resources
:rtype: :py:class:`ns1.rest.ipam.Addresses` |
12,853 | def read_status(self, num_bytes=2):
SPIFLASH_RDSR = 0x05
SPIFLASH_RDSR2 = 0x35
SPIFLASH_RDSR3 = 0x15
status = 0
shift = 0
for cmd in [SPIFLASH_RDSR, SPIFLASH_RDSR2, SPIFLASH_RDSR3][0:num_bytes]:
status += self.run_spiflash_command(cmd, read_bits=8) << shift
shift += 8
return status | Read up to 24 bits (num_bytes) of SPI flash status register contents
via RDSR, RDSR2, RDSR3 commands
Not all SPI flash supports all three commands. The upper 1 or 2
bytes may be 0xFF. |
12,854 | def match_url(self, request):
    parsed_url = urlparse(request.path_url)
    path_url = parsed_url.path
    query_params = parsed_url.query
    match = None
    for path in self.paths:
        for item in self.index:
            target_path = os.path.join(BASE_PATH, path, path_url[1:])
            query_path = target_path.lower() + quote(
                '?' + query_params).lower()
            if target_path.lower() == item[0]:
                match = item[1]
                break
            elif query_path == item[0]:
                match = item[1]
                break
    return match | Match the request against a file in the adapter directory
:param request: The request
:type request: :class:`requests.Request`
:return: Path to the file
:rtype: ``str`` |
12,855 | def precondition_u_kn(u_kn, N_k, f_k):
u_kn, N_k, f_k = validate_inputs(u_kn, N_k, f_k)
u_kn = u_kn - u_kn.min(0)
u_kn += (logsumexp(f_k - u_kn.T, b=N_k, axis=1)) - N_k.dot(f_k) / float(N_k.sum())
return u_kn | Subtract a sample-dependent constant from u_kn to improve precision
Parameters
----------
u_kn : np.ndarray, shape=(n_states, n_samples), dtype='float'
The reduced potential energies, i.e. -log unnormalized probabilities
N_k : np.ndarray, shape=(n_states), dtype='int'
The number of samples in each state
f_k : np.ndarray, shape=(n_states), dtype='float'
The reduced free energies of each state
Returns
-------
u_kn : np.ndarray, shape=(n_states, n_samples), dtype='float'
The reduced potential energies, i.e. -log unnormalized probabilities
Notes
-----
Returns u_kn - x_n, where x_n is based on the current estimate of f_k.
Upon subtraction of x_n, the MBAR objective function changes by an
additive constant, but its derivatives remain unchanged. We choose
x_n such that the current objective function value is zero, which
should give maximum precision in the objective function. |
12,856 | def mouseDoubleClickEvent(self, event):
    index_clicked = self.indexAt(event.pos())
    if self.model.breakpoints:
        filename = self.model.breakpoints[index_clicked.row()][0]
        line_number_str = self.model.breakpoints[index_clicked.row()][1]
        self.edit_goto.emit(filename, int(line_number_str), '')
        if index_clicked.column() == 2:
            self.set_or_edit_conditional_breakpoint.emit() | Reimplement Qt method |
12,857 | def _transform_legacy_stats(self, stats):
    if stats and 'pools' not in stats:
        pool = stats.copy()
        pool['id'] = self.id  # key name reconstructed
        # The six per-pool key names popped here were lost in extraction.
        for key in ():
            pool.pop(key, None)
        stats['pools'] = [pool]
    return stats | Convert legacy stats to new stats with pools key. |
12,858 | def visible_fields(self):
form_visible_fields = self.form.visible_fields()
if self.render_fields:
fields = self.render_fields
else:
fields = [field.name for field in form_visible_fields]
filtered_fields = [field for field in fields if field not in self.exclude_fields]
return [field for field in form_visible_fields if field.name in filtered_fields] | Returns the reduced set of visible fields to output from the form.
This method respects the provided ``fields`` configuration _and_ excludes
all fields from the ``exclude`` configuration.
If no ``fields`` where provided when configuring this fieldset, all visible
fields minus the excluded fields will be returned.
:return: List of bound field instances or empty tuple. |
12,859 | def predict_compound_pairs_iterated(
reactions, formulas, prior=(1, 43), max_iterations=None,
element_weight=element_weight):
prior_alpha, prior_beta = prior
reactions = dict(reactions)
pair_reactions = {}
possible_pairs = Counter()
for reaction_id, equation in iteritems(reactions):
for (c1, _), (c2, _) in product(equation.left, equation.right):
spair = tuple(sorted([c1.name, c2.name]))
possible_pairs[spair] += 1
pair_reactions.setdefault(spair, set()).add(reaction_id)
next_reactions = set(reactions)
pairs_predicted = None
prediction = {}
weights = {}
iteration = 0
while len(next_reactions) > 0:
iteration += 1
if max_iterations is not None and iteration > max_iterations:
break
logger.info('Iteration {}: {} reactions to process...'.format(  # message reconstructed
iteration, len(next_reactions)))
for reaction_id in next_reactions:
result = predict_compound_pairs(
reactions[reaction_id], formulas, weights, element_weight)
if result is None:
continue
transfer, balance = result
rpairs = {}
for ((c1, _), (c2, _)), form in iteritems(transfer):
rpairs.setdefault((c1, c2), []).append(form)
prediction[reaction_id] = rpairs, balance
pairs_predicted = Counter()
for reaction_id, (rpairs, _) in iteritems(prediction):
for c1, c2 in rpairs:
spair = tuple(sorted([c1.name, c2.name]))
pairs_predicted[spair] += 1
next_reactions = set()
for spair, total in sorted(iteritems(possible_pairs)):
pred = pairs_predicted[spair]
posterior_alpha = prior_alpha + pred
posterior_beta = prior_beta + total - pred
pair_weight = ((posterior_alpha - 1) /
(posterior_alpha + posterior_beta - 2))
if (spair not in weights or
abs(pair_weight - weights[spair]) > 1e-5):
next_reactions.update(pair_reactions[spair])
c1, c2 = spair
weights[c1, c2] = pair_weight
weights[c2, c1] = pair_weight
return prediction, iteration | Predict reaction pairs using iterated method.
Returns a tuple containing a dictionary of predictions keyed by the
reaction IDs, and the final number of iterations. Each reaction prediction
entry contains a tuple with a dictionary of transfers and a dictionary of
unbalanced compounds. The dictionary of unbalanced compounds is empty only
if the reaction is balanced.
Args:
reactions: Dictionary or pair-iterable of (id, equation) pairs.
IDs must be any hashable reaction identifier (e.g. string) and
equation must be :class:`psamm.reaction.Reaction` objects.
formulas: Dictionary mapping compound IDs to
:class:`psamm.formula.Formula`. Formulas must be flattened.
prior: Tuple of (alpha, beta) parameters for the MAP inference.
If not provided, the default parameters will be used: (1, 43).
max_iterations: Maximum iterations to run before stopping. If the
stopping condition is reached before this number of iterations,
the procedure also stops. If None, the procedure only stops when
the stopping condition is reached.
element_weight: A function providing returning weight value for the
given :class:`psamm.formula.Atom` or
:class:`psamm.formula.Radical`. If not provided, the default weight
will be used (H=0, C=1, *=0.82) |
12,860 | def unpack(self, buff, offset=0):
unpack_length = UBInt16()
unpack_length.unpack(buff, offset)
super().unpack(buff[:offset+unpack_length], offset) | Unpack a binary message into this object's attributes.
Pass the correct length for list unpacking.
Args:
buff (bytes): Binary data package to be unpacked.
offset (int): Where to begin unpacking. |
12,861 | def load(self):
    try:
        self._read()
        self._parse()
    except Exception as exc:
        self.failed = True
        params = {'path': self._path, 'exception': exc}
        if self.fail_silently:
            LOG.warning("Error processing message json file '%(path)s': "
                        "%(exception)s", params)
        else:
            raise exceptions.MessageFailure(
                _("Error processing message json file '%(path)s': "
                  "%(exception)s") % params) | Read and parse the message file. |
12,862 | def start_login_server(self, ):
self.login_server = oauth.LoginServer(session=self)
target = self.login_server.serve_forever
self.login_thread = threading.Thread(target=target)
self.login_thread.setDaemon(True)
log.debug('Starting login server')  # message reconstructed
self.login_thread.start() | Start a server that will get a request from a user logging in.
This uses the Implicit Grant Flow of OAuth2. The user is asked
to login to twitch and grant PyTwitcher authorization.
Once the user agrees, he is redirected to an url.
This server will respond to that url and get the oauth token.
The server serves in another thread. To shut him down, call
:meth:`TwitchSession.shutdown_login_server`.
This sets the :data:`TwitchSession.login_server`,
:data:`TwitchSession.login_thread` variables.
:returns: The created server
:rtype: :class:`BaseHTTPServer.HTTPServer`
:raises: None |
12,863 | def find_element_by_name(self, name, update=False) -> Elements:
return self.find_element(by=By.NAME, value=name, update=update) | Finds an element by name.
Args:
name: The name of the element to be found.
update: If the interface has changed, this option should be True.
Returns:
The element if it was found.
Raises:
NoSuchElementException - If the element wasn't found.
Usage:
element = driver.find_element_by_name('foo') |
12,864 | def project_parensemble(self,par_file=None,nsing=None,
inplace=True, enforce_bounds=None):
assert self.jco is not None,"MonteCarlo.project_parensemble()" +\
"requires a jacobian attribute"
if par_file is not None:
assert os.path.exists(par_file),"monte_carlo.draw() error: par_file not found:" +\
par_file
self.parensemble.pst.parrep(par_file)
self.log("projecting parameter ensemble")
en = self.parensemble.project(self.get_null_proj(nsing),inplace=inplace,log=self.log)
self.log("projecting parameter ensemble")
return en | perform the null-space projection operations for null-space monte carlo
Parameters
----------
par_file: str
an optional file of parameter values to use
nsing: int
number of singular values to in forming null subspace matrix
inplace: bool
overwrite the existing parameter ensemble with the
projected values
enforce_bounds: str
how to enforce parameter bounds. can be None, 'reset', or 'drop'.
Default is None
Returns
-------
par_en : pyemu.ParameterEnsemble
if inplace is False, otherwise None
Note
----
to use this method, the MonteCarlo instance must have been constructed
with the ``jco`` argument.
Example
-------
``>>>import pyemu``
``>>>mc = pyemu.MonteCarlo(jco="pest.jcb")``
``>>>mc.draw(1000)``
``>>>mc.project_parensemble(par_file="final.par",nsing=100)`` |
12,865 | def mrc_header_from_params(shape, dtype, kind, **kwargs):
    # String literals and dtype names reconstructed from the docstring and
    # the MRC2014 specification; the machine stamp and exttype values are
    # best-effort assumptions.
    shape = [int(n) for n in shape]
    kind, kind_in = str(kind).lower(), kind
    if kind not in ('volume', 'projections'):
        raise ValueError("`kind` {!r} not understood".format(kind_in))
    extent = kwargs.pop('extent', shape)
    axis_order = kwargs.pop('axis_order', (0, 1, 2))
    if tuple(axis_order) not in permutations((0, 1, 2)):
        raise ValueError(
            'invalid `axis_order` {}'.format(axis_order))
    dmin = kwargs.pop('dmin', 1.0)
    dmax = kwargs.pop('dmax', 0.0)
    dmean = kwargs.pop('dmean', min(dmin, dmax) - 1.0)
    rms = kwargs.pop('rms', -1.0)
    mrc_version = kwargs.pop('mrc_version', (2014, 0))
    if len(mrc_version) != 2:
        raise ValueError(
            'invalid `mrc_version` {}'.format(mrc_version))
    text_labels_in = kwargs.pop('text_labels', [])
    nlabl = len(text_labels_in)
    if nlabl > 10:
        raise ValueError(
            'expected at most 10 text labels, got {}'.format(nlabl))
    text_labels = [str(label).ljust(80) for label in text_labels_in]
    if any(len(label) > 80 for label in text_labels):
        raise ValueError('text labels cannot be longer than 80 characters')
    nx, ny, nz = [np.array(n, dtype='int32').reshape([1]) for n in shape]
    mode = np.array(NPY_DTYPE_TO_MRC_MODE[np.dtype(dtype)],
                    dtype='int32').reshape([1])
    mx, my, mz = nx, ny, nz
    cella = np.array(extent).reshape([3]).astype('float32')
    mapc, mapr, maps = [np.array(m, dtype='int32').reshape([1]) + 1
                        for m in axis_order]
    dmin, dmax, dmean, rms = [np.array(x, dtype='float32').reshape([1])
                              for x in (dmin, dmax, dmean, rms)]
    ispg = 1 if kind == 'volume' else 0
    ispg = np.array(ispg, dtype='int32', ndmin=1)
    nsymbt = np.array([0], dtype='int32')
    exttype = np.fromstring('    ', dtype='S1')
    nversion = np.array(10 * mrc_version[0] + mrc_version[1],
                        dtype='int32').reshape([1])
    origin = np.zeros(3, dtype='float32')
    map = np.fromstring('MAP ', dtype='S1')
    machst = np.fromiter(b'DD\x00\x00', dtype='S1')
    nlabl = np.array(nlabl, dtype='int32').reshape([1])
    label = np.zeros((10, 80), dtype='S1')
    for i, label_i in enumerate(text_labels):
        label[i] = np.fromstring(label_i, dtype='S1')
    header_fields = header_fields_from_table(
        MRC_2014_SPEC_TABLE, MRC_SPEC_KEYS, MRC_DTYPE_TO_NPY_DTYPE)
    header = OrderedDict()
    for field in header_fields:
        header[field['name']] = {'offset': field['offset'],
                                 'value': eval(field['value'])}
    return header | Create a minimal MRC2014 header from the given parameters.
Parameters
----------
shape : 3-sequence of ints
3D shape of the stored data. The values are used as
``'nx', 'ny', 'nz'`` header entries, in this order. Note that
this is different from the actual data storage shape for
non-trivial ``axis_order``.
dtype : {'int8', 'int16', 'float32', 'uint16'}
Data type specifier as understood by `numpy.dtype`. It is
translated to a ``'mode'`` header entry. See `this page
<http://www.ccpem.ac.uk/mrc_format/mrc2014.php>`_ for valid
modes.
kind : {'volume', 'projections'}
Interpretation of the 3D data, either as single 3D volume or as
a stack of 2D projections. The value is used for the ``'ispg'``
header entry.
extent : 3-sequence of floats, optional
Size of the 3D volume in meters. The values are used for
the ``'cella'`` header entry.
Default: ``shape``, resulting in ``(1, 1, 1)`` unit cells
axis_order : permutation of ``(0, 1, 2)`` optional
Order of the data axes as they should appear in the stored file.
The values are used for the ``'mapc', 'mapr', 'maps'`` header
entries.
Default: ``(0, 1, 2)``
dmin, dmax : float, optional
Minimum and maximum values of the data, used for header entries
``'dmin'`` and ``'dmax'``, resp.
Default: 1.0, 0.0. These values indicate according to [Che+2015]
that the values are considered as undetermined.
dmean, rms : float, optional
Mean and variance of the data, used for header entries ``'dmean'``
and ``'rms'``, resp.
Default: ``min(dmin, dmax) - 1, -1.0``. These values indicate
according to [Che+2015] that the values are considered as
undetermined.
mrc_version : 2-tuple of int, optional
Version identifier for the MRC file, used for the ``'nversion'``
header entry.
Default: ``(2014, 0)``
text_labels : sequence of strings, optional
Maximal 10 strings with 80 characters each, used for the
``'nlabl'`` and ``'label'`` header entries.
Default: ``[]``
Returns
-------
header : `OrderedDict`
Header stored in an ordered dictionary, where each entry has the
following form::
'name': {'value': value_as_array,
'offset': offset_in_bytes
'description': description_string}
All ``'value'``'s are `numpy.ndarray`'s with at least one
dimension.
References
----------
[Che+2015] Cheng, A et al. *MRC2014: Extensions to the MRC format header
for electron cryo-microscopy and tomography*. Journal of Structural
Biology, 129 (2015), pp 146--150. |
12,866 | def text_iter(self, context):
for _event, elem in context:
if not self.debug:
yield ExportXMLDocumentGraph(elem, name=elem.attrib[add_ns('id')])  # attribute name reconstructed
else:
yield elem
elem.clear()
while elem.getprevious() is not None:
del elem.getparent()[0]
del context | Iterates over all the elements in an iterparse context
(here: <text> elements) and yields an ExportXMLDocumentGraph instance
for each of them. For efficiency, the elements are removed from the
DOM / main memory after processing them.
If ``self.debug`` is set to ``True`` (in the ``__init__`` method),
this method will yield <text> elements, which can be used to construct
``ExportXMLDocumentGraph``s manually. |
12,867 | def next_non_holiday_weekday(holidays, dt):
day_of_week = dt.weekday()
if day_of_week == SUNDAY:
while is_holiday_or_weekend(holidays, dt):
dt += timedelta(1)
return dt | If a holiday falls on a Sunday, observe it on the next non-holiday weekday.
Parameters
----------
holidays : list[pd.tseries.holiday.Holiday]
list of holidays
dt : pd.Timestamp
date of holiday. |
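A usage sketch with an empty holiday list, assuming the module-level `SUNDAY` constant and `is_holiday_or_weekend` helper are in scope:

```python
import pandas as pd

dt = pd.Timestamp('2021-07-04')  # a Sunday
print(next_non_holiday_weekday([], dt))  # 2021-07-05, the following Monday
```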
12,868 | def _set_ospf_level12(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="ospf-level12", rest_name="level-1-2", parent=self, choice=(u, u), path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u: {u: None, u: u, u: u, u: None}}, namespace=, defining_module=, yang_type=, is_config=True)
except (TypeError, ValueError):
raise ValueError({
: ,
: "empty",
: ,
})
self.__ospf_level12 = t
if hasattr(self, ):
self._set() | Setter method for ospf_level12, mapped from YANG variable /routing_system/router/isis/router_isis_cmds_holder/address_family/ipv6/af_ipv6_unicast/af_ipv6_attributes/af_common_attributes/redistribute/ospf/ospf_level12 (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_ospf_level12 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ospf_level12() directly. |
12,869 | def grep(source, regex, stop_on_first=False):
loader = ClassLoader(source, max_cache=-1)
r = re.compile(regex)
def _matches(constant):
return r.match(constant.value)
for klass in loader.classes:
it = loader.search_constant_pool(path=klass, type_=UTF8, f=_matches)
if next(it, None):
print(klass)
if stop_on_first:
break | Grep the constant pool of all classes in source. |
12,870 | def create_pointing(self,event):
import math
(ra,dec)=self.c2p((self.canvasx(event.x),
self.canvasy(event.y)))
this_camera=camera(camera=self.camera.get())
ccds=this_camera.getGeometry(ra,dec)
items=[]
for ccd in ccds:
(x1,y1)=self.p2c((ccd[0],ccd[1]))
(x2,y2)=self.p2c((ccd[2],ccd[3]))
item=self.create_rectangle(x1,y1,x2,y2)
items.append(item)
label={}
label['text']=w.plabel.get()  # key names reconstructed
label['id']=self.label(this_camera.ra,this_camera.dec,label['text'])
self.pointings.append({
"label": label,
"items": items,
"camera": this_camera} )
self.current_pointing(len(self.pointings)-1) | Plot the sky coverage of pointing at event.x,event.y on the canavas |
12,871 | def _can_process_pre_prepare(self, pre_prepare: PrePrepare, sender: str) -> Optional[int]:
if not self.isMsgFromPrimary(pre_prepare, sender):
return PP_CHECK_NOT_FROM_PRIMARY
if (pre_prepare.viewNo, pre_prepare.ppSeqNo) in self.prePrepares:
return PP_CHECK_DUPLICATE
if not self.is_pre_prepare_time_acceptable(pre_prepare, sender):
return PP_CHECK_WRONG_TIME
if compare_3PC_keys((pre_prepare.viewNo, pre_prepare.ppSeqNo),
self.__last_pp_3pc) > 0:
return PP_CHECK_OLD
if self.nonFinalisedReqs(pre_prepare.reqIdr):
return PP_CHECK_REQUEST_NOT_FINALIZED
if not self.__is_next_pre_prepare(pre_prepare.viewNo,
pre_prepare.ppSeqNo):
return PP_CHECK_NOT_NEXT
if f.POOL_STATE_ROOT_HASH.nm in pre_prepare and \
pre_prepare.poolStateRootHash != self.stateRootHash(POOL_LEDGER_ID):
return PP_CHECK_INCORRECT_POOL_STATE_ROOT
status = self._bls_bft_replica.validate_pre_prepare(pre_prepare,
sender)
if status is not None:
return status
return None | Decide whether this replica is eligible to process a PRE-PREPARE.
:param pre_prepare: a PRE-PREPARE msg to process
:param sender: the name of the node that sent the PRE-PREPARE msg |
12,872 | def cal_pth(self, v, temp):
params_t = self._set_params(self.params_therm)
return constq_pth(v, temp, *params_t, self.n, self.z,
t_ref=self.t_ref, three_r=self.three_r) | calculate thermal pressure
:param v: unit-cell volume in A^3
:param temp: temperature in K
:return: thermal pressure in GPa |
12,873 | def _is_valid_token(self, auth_token):
    # Make sure the token is hex; if it's None, or something other than
    # hex, this raises and the token is treated as invalid.
    try:
        int(auth_token, 16)
    except (TypeError, ValueError):
        return False
    # reconstructed: treat the auth token as the Salt token; the original
    # session lookup was lost in extraction
    salt_token = auth_token
    if salt_token and self.resolver.get_token(salt_token):
        return True
    return False | Check if this is a valid salt-api token or valid Salt token
salt-api tokens are regular session tokens that tie back to a real Salt
token. Salt tokens are tokens generated by Salt's eauth system.
:return bool: True if valid, False if not valid. |
12,874 | def save_state(internal_request, state):
state_data = {"hash_type": internal_request.user_id_hash_type}
state[UserIdHasher.STATE_KEY] = state_data | Saves all necessary information needed by the UserIdHasher
:type internal_request: satosa.internal_data.InternalRequest
:param internal_request: The request
:param state: The current state |
12,875 | def read_leader_status(self):
api_path = '/v1/sys/leader'
response = self._adapter.get(
url=api_path,
)
return response.json() | Read the high availability status and current leader instance of Vault.
Supported methods:
GET: /sys/leader. Produces: 200 application/json
:return: The JSON response of the request.
:rtype: dict |
12,876 | def OnCopyResult(self, event):
selection = self.main_window.grid.selection
data = self.main_window.actions.copy_result(selection)
if type(data) is wx._gdi.Bitmap:
self.main_window.clipboard.set_clipboard(data, datatype="bitmap")
else:
self.main_window.clipboard.set_clipboard(data, datatype="text")
event.Skip() | Clipboard copy results event handler |
12,877 | def get_filters(self):
return dict(
moment_format=self.format,
moment_calendar=self.calendar,
moment_fromnow=self.from_now,
) | Returns a collection of momentjs filters |
12,878 | def set_printoptions(**kwargs):
warnings.warn(,
DeprecationWarning)
for key, value in kwargs.items():
if key not in [, , ]:
raise KeyError(.format(key))
set_option( + key, value) | Set printing options.
These options determine the way JPEG 2000 boxes are displayed.
Parameters
----------
short : bool, optional
When True, only the box ID, offset, and length are displayed. Useful
for displaying only the basic structure or skeleton of a JPEG 2000
file.
xml : bool, optional
When False, printing of the XML contents of any XML boxes or UUID XMP
boxes is suppressed.
codestream : bool, optional
When False, the codestream segments are not printed. Otherwise the
segments are printed depending on how set_parseoptions has been used.
See also
--------
get_printoptions
Examples
--------
To put back the default options, you can use:
>>> import glymur
>>> glymur.set_printoptions(short=False, xml=True, codestream=True) |
12,879 | def checkout(self, ref, branch=None):
return git_checkout(self.repo_dir, ref, branch=branch) | Do a git checkout of `ref`. |
12,880 | def set(self, section, key, value, comment=None):
self._read_sources()
if (section, key) in self._dot_keys:
section, key = self._dot_keys[(section, key)]
elif section in self._dot_keys:
section = self._dot_keys[section]
if not isinstance(value, str):
value = str(value)
self._parser.set(section, key, value)
self._add_dot_key(section, key)
if comment:
self._set_comment(section, comment, key) | Set config value with data type transformation (to str)
:param str section: Section to set config for
:param str key: Key to set config for
:param value: Value for key. It can be any primitive type.
:param str comment: Comment for the key |
12,881 | def trial(log_dir=None,
upload_dir=None,
sync_period=None,
trial_prefix="",
param_map=None,
init_logging=True):
global _trial
if _trial:
raise ValueError("A trial already exists in the current context")
local_trial = Trial(
log_dir=log_dir,
upload_dir=upload_dir,
sync_period=sync_period,
trial_prefix=trial_prefix,
param_map=param_map,
init_logging=True)
try:
_trial = local_trial
_trial.start()
yield local_trial
finally:
_trial = None
local_trial.close() | Generates a trial within a with context. |
12,882 | def _init_socket(self):
try:
self._connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._connection.connect((self.host, self.port))
self._connection.settimeout(self.timeout)
self._connection_file = self._connection.makefile()
except:
self._connection = None
self._connection_file = None
raise | Initialises the socket used for communicating with a q service. |
12,883 | def create(cls, bucket, key, value):
with db.session.begin_nested():
obj = cls(
bucket_id=as_bucket_id(bucket),
key=key,
value=value
)
db.session.add(obj)
return obj | Create a new tag for bucket. |
12,884 | def cnst_AT(self, Y):
return self.cnst_A0T(self.block_sep0(Y)) + \
np.sum(self.cnst_A1T(self.block_sep1(Y)), axis=-1) | r"""Compute :math:`A^T \mathbf{y}`. In this case
:math:`A^T \mathbf{y} = (I \;\; \Gamma_0^T \;\; \Gamma_1^T \;\;
\ldots) \mathbf{y}`. |
12,885 | def me(self):
json_data = self._session.get(API_ENDPOINT + '/me')  # endpoint suffix reconstructed
return self._object_factory(OBJECT_TYPE, json_data) | Get the details of the person accessing the API.
Raises:
ApiError: If the Webex Teams cloud returns an error. |
12,886 | def read(self, entity=None, attrs=None, ignore=None, params=None):
if entity is None:
entity = type(self)(
self._server_config,
product=self.product,
)
if ignore is None:
ignore = set()
return super(RepositorySet, self).read(entity, attrs, ignore, params) | Provide a default value for ``entity``.
By default, ``nailgun.entity_mixins.EntityReadMixin.read`` provides a
default value for ``entity`` like so::
entity = type(self)()
However, :class:`RepositorySet` requires that a ``product`` be
provided, so this technique will not work. Do this instead::
entity = type(self)(product=self.product.id) |
12,887 | def caption_mentions(self) -> List[str]:
if not self.caption:
return []
mention_regex = re.compile(r"(?:@)(\w(?:(?:\w|(?:\.(?!\.))){0,28}(?:\w))?)")
return re.findall(mention_regex, self.caption.lower()) | List of all lowercased profiles that are mentioned in the Post's caption, without preceding @. |
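The mention regex in isolation, showing the permitted username characters (word characters plus non-consecutive dots, up to 30 characters):

```python
import re

mention_regex = re.compile(r"(?:@)(\w(?:(?:\w|(?:\.(?!\.))){0,28}(?:\w))?)")
caption = "shot with @alice.b and @bob_99!"
print(re.findall(mention_regex, caption))  # ['alice.b', 'bob_99']
```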
12,888 | def height_to_pressure_std(height):
t0 = 288. * units.kelvin
gamma = 6.5 * units('K/km')
p0 = 1013.25 * units.mbar
return p0 * (1 - (gamma / t0) * height) ** (mpconsts.g / (mpconsts.Rd * gamma)) | r"""Convert height data to pressures using the U.S. standard atmosphere.
The implementation inverts the formula outlined in [Hobbs1977]_ pg.60-61.
Parameters
----------
height : `pint.Quantity`
Atmospheric height
Returns
-------
`pint.Quantity`
The corresponding pressure value(s)
Notes
-----
.. math:: p = p_0 e^{\frac{g}{R \Gamma} \text{ln}(1-\frac{Z \Gamma}{T_0})} |
12,889 | def operation(self, url, idp_entity_id, op, **opargs):
    sp_url = self._sp
    try:
        # statement order restored: the request must be sent before its
        # response can be parsed
        response = self.send(url, op, **opargs)
        respdict = self.parse_soap_message(response.text)
        self.ecp_conversation(respdict, idp_entity_id)
        response = self.send(url, op, **opargs)
    except (soap.XmlParseError, AssertionError, KeyError):
        raise
    if response.status_code >= 400:
        raise SAMLError("Error performing operation: %s" % (
            response.text,))
    return response | This is the method that should be used by someone that wants
to authenticate using SAML ECP
:param url: The page that access is sought for
:param idp_entity_id: The entity ID of the IdP that should be
used for authentication
:param op: Which HTTP operation (GET/POST/PUT/DELETE)
:param opargs: Arguments to the HTTP call
:return: The page |
12,890 | def main(self, config_filename):
    self._read_configuration_file(config_filename)
    if self._wrapper_class_name:
        self._io.title('Wrapper')  # title text reconstructed
        self.__generate_wrapper_class()
    else:
        self._io.log_verbose('Wrapper class generation is disabled')  # message reconstructed
    return 0 | The "main" of the wrapper generator. Returns 0 on success, 1 if one or more errors occurred.
:param str config_filename: The name of the configuration file.
:rtype: int |
12,891 | def set_image(self, image):
if isinstance(image, np.ndarray):
image = util.Image(image)
if isinstance(image, util.NullImage):
self.model_as_data = True
else:
self.model_as_data = False
self.image = image
self._data = self.image.get_padded_image(self.pad)
self.oshape = util.Tile(self._data.shape)
self.ishape = self.oshape.pad(-self.pad)
self.inner = self.ishape.slicer
for c in self.comps:
c.set_shape(self.oshape, self.ishape)
self._model = np.zeros(self._data.shape, dtype=np.float64)
self._residuals = np.zeros(self._data.shape, dtype=np.float64)
self.calculate_model() | Update the current comparison (real) image |
12,892 | def rotatePoints(points, rotationDegrees, pivotx=0, pivoty=0):
rotationRadians = math.radians(rotationDegrees % 360)
for x, y in points:
_checkForIntOrFloat(x)
_checkForIntOrFloat(y)
x -= pivotx
y -= pivoty
x, y = x * math.cos(rotationRadians) - y * math.sin(rotationRadians), x * math.sin(rotationRadians) + y * math.cos(rotationRadians)
x += pivotx
y += pivoty
yield int(x), int(y) | Rotates each x and y tuple in `points`` by `rotationDegrees`. The points
are rotated around the origin by default, but can be rotated around another
pivot point by specifying `pivotx` and `pivoty`.
The points are rotated counterclockwise.
Returns a generator that produces an x and y tuple for each point in `points`.
>>> list(rotatePoints([(10, 0), (7, 7)], 45))
[(7, 7), (0, 9)] |
12,893 | def ping(self, message=_NOTSET, *, encoding=_NOTSET):
    if message is not _NOTSET:
        args = (message,)
    else:
        args = ()
    return self.execute(b'PING', *args, encoding=encoding) | Ping the server.
Accept optional echo message. |
12,894 | def from_name(api_url, name, dry_run=False):
    return DataSet(
        '/'.join([api_url, name]).rstrip('/'),
        token=None,
        dry_run=dry_run
    ) | doesn't require a token config param
as all of our data is currently public |
12,895 | def trigger_event(self, source, event, args):
actions = []
for action in event.actions:
if callable(action):
ac = action(self, *args)
if not ac:
continue
if not hasattr(ac, "__iter__"):
actions.append(ac)
else:
actions += action(self, *args)
else:
actions.append(action)
ret = source.game.trigger(self, actions, args)
if event.once:
self._events.remove(event)
return ret | Trigger an event on the Entity
* \a source: The source of the event
* \a event: The event being triggered
* \a args: A list of arguments to pass to the callback |
12,896 | def set_legend(self):
leg = super(Coherence, self).set_legend()
if leg is not None:
leg.set_title('Coherence with:')  # title text reconstructed
return leg | Create a legend for this product |
12,897 | def det_residual(model,
guess,
start,
final,
shocks,
diff=True,
jactype='sparse'):
n_s = len(model.symbols['states'])
n_x = len(model.symbols['controls'])
N = guess.shape[0]
p = model.calibration['parameters']
f = model.functions['arbitrage']
g = model.functions['transition']
vec = guess[:-1, :]
vec_f = guess[1:, :]
s = vec[:, :n_s]
x = vec[:, n_s:]
S = vec_f[:, :n_s]
X = vec_f[:, n_s:]
m = shocks[:-1, :]
M = shocks[1:, :]
if diff:
SS, SS_m, SS_s, SS_x, SS_M = g(m, s, x, M, p, diff=True)
R, R_m, R_s, R_x, R_M, R_S, R_X = f(m, s, x, M, S, X, p, diff=True)
else:
SS = g(m, s, x, M, p)
R = f(m, s, x, M, S, X, p)
res_s = SS - S
res_x = R
res = np.zeros((N, n_s + n_x))
res[1:, :n_s] = res_s
res[:-1, n_s:] = res_x
res[0, :n_s] = -(guess[0, :n_s] - start)
res[-1, n_s:] = -(guess[-1, n_s:] - guess[-2, n_s:])
if not diff:
return res
else:
sparse_jac = False
if not sparse_jac:
res_s_s = SS_s
res_s_x = SS_x
jac = np.zeros((N, n_s + n_x, N, n_s + n_x))
for i in range(N - 1):
jac[i, n_s:, i, :n_s] = R_s[i, :, :]
jac[i, n_s:, i, n_s:] = R_x[i, :, :]
jac[i, n_s:, i + 1, :n_s] = R_S[i, :, :]
jac[i, n_s:, i + 1, n_s:] = R_X[i, :, :]
jac[i + 1, :n_s, i, :n_s] = SS_s[i, :, :]
jac[i + 1, :n_s, i, n_s:] = SS_x[i, :, :]
jac[i + 1, :n_s, i + 1, :n_s] = -np.eye(n_s)
jac[0, :n_s, 0, :n_s] = -np.eye(n_s)
jac[-1, n_s:, -1, n_s:] = -np.eye(n_x)
jac[-1, n_s:, -2, n_s:] = +np.eye(n_x)
nn = jac.shape[0] * jac.shape[1]
res = res.ravel()
jac = jac.reshape((nn, nn))
if jactype == 'sparse':
from scipy.sparse import csc_matrix, csr_matrix
jac = csc_matrix(jac)
return [res, jac] | Computes the residuals, the derivatives of the stacked-time system.
:param model: an fga model
:param guess: the guess for the simulated values. An `(n_s.n_x) x N` array,
where n_s is the number of states,
n_x the number of controls, and `N` the length of the simulation.
:param start: initial boundary condition (initial value of the states)
:param final: final boundary condition (last value of the controls)
:param shocks: values for the exogenous shocks
:param diff: if True, the derivatives are computes
:return: a list with two elements:
- an `(n_s.n_x) x N` array with the residuals of the system
- a `(n_s.n_x) x N x (n_s.n_x) x N` array representing the jacobian of
the system |
12,898 | def main(argv=None):
try:
_name_of_script, filepath = argv
except ValueError:
raise ValueError(argv)
print(filepath)
make_confidence_report_bundled(filepath=filepath,
test_start=FLAGS.test_start,
test_end=FLAGS.test_end,
which_set=FLAGS.which_set,
recipe=FLAGS.recipe,
report_path=FLAGS.report_path, batch_size=FLAGS.batch_size) | Make a confidence report and save it to disk. |
12,899 | def compile_protofile(proto_file_path):
    out_file = tempfile.mkstemp()[1]
    try:
        # protoc flag literals reconstructed
        subprocess.check_output(['protoc', '--include_imports',
                                 '--descriptor_set_out', out_file,
                                 proto_file_path])
    except subprocess.CalledProcessError as e:
        sys.exit('protoc returned status {}'.format(e.returncode))
    return out_file | Compile proto file to descriptor set.
Args:
proto_file_path: Path to proto file to compile.
Returns:
Path to file containing compiled descriptor set.
Raises:
SystemExit if the compilation fails. |