sentence1
stringlengths 52
3.87M
| sentence2
stringlengths 1
47.2k
| label
stringclasses 1
value |
---|---|---|
def getParam(self, name=None):
    """ Function getParam
    Return a dict of parameters or a parameter value
    @param name: The parameter name (optional); when omitted, all
        parameters are returned
    @return RETURN: dict of all parameters, a single parameter value,
        False when the named parameter does not exist, or None when the
        object has no 'parameters' key
    """
    if 'parameters' in self.keys():
        # Flatten the API's {index: {'name': .., 'value': ..}} layout
        # into a plain {name: value} mapping.
        params = {x['name']: x['value'] for x in self['parameters'].values()}
        if name:
            # Single-value lookup; False signals a missing parameter.
            return params.get(name, False)
        return params
def checkAndCreateClasses(self, classes):
    """ Function checkAndCreateClasses
    Check and add puppet class
    @param classes: The classes ids list
    @return RETURN: boolean - True if all requested classes now exist
    """
    existing = self['puppetclasses'].keys()
    for class_id in classes:
        if class_id in existing:
            continue
        # Missing class: attach it and refresh the local view.
        self['puppetclasses'].append(class_id)
        self.reload()
    return set(classes) <= set(self['puppetclasses'].keys())
def checkAndCreateParams(self, params):
    """ Function checkAndCreateParams
    Check and add global parameters
    @param params: The params dict
    @return RETURN: boolean - True if all requested params now exist
    """
    actual_params = self['parameters'].keys()
    for key, value in params.items():
        if key not in actual_params:
            self['parameters'].append({"name": key, "value": value})
            self.reload()
    # Subset check, consistent with checkAndCreateClasses: the previous
    # strict key equality returned False whenever unrelated parameters
    # already existed, and always failed when keys() returned a list
    # (list == dict_keys is never True).
    return set(params.keys()).issubset(set(self['parameters'].keys()))
def version_dict(version):
    """Turn a version string into a dict with major/minor/... info."""
    letters = ['alpha', 'pre']
    numbers = ['major', 'minor1', 'minor2', 'minor3', 'alpha_ver', 'pre_ver']
    match = version_re.match(str(version) or '')
    if not match:
        # Unparseable version: every component is None.
        return {key: None for key in numbers + letters}
    result = match.groupdict()
    for key in letters:
        # Normalise empty captures to None.
        result[key] = result[key] or None
    for key in numbers:
        if result[key] == '*':
            result[key] = 99  # wildcard component
        else:
            result[key] = int(result[key]) if result[key] else None
    return result
def get_diff(source, dest):
    """Get the diff between two records lists in this order:
    - to_create
    - to_update
    - to_delete
    """
    # Index both lists by record ID for O(1) lookups.
    source_dict = {record['id']: record for record in source}
    dest_dict = {record['id']: record for record in dest}
    source_keys = set(source_dict.keys())
    dest_keys = set(dest_dict.keys())
    to_create = source_keys - dest_keys
    to_delete = dest_keys - source_keys
    to_update = set()
    # Records present on both sides: compare canonical serialisations to
    # detect content changes (presumably canonical_json normalises away
    # kinto-only properties such as last_modified — confirm).
    for record_id in source_keys & dest_keys:
        new = canonical_json(source_dict[record_id])
        old = canonical_json(dest_dict[record_id])
        if new != old:
            to_update.add(record_id)
    return ([source_dict[k] for k in to_create],
            [source_dict[k] for k in to_update],
            [dest_dict[k] for k in to_delete])
def object_version_choices(obj):
    """
    Return a list of form choices for versions of this object which can be published.
    """
    choices = BLANK_CHOICE_DASH + [
        (PublishAction.UNPUBLISH_CHOICE, 'Unpublish current version'),
    ]
    if obj is None:
        # New objects in the Django admin have no saved versions yet.
        return choices
    saved_versions = Version.objects.filter(
        content_type=ContentType.objects.get_for_model(obj),
        object_id=obj.pk,
    ).exclude(version_number=None)
    choices.extend((version.version_number, version) for version in saved_versions)
    return choices
def manifest(self, values, *paths, filename: str = None) -> Dict:
    """Load a manifest file and apply template values
    """
    target = filename if filename else self.filename(*paths)
    with open(target, 'r') as stream:
        rendered = Template(stream.read()).render(values)
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input - confirm whether yaml.safe_load would suffice here.
    return yaml.load(rendered)
def set_packet_headers(self, headers):
    """ Set packet header.
    The method will try to set ps_headerprotocol to inform the Xena GUI and tester how to interpret the packet
    header byte sequence specified with PS_PACKETHEADER.
    This is mainly for information purposes, and the stream will transmit the packet header bytes even if no
    protocol segments are specified.
    If the method fails to set some segment it will log a warning and skip setup.
    :param headers: current packet headers
    :type headers: pypacker.layer12.ethernet.Ethernet
    """
    bin_headers = '0x' + binascii.hexlify(headers.bin()).decode('utf-8')
    self.set_attributes(ps_packetheader=bin_headers)
    body_handler = headers
    ps_headerprotocol = []
    while body_handler:
        # Map the pypacker layer name to its Xena segment keyword.
        header_name = str(body_handler).split('(')[0].lower()
        segment = pypacker_2_xena.get(header_name, None)
        if not segment:
            # Log the unknown header NAME - the original logged the
            # looked-up value, which is always None on this path.
            self.logger.warning('pypacker header {} not in conversion list'.format(header_name))
            return
        ps_headerprotocol.append(segment)
        # VLAN tags live inside the Ethernet layer, not as a separate layer.
        if type(body_handler) is Ethernet and body_handler.vlan:
            ps_headerprotocol.append('vlan')
        body_handler = body_handler.body_handler
    self.set_attributes(ps_headerprotocol=' '.join(ps_headerprotocol))
:type headers: pypacker.layer12.ethernet.Ethernet | entailment |
def add_modifier(self, m_type=XenaModifierType.standard, **kwargs):
    """ Add modifier.
    :param m_type: modifier type - standard or extended.
    :type: xenamanager.xena_stream.XenaModifierType
    :return: newly created modifier.
    :rtype: xenamanager.xena_stream.XenaModifier
    """
    if m_type == XenaModifierType.standard:
        new_index = '{}/{}'.format(self.index, len(self.modifiers))
        modifier = XenaModifier(self, index=new_index)
    else:
        new_index = '{}/{}'.format(self.index, len(self.xmodifiers))
        modifier = XenaXModifier(self, index=new_index)
    # Create on the chassis, refresh local state, then apply settings.
    modifier._create()
    modifier.get()
    modifier.set(**kwargs)
    return modifier
def remove_modifier(self, index, m_type=XenaModifierType.standard):
    """ Remove modifier.
    :param index: index of modifier to remove.
    :param m_type: modifier type - standard or extended.
    """
    if m_type == XenaModifierType.standard:
        survivors = OrderedDict(self.modifiers)
        del survivors[index]
        self.set_attributes(ps_modifiercount=0)
        self.del_objects_by_type('modifier')
    else:
        survivors = OrderedDict(self.xmodifiers)
        del survivors[index]
        self.set_attributes(ps_modifierextcount=0)
        self.del_objects_by_type('xmodifier')
    # Re-create every surviving modifier from scratch (presumably the
    # chassis API has no single-modifier removal - confirm).
    for modifier in survivors.values():
        self.add_modifier(m_type,
                          mask=modifier.mask, action=modifier.action, repeat=modifier.repeat,
                          min_val=modifier.min_val, step=modifier.step, max_val=modifier.max_val)
def modifiers(self):
    """
    :return: dictionary {index: object} of standard modifiers.
    """
    if not self.get_objects_by_type('modifier'):
        # Lazily build local objects from the chassis modifier count.
        count = int(self.get_attribute('ps_modifiercount'))
        for modifier_index in range(count):
            XenaModifier(self, index='{}/{}'.format(self.index, modifier_index)).get()
    return {modifier.id: modifier for modifier in self.get_objects_by_type('modifier')}
def xmodifiers(self):
    """
    :return: dictionary {index: object} of extended modifiers.
    """
    if not self.get_objects_by_type('xmodifier'):
        try:
            count = int(self.get_attribute('ps_modifierextcount'))
            for modifier_index in range(count):
                XenaXModifier(self, index='{}/{}'.format(self.index, modifier_index)).get()
        except Exception:
            # Best effort - the original deliberately swallows errors here
            # (presumably extended modifiers are unsupported on some
            # configurations - confirm).
            pass
    return {modifier.id: modifier for modifier in self.get_objects_by_type('xmodifier')}
def DRAGONS(flat=False, extras=True):
    """DRAGONS cosmology assumes WMAP7 + BAO + H_0 mean from
    Komatsu et al. (2011) ApJS 192 18K (arxiv:1001.4538v1)
    Parameters
    ----------
    flat: boolean
        If True, sets omega_lambda_0 = 1 - omega_M_0 to ensure omega_k_0
        = 0 exactly. Also sets omega_k_0 = 0 explicitly.
    extras: boolean
        If True, sets neutrino number N_nu = 0, neutrino density
        omega_n_0 = 0.0, Helium mass fraction Y_He = 0.24.
    """
    omega_c_0 = 0.2292  # cold dark matter density
    omega_b_0 = 0.0458  # baryon density
    cosmo = {
        'omega_b_0': omega_b_0,
        'omega_M_0': omega_b_0 + omega_c_0,
        'omega_lambda_0': 0.725,
        'h': 0.702,
        'n': 0.963,
        'sigma_8': 0.816,
        'tau': 0.088,
        'z_reion': 10.6,
        't_0': 13.76,
    }
    if flat:
        # Enforce a flat universe exactly.
        cosmo['omega_lambda_0'] = 1 - cosmo['omega_M_0']
        cosmo['omega_k_0'] = 0.0
    if extras:
        add_extras(cosmo)
    return cosmo
def Planck_2015(flat=False, extras=True):
    """Planck 2015 XII: Cosmological parameters Table 4
    column Planck TT, TE, EE + lowP + lensing + ext
    from Ade et al. (2015) A&A in press (arxiv:1502.01589v1)
    Parameters
    ----------
    flat: boolean
        If True, sets omega_lambda_0 = 1 - omega_M_0 to ensure omega_k_0
        = 0 exactly. Also sets omega_k_0 = 0 explicitly.
    extras: boolean
        If True, sets neutrino number N_nu = 0, neutrino density
        omega_n_0 = 0.0, Helium mass fraction Y_He = 0.24.
    """
    # Table quotes omega_b * h^2; convert to omega_b using h = 0.6774.
    cosmo = {
        'omega_b_0': 0.02230/(0.6774**2),
        'omega_M_0': 0.3089,
        'omega_lambda_0': 0.6911,
        'h': 0.6774,
        'n': 0.9667,
        'sigma_8': 0.8159,
        'tau': 0.066,
        'z_reion': 8.8,
        't_0': 13.799,
    }
    if flat:
        # Enforce a flat universe exactly.
        cosmo['omega_lambda_0'] = 1 - cosmo['omega_M_0']
        cosmo['omega_k_0'] = 0.0
    if extras:
        add_extras(cosmo)
    return cosmo
def add(self, dn: str, mod_list: dict) -> None:
    """
    Add a DN to the LDAP database; See ldap module. Doesn't return a result
    if transactions enabled.
    """
    def _add(obj):
        return obj.add_s(dn, mod_list)
    return self._do_with_retry(_add)
def modify(self, dn: str, mod_list: dict) -> None:
    """
    Modify a DN in the LDAP database; See ldap module. Doesn't return a
    result if transactions enabled.
    """
    def _modify(obj):
        return obj.modify_s(dn, mod_list)
    return self._do_with_retry(_modify)
def delete(self, dn: str) -> None:
    """
    delete a dn in the ldap database; see ldap module. doesn't return a
    result if transactions enabled.
    """
    def _delete(obj):
        return obj.delete_s(dn)
    return self._do_with_retry(_delete)
def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None:
    """
    rename a dn in the ldap database; see ldap module. doesn't return a
    result if transactions enabled.
    """
    def _rename(obj):
        return obj.rename_s(dn, new_rdn, new_base_dn)
    return self._do_with_retry(_rename)
def get_column_name(self, column_name):
    """ Get a column for given column name from META api. """
    # Default to the prettified raw name; a declared column with a
    # verbose_name overrides it.
    name = pretty_name(column_name)
    if column_name in self._meta.columns:
        column_cls = self._meta.columns[column_name]
        name = column_cls.verbose_name or name
    return name
def version(self):
    """Software version of the current repository
    """
    branches = self.branches()
    if self.info['branch'] != branches.sandbox:
        # Non-sandbox branch: slugify the branch name and append the
        # abbreviated head commit id.
        slug = re.sub('[^a-z0-9_-]+', '-', self.info['branch'].lower())
        return f"{slug}-{self.info['head']['id'][:8]}"
    try:
        return self.software_version()
    except Exception as exc:
        raise utils.CommandError(
            'Could not obtain repo version, do you have a makefile '
            'with version entry?\n%s' % exc
        )
def validate_version(self, prefix='v'):
    """Validate version by checking if it is a valid semantic version
    and its value is higher than latest github tag
    """
    version = self.software_version()
    # validate_tag raises when the version is invalid or not newer.
    self.github_repo().releases.validate_tag(version, prefix)
    return version
def skip_build(self):
    """Check if build should be skipped
    """
    skip_msg = self.config.get('skip', '[ci skip]')
    build_failing = os.environ.get('CODEBUILD_BUILD_SUCCEEDING') == '0'
    # Skip when the CI build is already failing, when building a tag,
    # or when the head commit message opts out.
    return (build_failing or
            self.info['current_tag'] or
            skip_msg in self.info['head']['message'])
def message(self, msg):
    """Send a message to third party applications
    """
    for broker in self.message_brokers:
        try:
            broker(msg)
        except Exception as exc:
            # One failing broker must not prevent delivery to the others.
            utils.error(exc)
def get_kinto_records(kinto_client, bucket, collection, permissions,
                      config=None):
    """Return all the kinto records for this bucket/collection.
    Creates the bucket and collection when missing; a 403 on creation is
    ignored (the user may lack creation rights on a pre-existing target).
    """
    try:
        kinto_client.create_bucket(id=bucket, if_not_exists=True)
    except KintoException as e:
        # Only swallow 403 (no permission to create buckets). The original
        # `if status == 403: pass` silently swallowed every KintoException.
        if not (hasattr(e, 'response') and e.response.status_code == 403):
            raise
    try:
        kinto_client.create_collection(id=collection, bucket=bucket,
                                       permissions=permissions,
                                       if_not_exists=True)
    except KintoException as e:
        if not (hasattr(e, 'response') and e.response.status_code == 403):
            raise
    return kinto_client.get_records(bucket=bucket, collection=collection)
def add_chassis(self, chassis):
    """ Connect to chassis, log on and take ownership.
    :param chassis: chassis object (the original docstring documented a
        non-existent ``ip`` parameter).
    """
    socket = XenaSocket(self.logger, chassis.ip, chassis.port)
    self.chassis_list[chassis] = socket
    socket.connect()
    # Keep the management connection alive in the background.
    KeepAliveThread(socket).start()
    self.send_command(chassis, 'c_logon', '"{}"'.format(chassis.password))
    self.send_command(chassis, 'c_owner', '"{}"'.format(chassis.owner))
def send_command(self, obj, command, *arguments):
    """ Send command and do not parse output (except for communication errors).
    :param obj: requested object.
    :param command: command to send.
    :param arguments: list of command arguments.
    """
    full_command = obj._build_index_command(command, *arguments)
    self.chassis_list[obj.chassis].sendQueryVerify(full_command)
def send_command_return(self, obj, command, *arguments):
    """ Send command and wait for single line output. """
    full_command = obj._build_index_command(command, *arguments)
    reply = self.chassis_list[obj.chassis].sendQuery(full_command)
    return obj._extract_return(command, reply)
def send_command_return_multilines(self, obj, command, *arguments):
    """ Send command and wait for multiple lines output. """
    full_command = obj._build_index_command(command, *arguments)
    return self.chassis_list[obj.chassis].sendQuery(full_command, True)
def get_attribute(self, obj, attribute):
    """ Returns single object attribute.
    :param obj: requested object.
    :param attribute: requested attribute to query.
    :returns: returned value.
    :rtype: str
    """
    value = self.send_command_return(obj, attribute, '?')
    # Strip surrounding double quotes from quoted string replies.
    if len(value) > 2 and value.startswith('"') and value.endswith('"'):
        return value[1:-1]
    return value
def get_attributes(self, obj):
    """ Get all object's attributes.
    Sends multi-parameter info/config queries and returns the result as dictionary.
    :param obj: requested object.
    :returns: dictionary of <name, value> of all attributes returned by the query.
    :rtype: dict of (str, str)
    """
    attributes = {}
    for info_config_command in obj.info_config_commands:
        index_commands_values = self.send_command_return_multilines(obj, info_config_command, '?')
        # poor implementation...
        # li/ci locate tokens within each reply line: the token at position
        # ci is the attribute name; everything after position li is
        # (presumably) the value - confirm against the chassis line format.
        li = obj._get_index_len()
        ci = obj._get_command_len()
        for index_command_value in index_commands_values:
            command = index_command_value.split()[ci].lower()
            if len(index_command_value.split()) > li + 1:
                # Join the value tokens and drop any quoting.
                value = ' '.join(index_command_value.split()[li+1:]).replace('"', '')
            else:
                # No value tokens after the name -> empty value.
                value = ''
            attributes[command] = value
    return attributes
def set_attributes(self, obj, **attributes):
    """ Set attributes.
    :param obj: requested object.
    :param attributes: dictionary of {attribute: value} to set
    """
    for name, value in attributes.items():
        self.send_command(obj, name, value)
def get_stats(self, obj, stat_name):
    """ Send CLI command that returns list of integer counters.
    :param obj: requested object.
    :param stat_name: statistics command name.
    :return: list of counters.
    :rtype: list(int)
    """
    raw = self.get_attribute(obj, stat_name)
    return list(map(int, raw.split()))
def depth_first(self, top_down=True):
    """
    Iterate depth-first over this container's subtree.
    :param top_down: if True, yield each child before recursing into it
        (pre-order); if False, recurse first and yield the child after
        (post-order).
    """
    # Snapshot the children so mutation during iteration is safe.
    for child in tuple(self):
        if top_down:
            yield child
            if isinstance(child, UniqueTreeContainer):
                yield from child.depth_first(top_down=top_down)
        else:
            if isinstance(child, UniqueTreeContainer):
                yield from child.depth_first(top_down=top_down)
            yield child
def reserve(self, force=False):
    """ Reserve port.
    XenaManager-2G -> Reserve/Relinquish Port.
    :param force: True - take forcefully, False - fail if port is reserved by other user
    """
    reservation = self.get_attribute('p_reservation')
    if reservation == 'RESERVED_BY_YOU':
        # Nothing to do - we already own the port.
        return
    if reservation == 'RESERVED_BY_OTHER' and not force:
        raise TgnError('Port {} reserved by {}'.format(self, self.get_attribute('p_reservedby')))
    self.relinquish()
    self.send_command('p_reservation', 'reserve')
def load_config(self, config_file_name):
    """ Load configuration file from xpc file.
    :param config_file_name: full path to the configuration file.
    """
    with open(config_file_name) as config_file:
        lines = config_file.read().splitlines()
    for line in lines:
        if line.startswith(';'):
            continue  # skip comment lines
        try:
            self.send_command(line)
        except XenaCommandException as e:
            # Log and keep loading the remaining commands.
            self.logger.warning(str(e))
def save_config(self, config_file_name):
    """ Save configuration file to xpc file.
    :param config_file_name: full path to the configuration file.
    """
    with open(config_file_name, 'w+') as config_file:
        config_file.write('P_RESET\n')
        for line in self.send_command_return_multilines('p_fullconfig', '?'):
            # Drop the leading index token from each returned line.
            config_file.write(line.split(' ', 1)[1].lstrip())
def add_stream(self, name=None, tpld_id=None, state=XenaStreamState.enabled):
    """ Add stream.
    :param name: stream description.
    :param tpld_id: TPLD ID. If None then a unique value will be set.
    :param state: new stream state.
    :type state: xenamanager.xena_stream.XenaStreamState
    :return: newly created stream.
    :rtype: xenamanager.xena_stream.XenaStream
    """
    stream_index = '{}/{}'.format(self.index, len(self.streams))
    stream = XenaStream(parent=self, index=stream_index, name=name)
    stream._create()
    if not tpld_id:
        tpld_id = XenaStream.next_tpld_id
    stream.set_attributes(ps_comment='"{}"'.format(stream.name), ps_tpldid=tpld_id)
    # Keep the class-level TPLD counter ahead of any ID used so far.
    XenaStream.next_tpld_id = max(XenaStream.next_tpld_id + 1, tpld_id + 1)
    stream.set_state(state)
    return stream
def read_port_stats(self):
    """
    :return: dictionary {group name {stat name: value}}.
        See XenaPort.stats_captions.
    """
    stats_with_captions = OrderedDict()
    for stat_name, captions in self.stats_captions.items():
        stats_with_captions[stat_name] = self.read_stat(captions, stat_name)
    return stats_with_captions
def read_stream_stats(self):
"""
:return: dictionary {stream index {stat name: value}}.
Sea XenaStream.stats_captions.
"""
stream_stats = OrderedDict()
for stream in self.streams.values():
stream_stats[stream] = stream.read_stats()
return stream_stats | :return: dictionary {stream index {stat name: value}}.
Sea XenaStream.stats_captions. | entailment |
def read_tpld_stats(self):
"""
:return: dictionary {tpld index {group name {stat name: value}}}.
Sea XenaTpld.stats_captions.
"""
payloads_stats = OrderedDict()
for tpld in self.tplds.values():
payloads_stats[tpld] = tpld.read_stats()
return payloads_stats | :return: dictionary {tpld index {group name {stat name: value}}}.
Sea XenaTpld.stats_captions. | entailment |
def streams(self):
    """
    :return: dictionary {id: object} of all streams.
    :rtype: dict of (int, xenamanager.xena_stream.XenaStream)
    """
    if not self.get_objects_by_type('stream'):
        # Lazily build local stream objects from the port's stream indices.
        tpld_ids = []
        for stream_index in self.get_attribute('ps_indices').split():
            stream = XenaStream(parent=self, index='{}/{}'.format(self.index, stream_index))
            tpld_ids.append(stream.get_attribute('ps_tpldid'))
        if tpld_ids:
            # Keep the class-level TPLD counter above every ID in use.
            highest = max([XenaStream.next_tpld_id] + [int(t) for t in tpld_ids])
            XenaStream.next_tpld_id = highest + 1
    return {stream.id: stream for stream in self.get_objects_by_type('stream')}
def tplds(self):
    """
    :return: dictionary {id: object} of all current tplds.
    :rtype: dict of (int, xenamanager.xena_port.XenaTpld)
    """
    # TPLDs are dynamic, so drop cached objects and re-read from the port.
    # NOTE(review): this deletes from self.parent, not self - confirm the
    # tpld cache really lives on the parent object.
    self.parent.del_objects_by_type('tpld')
    for tpld_index in self.get_attribute('pr_tplds').split():
        XenaTpld(parent=self, index='{}/{}'.format(self.index, tpld_index))
    return {tpld.id: tpld for tpld in self.get_objects_by_type('tpld')}
def read_stats(self):
    """
    :return: dictionary {group name {stat name: value}}.
        See XenaTpld.stats_captions.
    """
    stats_with_captions = OrderedDict()
    for stat_name, captions in self.stats_captions.items():
        stats_with_captions[stat_name] = self.read_stat(captions, stat_name)
    return stats_with_captions
def get_packets(self, from_index=0, to_index=None, cap_type=XenaCaptureBufferType.text,
                file_name=None, tshark=None):
    """ Get captured packets from chassis.
    :param from_index: index of first packet to read.
    :param to_index: index of last packet to read. If None - read all packets.
    :param cap_type: returned capture format. If pcap then file name and tshark must be provided.
    :param file_name: if specified, capture will be saved in file.
    :param tshark: tshark object for pcap type only.
    :type: xenamanager.xena_tshark.Tshark
    :return: list of requested packets, None for pcap type.
    """
    to_index = to_index if to_index else len(self.packets)
    # Raw hex payload of each packet (the text after the '0x' prefix).
    raw_packets = []
    for index in range(from_index, to_index):
        raw_packets.append(self.packets[index].get_attribute('pc_packet').split('0x')[1])
    if cap_type == XenaCaptureBufferType.raw:
        self._save_captue(file_name, raw_packets)
        return raw_packets
    # Reformat raw hex into dump-style text: byte offset prefix per line,
    # 16 bytes (32 hex chars) per line, bytes separated by spaces.
    text_packets = []
    for raw_packet in raw_packets:
        text_packet = ''
        for c, b in zip(range(len(raw_packet)), raw_packet):
            if c % 32 == 0:
                # Start a new line; offset is counted in bytes (c / 2).
                text_packet += '\n{:06x} '.format(int(c / 2))
            elif c % 2 == 0:
                # Space between bytes (every two hex chars).
                text_packet += ' '
            text_packet += b
        text_packets.append(text_packet)
    if cap_type == XenaCaptureBufferType.text:
        self._save_captue(file_name, text_packets)
        return text_packets
    # pcap: write the text form to a temp file, convert via tshark,
    # then remove the intermediate file. Returns None in this case.
    temp_file_name = file_name + '_'
    self._save_captue(temp_file_name, text_packets)
    tshark.text_to_pcap(temp_file_name, file_name)
    os.remove(temp_file_name)
def packets(self):
    """
    :return: dictionary {id: object} of all packets.
    :rtype: dict of (int, xenamanager.xena_port.XenaCapturePacket)
    """
    if not self.get_object_by_type('cappacket'):
        # Lazily build one local object per captured packet.
        num_packets = self.read_stats()['packets']
        for packet_index in range(num_packets):
            XenaCapturePacket(parent=self, index='{}/{}'.format(self.index, packet_index))
    return {packet.id: packet for packet in self.get_objects_by_type('cappacket')}
def add(self, rule: ControlRule = None, *, supply: float):
    """
    Register a new rule above a given ``supply`` threshold
    Registration supports a single-argument form for use as a decorator,
    as well as a two-argument form for direct application.
    Use the former for ``def`` or ``class`` definitions,
    and the later for ``lambda`` functions and existing callables.
    .. code:: python
        @control.add(supply=10)
        def linear(pool, interval):
            if pool.utilisation < 0.75:
                return pool.supply - interval
            elif pool.allocation > 0.95:
                return pool.supply + interval
        control.add(
            lambda pool, interval: pool.supply * (1.2 if pool.allocation > 0.75 else 0.9),
            supply=100
        )
    """
    if supply in self._thresholds:
        raise ValueError('rule for threshold %s re-defined' % supply)
    if rule is None:
        # Decorator form: defer registration until the rule is supplied.
        return partial(self.add, supply=supply)
    self.rules.append((supply, rule))
    self._thresholds.add(supply)
    return rule
def s(self, *args, **kwargs) -> Partial[Stepwise]:
    """
    Create an unbound prototype of this class, partially applying arguments
    .. code:: python
        @stepwise
        def control(pool: Pool, interval):
            return 10
        pipeline = control.s(interval=20) >> pool
    :note: The partial rules are sealed, and :py:meth:`~.UnboundStepwise.add`
        cannot be called on it.
    """
    sealed_args = (Stepwise, self.base) + tuple(self.rules) + args
    return Partial(*sealed_args, **kwargs)
def revisionId(self):
    """
    revisionId differs from id; it is an implementation detail - prefer self.id
    :return: RevisionId
    """
    log.warning("'RevisionId' requested, ensure that you are don't need 'id'")
    revision_id = self.json()['revisionId']
    # Sanity check: the two identifiers are expected to coincide.
    assert revision_id == self.id, f"RevisionId differs id-{self.id}!=revisionId-{revision_id}"
    return revision_id
def changeset(python_data: LdapObject, d: dict) -> Changeset:
    """ Generate changes object for ldap object. """
    table: LdapObjectClass = type(python_data)
    return Changeset(table.get_fields(), src=python_data, d=d)
def _db_to_python(db_data: dict, table: LdapObjectClass, dn: str) -> LdapObject:
    """ Convert a DbDate object to a LdapObject. """
    converted = {
        name: field.to_python(db_data[name])
        for name, field in table.get_fields().items()
        if field.db_field
    }
    # Attach the distinguished name after field conversion.
    return table(converted).merge({'dn': dn})
def _python_to_mod_new(changes: Changeset) -> Dict[str, List[List[bytes]]]:
""" Convert a LdapChanges object to a modlist for add operation. """
table: LdapObjectClass = type(changes.src)
fields = table.get_fields()
result: Dict[str, List[List[bytes]]] = {}
for name, field in fields.items():
if field.db_field:
try:
value = field.to_db(changes.get_value_as_list(name))
if len(value) > 0:
result[name] = value
except ValidationError as e:
raise ValidationError(f"{name}: {e}.")
return result | Convert a LdapChanges object to a modlist for add operation. | entailment |
def _python_to_mod_modify(changes: Changeset) -> Dict[str, List[Tuple[Operation, List[bytes]]]]:
""" Convert a LdapChanges object to a modlist for a modify operation. """
table: LdapObjectClass = type(changes.src)
changes = changes.changes
result: Dict[str, List[Tuple[Operation, List[bytes]]]] = {}
for key, l in changes.items():
field = _get_field_by_name(table, key)
if field.db_field:
try:
new_list = [
(operation, field.to_db(value))
for operation, value in l
]
result[key] = new_list
except ValidationError as e:
raise ValidationError(f"{key}: {e}.")
return result | Convert a LdapChanges object to a modlist for a modify operation. | entailment |
def search(table: LdapObjectClass, query: Optional[Q] = None,
database: Optional[Database] = None, base_dn: Optional[str] = None) -> Iterator[LdapObject]:
""" Search for a object of given type in the database. """
fields = table.get_fields()
db_fields = {
name: field
for name, field in fields.items()
if field.db_field
}
database = get_database(database)
connection = database.connection
search_options = table.get_search_options(database)
iterator = tldap.query.search(
connection=connection,
query=query,
fields=db_fields,
base_dn=base_dn or search_options.base_dn,
object_classes=search_options.object_class,
pk=search_options.pk_field,
)
for dn, data in iterator:
python_data = _db_to_python(data, table, dn)
python_data = table.on_load(python_data, database)
yield python_data | Search for a object of given type in the database. | entailment |
def get_one(table: LdapObjectClass, query: Optional[Q] = None,
database: Optional[Database] = None, base_dn: Optional[str] = None) -> LdapObject:
""" Get exactly one result from the database or fail. """
results = search(table, query, database, base_dn)
try:
result = next(results)
except StopIteration:
raise ObjectDoesNotExist(f"Cannot find result for {query}.")
try:
next(results)
raise MultipleObjectsReturned(f"Found multiple results for {query}.")
except StopIteration:
pass
return result | Get exactly one result from the database or fail. | entailment |
def preload(python_data: LdapObject, database: Optional[Database] = None) -> LdapObject:
""" Preload all NotLoaded fields in LdapObject. """
changes = {}
# Load objects within lists.
def preload_item(value: Any) -> Any:
if isinstance(value, NotLoaded):
return value.load(database)
else:
return value
for name in python_data.keys():
value_list = python_data.get_as_list(name)
# Check for errors.
if isinstance(value_list, NotLoadedObject):
raise RuntimeError(f"{name}: Unexpected NotLoadedObject outside list.")
elif isinstance(value_list, NotLoadedList):
value_list = value_list.load(database)
else:
if any(isinstance(v, NotLoadedList) for v in value_list):
raise RuntimeError(f"{name}: Unexpected NotLoadedList in list.")
elif any(isinstance(v, NotLoadedObject) for v in value_list):
value_list = [preload_item(value) for value in value_list]
else:
value_list = None
if value_list is not None:
changes[name] = value_list
return python_data.merge(changes) | Preload all NotLoaded fields in LdapObject. | entailment |
def insert(python_data: LdapObject, database: Optional[Database] = None) -> LdapObject:
""" Insert a new python_data object in the database. """
assert isinstance(python_data, LdapObject)
table: LdapObjectClass = type(python_data)
# ADD NEW ENTRY
empty_data = table()
changes = changeset(empty_data, python_data.to_dict())
return save(changes, database) | Insert a new python_data object in the database. | entailment |
def save(changes: Changeset, database: Optional[Database] = None) -> LdapObject:
""" Save all changes in a LdapChanges. """
assert isinstance(changes, Changeset)
if not changes.is_valid:
raise RuntimeError(f"Changeset has errors {changes.errors}.")
database = get_database(database)
connection = database.connection
table = type(changes._src)
# Run hooks on changes
changes = table.on_save(changes, database)
# src dn | changes dn | result | action
# ---------------------------------------|--------
# None | None | error | error
# None | provided | use changes dn | create
# provided | None | use src dn | modify
# provided | provided | error | error
src_dn = changes.src.get_as_single('dn')
if src_dn is None and 'dn' not in changes:
raise RuntimeError("No DN was given")
elif src_dn is None and 'dn' in changes:
dn = changes.get_value_as_single('dn')
assert dn is not None
create = True
elif src_dn is not None and 'dn' not in changes:
dn = src_dn
assert dn is not None
create = False
else:
raise RuntimeError("Changes to DN are not supported.")
assert dn is not None
if create:
# Add new entry
mod_list = _python_to_mod_new(changes)
try:
connection.add(dn, mod_list)
except ldap3.core.exceptions.LDAPEntryAlreadyExistsResult:
raise ObjectAlreadyExists(
"Object with dn %r already exists doing add" % dn)
else:
mod_list = _python_to_mod_modify(changes)
if len(mod_list) > 0:
try:
connection.modify(dn, mod_list)
except ldap3.core.exceptions.LDAPNoSuchObjectResult:
raise ObjectDoesNotExist(
"Object with dn %r doesn't already exist doing modify" % dn)
# get new values
python_data = table(changes.src.to_dict())
python_data = python_data.merge(changes.to_dict())
python_data = python_data.on_load(python_data, database)
return python_data | Save all changes in a LdapChanges. | entailment |
def delete(python_data: LdapObject, database: Optional[Database] = None) -> None:
""" Delete a LdapObject from the database. """
dn = python_data.get_as_single('dn')
assert dn is not None
database = get_database(database)
connection = database.connection
connection.delete(dn) | Delete a LdapObject from the database. | entailment |
def _get_field_by_name(table: LdapObjectClass, name: str) -> tldap.fields.Field:
""" Lookup a field by its name. """
fields = table.get_fields()
return fields[name] | Lookup a field by its name. | entailment |
def rename(python_data: LdapObject, new_base_dn: str = None,
database: Optional[Database] = None, **kwargs) -> LdapObject:
""" Move/rename a LdapObject in the database. """
table = type(python_data)
dn = python_data.get_as_single('dn')
assert dn is not None
database = get_database(database)
connection = database.connection
# extract key and value from kwargs
if len(kwargs) == 1:
name, value = list(kwargs.items())[0]
# work out the new rdn of the object
split_new_rdn = [[(name, value, 1)]]
field = _get_field_by_name(table, name)
assert field.db_field
python_data = python_data.merge({
name: value,
})
elif len(kwargs) == 0:
split_new_rdn = [str2dn(dn)[0]]
else:
assert False
new_rdn = dn2str(split_new_rdn)
connection.rename(
dn,
new_rdn,
new_base_dn,
)
if new_base_dn is not None:
split_base_dn = str2dn(new_base_dn)
else:
split_base_dn = str2dn(dn)[1:]
tmp_list = [split_new_rdn[0]]
tmp_list.extend(split_base_dn)
new_dn = dn2str(tmp_list)
python_data = python_data.merge({
'dn': new_dn,
})
return python_data | Move/rename a LdapObject in the database. | entailment |
def route(route_str): # decorator param
"""
Provides play2 likes routes, with python formatter
All string fileds should be named parameters
:param route_str: a route "GET /parent/{parentID}/child/{childId}{ctype}"
:return: the response of requests.request
"""
def ilog(elapsed):
# statistic
last_stat = _routes_stat.get(route_str, {"count": 0, "min": sys.maxint, "max": 0, "avg": 0})
last_count = last_stat["count"]
_routes_stat[route_str] = {
"count": last_count + 1,
"min": min(elapsed, last_stat["min"]),
"max": max(elapsed, last_stat["max"]),
"avg": (last_count * last_stat["avg"] + elapsed) / (last_count + 1)
}
# log.debug('Route Time: {0} took {1} ms'.format(route_str, elapsed))
def wrapper(f): # decorated function
@wraps(f)
def wrapped_func(*args, **kwargs): # params of function
self = args[0]
method, url = route_str.split(" ")
def defaults_dict():
f_args, varargs, keywords, defaults = inspect.getargspec(f)
defaults = defaults or []
return dict(zip(f_args[-len(defaults):], defaults))
defs = defaults_dict()
route_args = dict(defs.items() + kwargs.items())
def get_destination_url():
try:
return url.format(**route_args)
except KeyError as e:
# KeyError in format have a message with key
raise AttributeError("Define {0} as named argument for route.".format(e))
destination_url = self.base_url + get_destination_url()
f(*args, **kwargs) # generally this is "pass"
bypass_args = dict([
(param, route_args[param]) for param in
["data", "json", "cookies", "auth", "files", "content_type", "params"] if
param in route_args
])
# add json content type for:
# - unless files are sent
# - private that ends with .json
# - all public api with POST/PUT method, meaning have basic auth
# - json parameter is present
if "files" not in bypass_args and (destination_url.endswith('.json') or "json" in route_args or (
"auth" in bypass_args and method in ["POST", "PUT"])):
bypass_args['headers'] = {'Content-Type': 'application/json'}
if "content_type" in bypass_args and bypass_args['content_type'] == "yaml":
del bypass_args["content_type"]
bypass_args['headers'] = {'Content-Type': 'application/x-yaml'}
start = time.time()
try:
response = self._session.request(method, destination_url, verify=self.verify_ssl, **bypass_args)
except requests.ConnectionError:
log.info('ConnectionError caught. Trying again: \n %s:%s ' % (method, destination_url))
import traceback
def log_exception(exc_class, exc, tb):
log.info('Got exception: %s' % exc)
log.info('Class: %s' % exc_class)
log.info('Trace: %s' % traceback.format_tb(tb))
log.error('Got exception while executing: %s' % exc)
log_exception(*sys.exc_info())
time.sleep(2)
response = self._session.request(method, destination_url, verify=self.verify_ssl, **bypass_args)
end = time.time()
elapsed = int((end - start) * 1000.0)
ilog(elapsed)
if self.verify_codes:
if response.status_code is not 200:
msg = "Route {0} {1} returned code={2} and error: {3}".format(method,
get_destination_url(),
response.status_code,
response.text)
if response.status_code in api_http_code_errors.keys():
raise api_http_code_errors[response.status_code](msg)
else:
log.debug(response.text)
log.debug(response.request.body)
raise ApiError(msg)
return response
return wrapped_func
return wrapper | Provides play2 likes routes, with python formatter
All string fileds should be named parameters
:param route_str: a route "GET /parent/{parentID}/child/{childId}{ctype}"
:return: the response of requests.request | entailment |
def play_auth(f):
"""
Injects cookies, into requests call over route
:return: route
"""
def wrapper(*args, **kwargs):
self = args[0]
if 'cookies' in kwargs:
raise AttributeError("don't set cookies explicitly")
if 'auth' in kwargs:
raise AttributeError("don't set auth token explicitly")
assert self.is_connected, "not connected, call router.connect(email, password) first"
if self._jwt_auth:
kwargs['auth'] = self._jwt_auth
kwargs['cookies'] = None
elif self._cookies:
kwargs['cookies'] = self._cookies
kwargs['auth'] = None
else:
assert False, "no cookies, no JWT, but connected o_O"
return f(*args, **kwargs)
return wrapper | Injects cookies, into requests call over route
:return: route | entailment |
def basic_auth(f):
"""
Injects auth, into requests call over route
:return: route
"""
def wrapper(*args, **kwargs):
self = args[0]
if 'auth' in kwargs:
raise AttributeError("don't set auth token explicitly")
assert self.is_connected, "not connected, call router.connect(email, password) first"
if self._jwt_auth:
kwargs['auth'] = self._jwt_auth
elif self._auth:
kwargs['auth'] = self._auth
else:
assert False, "no basic token, no JWT, but connected o_O"
return f(*args, **kwargs)
return wrapper | Injects auth, into requests call over route
:return: route | entailment |
def _list_dict(l: Iterator[str], case_insensitive: bool = False):
"""
return a dictionary with all items of l being the keys of the dictionary
If argument case_insensitive is non-zero ldap.cidict.cidict will be
used for case-insensitive string keys
"""
if case_insensitive:
raise NotImplementedError()
d = tldap.dict.CaseInsensitiveDict()
else:
d = {}
for i in l:
d[i] = None
return d | return a dictionary with all items of l being the keys of the dictionary
If argument case_insensitive is non-zero ldap.cidict.cidict will be
used for case-insensitive string keys | entailment |
def addModlist(entry: dict, ignore_attr_types: Optional[List[str]] = None) -> Dict[str, List[bytes]]:
"""Build modify list for call of method LDAPObject.add()"""
ignore_attr_types = _list_dict(map(str.lower, (ignore_attr_types or [])))
modlist: Dict[str, List[bytes]] = {}
for attrtype in entry.keys():
if attrtype.lower() in ignore_attr_types:
# This attribute type is ignored
continue
for value in entry[attrtype]:
assert value is not None
if len(entry[attrtype]) > 0:
modlist[attrtype] = escape_list(entry[attrtype])
return modlist | Build modify list for call of method LDAPObject.add() | entailment |
def modifyModlist(
old_entry: dict, new_entry: dict, ignore_attr_types: Optional[List[str]] = None,
ignore_oldexistent: bool = False) -> Dict[str, Tuple[str, List[bytes]]]:
"""
Build differential modify list for calling LDAPObject.modify()/modify_s()
:param old_entry:
Dictionary holding the old entry
:param new_entry:
Dictionary holding what the new entry should be
:param ignore_attr_types:
List of attribute type names to be ignored completely
:param ignore_oldexistent:
If true attribute type names which are in old_entry
but are not found in new_entry at all are not deleted.
This is handy for situations where your application
sets attribute value to '' for deleting an attribute.
In most cases leave zero.
:return: List of tuples suitable for
:py:meth:`ldap:ldap.LDAPObject.modify`.
This function is the same as :py:func:`ldap:ldap.modlist.modifyModlist`
except for the following changes:
* MOD_DELETE/MOD_DELETE used in preference to MOD_REPLACE when updating
an existing value.
"""
ignore_attr_types = _list_dict(map(str.lower, (ignore_attr_types or [])))
modlist: Dict[str, Tuple[str, List[bytes]]] = {}
attrtype_lower_map = {}
for a in old_entry.keys():
attrtype_lower_map[a.lower()] = a
for attrtype in new_entry.keys():
attrtype_lower = attrtype.lower()
if attrtype_lower in ignore_attr_types:
# This attribute type is ignored
continue
# Filter away null-strings
new_value = list(filter(lambda x: x is not None, new_entry[attrtype]))
if attrtype_lower in attrtype_lower_map:
old_value = old_entry.get(attrtype_lower_map[attrtype_lower], [])
old_value = list(filter(lambda x: x is not None, old_value))
del attrtype_lower_map[attrtype_lower]
else:
old_value = []
if not old_value and new_value:
# Add a new attribute to entry
modlist[attrtype] = (ldap3.MODIFY_ADD, escape_list(new_value))
elif old_value and new_value:
# Replace existing attribute
old_value_dict = _list_dict(old_value)
new_value_dict = _list_dict(new_value)
delete_values = []
for v in old_value:
if v not in new_value_dict:
delete_values.append(v)
add_values = []
for v in new_value:
if v not in old_value_dict:
add_values.append(v)
if len(delete_values) > 0 or len(add_values) > 0:
modlist[attrtype] = (
ldap3.MODIFY_REPLACE, escape_list(new_value))
elif old_value and not new_value:
# Completely delete an existing attribute
modlist[attrtype] = (ldap3.MODIFY_DELETE, [])
if not ignore_oldexistent:
# Remove all attributes of old_entry which are not present
# in new_entry at all
for a in attrtype_lower_map.keys():
if a in ignore_attr_types:
# This attribute type is ignored
continue
attrtype = attrtype_lower_map[a]
modlist[attrtype] = (ldap3.MODIFY_DELETE, [])
return modlist | Build differential modify list for calling LDAPObject.modify()/modify_s()
:param old_entry:
Dictionary holding the old entry
:param new_entry:
Dictionary holding what the new entry should be
:param ignore_attr_types:
List of attribute type names to be ignored completely
:param ignore_oldexistent:
If true attribute type names which are in old_entry
but are not found in new_entry at all are not deleted.
This is handy for situations where your application
sets attribute value to '' for deleting an attribute.
In most cases leave zero.
:return: List of tuples suitable for
:py:meth:`ldap:ldap.LDAPObject.modify`.
This function is the same as :py:func:`ldap:ldap.modlist.modifyModlist`
except for the following changes:
* MOD_DELETE/MOD_DELETE used in preference to MOD_REPLACE when updating
an existing value. | entailment |
def connect(tenant=None, user=None, password=None, token=None, is_public=False):
"""
Authenticates user and returns new platform to user.
This is an entry point to start working with Qubell Api.
:rtype: QubellPlatform
:param str tenant: url to tenant, default taken from 'QUBELL_TENANT'
:param str user: user email, default taken from 'QUBELL_USER'
:param str password: user password, default taken from 'QUBELL_PASSWORD'
:param str token: session token, default taken from 'QUBELL_TOKEN'
:param bool is_public: either to use public or private api (public is not fully supported use with caution)
:return: New Platform instance
"""
if not is_public:
router = PrivatePath(tenant)
else:
router = PublicPath(tenant)
router.public_api_in_use = is_public
if token or (user and password):
router.connect(user, password, token)
return QubellPlatform().init_router(router) | Authenticates user and returns new platform to user.
This is an entry point to start working with Qubell Api.
:rtype: QubellPlatform
:param str tenant: url to tenant, default taken from 'QUBELL_TENANT'
:param str user: user email, default taken from 'QUBELL_USER'
:param str password: user password, default taken from 'QUBELL_PASSWORD'
:param str token: session token, default taken from 'QUBELL_TOKEN'
:param bool is_public: either to use public or private api (public is not fully supported use with caution)
:return: New Platform instance | entailment |
def connect_to_another_user(self, user, password, token=None, is_public=False):
"""
Authenticates user with the same tenant as current platform using and returns new platform to user.
:rtype: QubellPlatform
:param str user: user email
:param str password: user password
:param str token: session token
:param bool is_public: either to use public or private api (public is not fully supported use with caution)
:return: New Platform instance
"""
return QubellPlatform.connect(self._router.base_url, user, password, token, is_public) | Authenticates user with the same tenant as current platform using and returns new platform to user.
:rtype: QubellPlatform
:param str user: user email
:param str password: user password
:param str token: session token
:param bool is_public: either to use public or private api (public is not fully supported use with caution)
:return: New Platform instance | entailment |
def create_organization(self, name):
"""
Creates new organization
:rtype: Organization
"""
org = Organization.new(name, self._router)
assert org.ready(), "Organization {} hasn't got ready after creation".format(name)
return org | Creates new organization
:rtype: Organization | entailment |
def get_organization(self, id=None, name=None):
"""
Gets existing and accessible organization
:rtype: Organization
"""
log.info("Picking organization: %s (%s)" % (name, id))
return self.organizations[id or name] | Gets existing and accessible organization
:rtype: Organization | entailment |
def get_or_create_organization(self, id=None, name=None):
"""
Gets existing or creates new organization
:rtype: Organization
"""
if id:
return self.get_organization(id)
else:
assert name
try:
return self.get_organization(name=name)
except exceptions.NotFoundError:
return self.create_organization(name) | Gets existing or creates new organization
:rtype: Organization | entailment |
def get_backends_versions(self):
"""
Get backends versions
:return: dict containing name of backend and version.
"""
# We are not always have permission, so find open.
for i in range(0, len(self.organizations)):
try:
backends = self.organizations[i].environments['default'].backends
except ApiAuthenticationError:
pass
else:
break
versions = dict([(x['name'], x['version']) for x in backends])
return versions | Get backends versions
:return: dict containing name of backend and version. | entailment |
def make_driver(loop=None):
''' Returns a stop driver.
The optional loop argument can be provided to use the driver in another
loop than the default one.
Parameters
-----------
loop: BaseEventLoop
The event loop to use instead of the default one.
'''
loop = loop or asyncio.get_event_loop()
def stop(i = None):
loop.stop()
def driver(sink):
''' The stop driver stops the asyncio event loop.
The event loop is stopped as soon as an event is received on the
control observable or when it completes (both in case of success or
error).
Parameters
----------
sink: Sink
'''
sink.control.subscribe(
on_next=stop,
on_error=stop,
on_completed=stop)
return None
return Component(call=driver, input=Sink) | Returns a stop driver.
The optional loop argument can be provided to use the driver in another
loop than the default one.
Parameters
-----------
loop: BaseEventLoop
The event loop to use instead of the default one. | entailment |
def rdn_to_dn(changes: Changeset, name: str, base_dn: str) -> Changeset:
""" Convert the rdn to a fully qualified DN for the specified LDAP
connection.
:param changes: The changes object to lookup.
:param name: rdn to convert.
:param base_dn: The base_dn to lookup.
:return: fully qualified DN.
"""
dn = changes.get_value_as_single('dn')
if dn is not None:
return changes
value = changes.get_value_as_single(name)
if value is None:
raise tldap.exceptions.ValidationError(
"Cannot use %s in dn as it is None" % name)
if isinstance(value, list):
raise tldap.exceptions.ValidationError(
"Cannot use %s in dn as it is a list" % name)
assert base_dn is not None
split_base = str2dn(base_dn)
split_new_dn = [[(name, value, 1)]] + split_base
new_dn = dn2str(split_new_dn)
return changes.set('dn', new_dn) | Convert the rdn to a fully qualified DN for the specified LDAP
connection.
:param changes: The changes object to lookup.
:param name: rdn to convert.
:param base_dn: The base_dn to lookup.
:return: fully qualified DN. | entailment |
def _stdin_(p):
"""Takes input from user. Works for Python 2 and 3."""
_v = sys.version[0]
return input(p) if _v is '3' else raw_input(p) | Takes input from user. Works for Python 2 and 3. | entailment |
def survey_loader(sur_dir=SUR_DIR, sur_file=SUR_FILE):
"""Loads up the given survey in the given dir."""
survey_path = os.path.join(sur_dir, sur_file)
survey = None
with open(survey_path) as survey_file:
survey = Survey(survey_file.read())
return survey | Loads up the given survey in the given dir. | entailment |
def format_choices(self):
"""Return the choices in string form."""
ce = enumerate(self.choices)
f = lambda i, c: '%s (%d)' % (c, i+1)
# apply formatter and append help token
toks = [f(i,c) for i, c in ce] + ['Help (?)']
return ' '.join(toks) | Return the choices in string form. | entailment |
def is_answer_valid(self, ans):
"""Validate user's answer against available choices."""
return ans in [str(i+1) for i in range(len(self.choices))] | Validate user's answer against available choices. | entailment |
def run_question(self, question, input_func=_stdin_):
"""Run the given question."""
qi = '[%d/%d] ' % (self.qcount, self.qtotal)
print('%s %s:' % (qi, question['label']))
while True:
# ask for user input until we get a valid one
ans = input_func('%s > ' % self.format_choices())
if self.is_answer_valid(ans):
question['answer'] = int(ans)
break
else:
if ans is '?': print(question['description'])
else: print('Invalid answer.')
self.qcount += 1 | Run the given question. | entailment |
def run_section(self, name, input_func=_stdin_):
"""Run the given section."""
print('\nStuff %s by the license:\n' % name)
section = self.survey[name]
for question in section:
self.run_question(question, input_func) | Run the given section. | entailment |
def run(self, input_func=_stdin_):
"""Run the sections."""
# reset question count
self.qcount = 1
for section_name in self.survey:
self.run_section(section_name, input_func) | Run the sections. | entailment |
def get_vector(self):
"""Return the vector for this survey."""
vec = {}
for dim in ['forbidden', 'required', 'permitted']:
if self.survey[dim] is None:
continue
dim_vec = map(lambda x: (x['tag'], x['answer']),
self.survey[dim])
vec[dim] = dict(dim_vec)
return vec | Return the vector for this survey. | entailment |
def update(self, span: typing.Tuple[int, int], line_type: LineType) -> None:
"""
Updates line types for a block's span.
Args:
span: First and last relative line number of a Block.
line_type: The type of line to update to.
Raises:
ValidationError: A special error on collision. This prevents Flake8
from crashing because it is converted to a Flake8 error tuple,
but it indicates to the user that something went wrong with
processing the function.
"""
first_block_line, last_block_line = span
for i in range(first_block_line, last_block_line + 1):
try:
self.__setitem__(i, line_type)
except ValueError as error:
raise ValidationError(i + self.fn_offset, 1, 'AAA99 {}'.format(error)) | Updates line types for a block's span.
Args:
span: First and last relative line number of a Block.
line_type: The type of line to update to.
Raises:
ValidationError: A special error on collision. This prevents Flake8
from crashing because it is converted to a Flake8 error tuple,
but it indicates to the user that something went wrong with
processing the function. | entailment |
def check_arrange_act_spacing(self) -> typing.Generator[AAAError, None, None]:
"""
* When no spaces found, point error at line above act block
* When too many spaces found, point error at 2nd blank line
"""
yield from self.check_block_spacing(
LineType.arrange,
LineType.act,
'AAA03 expected 1 blank line before Act block, found {}',
) | * When no spaces found, point error at line above act block
* When too many spaces found, point error at 2nd blank line | entailment |
def check_act_assert_spacing(self) -> typing.Generator[AAAError, None, None]:
"""
* When no spaces found, point error at line above assert block
* When too many spaces found, point error at 2nd blank line
"""
yield from self.check_block_spacing(
LineType.act,
LineType._assert,
'AAA04 expected 1 blank line before Assert block, found {}',
) | * When no spaces found, point error at line above assert block
* When too many spaces found, point error at 2nd blank line | entailment |
def check_block_spacing(
self,
first_block_type: LineType,
second_block_type: LineType,
error_message: str,
) -> typing.Generator[AAAError, None, None]:
"""
Checks there is a clear single line between ``first_block_type`` and
``second_block_type``.
Note:
Is tested via ``check_arrange_act_spacing()`` and
``check_act_assert_spacing()``.
"""
numbered_lines = list(enumerate(self))
first_block_lines = filter(lambda l: l[1] is first_block_type, numbered_lines)
try:
first_block_lineno = list(first_block_lines)[-1][0]
except IndexError:
# First block has no lines
return
second_block_lines = filter(lambda l: l[1] is second_block_type, numbered_lines)
try:
second_block_lineno = next(second_block_lines)[0]
except StopIteration:
# Second block has no lines
return
blank_lines = [
bl for bl in numbered_lines[first_block_lineno + 1:second_block_lineno] if bl[1] is LineType.blank_line
]
if not blank_lines:
# Point at line above second block
yield AAAError(
line_number=self.fn_offset + second_block_lineno - 1,
offset=0,
text=error_message.format('none'),
)
return
if len(blank_lines) > 1:
# Too many blank lines - point at the first extra one, the 2nd
yield AAAError(
line_number=self.fn_offset + blank_lines[1][0],
offset=0,
text=error_message.format(len(blank_lines)),
) | Checks there is a clear single line between ``first_block_type`` and
``second_block_type``.
Note:
Is tested via ``check_arrange_act_spacing()`` and
``check_act_assert_spacing()``. | entailment |
def vector_distance(v1, v2):
"""Given 2 vectors of multiple dimensions, calculate the euclidean
distance measure between them."""
dist = 0
for dim in v1:
for x in v1[dim]:
dd = int(v1[dim][x]) - int(v2[dim][x])
dist = dist + dd**2
return dist | Given 2 vectors of multiple dimensions, calculate the euclidean
distance measure between them. | entailment |
def send_command(self, command, *arguments):
""" Send command with no output.
:param command: command to send.
:param arguments: list of command arguments.
"""
self.api.send_command(self, command, *arguments) | Send command with no output.
:param command: command to send.
:param arguments: list of command arguments. | entailment |
def send_command_return(self, command, *arguments):
""" Send command and wait for single line output. """
return self.api.send_command_return(self, command, *arguments) | Send command and wait for single line output. | entailment |
def send_command_return_multilines(self, command, *arguments):
""" Send command and wait for multiple lines output. """
return self.api.send_command_return_multilines(self, command, *arguments) | Send command and wait for multiple lines output. | entailment |
def load(self, limit=9999):
""" Function list
Get the list of all interfaces
@param key: The targeted object
@param limit: The limit of items to return
@return RETURN: A ForemanItem list
"""
subItemList = self.api.list('{}/{}/{}'.format(self.parentObjName,
self.parentKey,
self.objName,
),
limit=limit)
if self.objName == 'puppetclass_ids':
subItemList = list(map(lambda x: {'id': x}, subItemList))
if self.objName == 'puppetclasses':
sil_tmp = subItemList.values()
subItemList = []
for i in sil_tmp:
subItemList.extend(i)
return {x[self.index]: self.objType(self.api, x['id'],
self.parentObjName,
self.parentPayloadObj,
self.parentKey,
x)
for x in subItemList} | Function list
Get the list of all interfaces
@param key: The targeted object
@param limit: The limit of items to return
@return RETURN: A ForemanItem list | entailment |
def append(self, payload):
""" Function __iadd__
@param payload: The payload corresponding to the object to add
@return RETURN: A ForemanItem
"""
if self.objType.setInParentPayload:
print('Error, {} is not elibible to addition, but only set'
.format(self.objName))
return False
ret = self.api.create("{}/{}/{}".format(self.parentObjName,
self.parentKey,
self.objNameSet),
self.getPayloadStruct(payload))
return ret | Function __iadd__
@param payload: The payload corresponding to the object to add
@return RETURN: A ForemanItem | entailment |
def getPayloadStruct(self, payload):
""" Function getPayloadStruct
@param payload: The payload structure to the object to add
@return RETURN: A dict
"""
newSubItem = self.objType(self.api, 0, self.parentObjName,
self.parentPayloadObj, self.parentKey, {})
return newSubItem.getPayloadStruct(payload, self.parentPayloadObj) | Function getPayloadStruct
@param payload: The payload structure to the object to add
@return RETURN: A dict | entailment |
def get_repr(expr, multiline=False):
"""
Build a repr string for ``expr`` from its vars and signature.
::
>>> class MyObject:
... def __init__(self, arg1, arg2, *var_args, foo=None, bar=None, **kwargs):
... self.arg1 = arg1
... self.arg2 = arg2
... self.var_args = var_args
... self.foo = foo
... self.bar = bar
... self.kwargs = kwargs
...
>>> my_object = MyObject('a', 'b', 'c', 'd', foo='x', quux=['y', 'z'])
::
>>> import uqbar
>>> print(uqbar.objects.get_repr(my_object))
MyObject(
'a',
'b',
'c',
'd',
foo='x',
quux=['y', 'z'],
)
"""
signature = _get_object_signature(expr)
if signature is None:
return "{}()".format(type(expr).__name__)
defaults = {}
for name, parameter in signature.parameters.items():
if parameter.default is not inspect._empty:
defaults[name] = parameter.default
args, var_args, kwargs = get_vars(expr)
args_parts = collections.OrderedDict()
var_args_parts = []
kwargs_parts = {}
has_lines = multiline
parts = []
# Format keyword-optional arguments.
# print(type(expr), args)
for i, (key, value) in enumerate(args.items()):
arg_repr = _dispatch_formatting(value)
if "\n" in arg_repr:
has_lines = True
args_parts[key] = arg_repr
# Format *args
for arg in var_args:
arg_repr = _dispatch_formatting(arg)
if "\n" in arg_repr:
has_lines = True
var_args_parts.append(arg_repr)
# Format **kwargs
for key, value in sorted(kwargs.items()):
if key in defaults and value == defaults[key]:
continue
value = _dispatch_formatting(value)
arg_repr = "{}={}".format(key, value)
has_lines = True
kwargs_parts[key] = arg_repr
for _, part in args_parts.items():
parts.append(part)
parts.extend(var_args_parts)
for _, part in sorted(kwargs_parts.items()):
parts.append(part)
# If we should format on multiple lines, add the appropriate formatting.
if has_lines and parts:
for i, part in enumerate(parts):
parts[i] = "\n".join(" " + line for line in part.split("\n"))
parts.append(" )")
parts = ",\n".join(parts)
return "{}(\n{}".format(type(expr).__name__, parts)
parts = ", ".join(parts)
return "{}({})".format(type(expr).__name__, parts) | Build a repr string for ``expr`` from its vars and signature.
::
>>> class MyObject:
... def __init__(self, arg1, arg2, *var_args, foo=None, bar=None, **kwargs):
... self.arg1 = arg1
... self.arg2 = arg2
... self.var_args = var_args
... self.foo = foo
... self.bar = bar
... self.kwargs = kwargs
...
>>> my_object = MyObject('a', 'b', 'c', 'd', foo='x', quux=['y', 'z'])
::
>>> import uqbar
>>> print(uqbar.objects.get_repr(my_object))
MyObject(
'a',
'b',
'c',
'd',
foo='x',
quux=['y', 'z'],
) | entailment |
def get_vars(expr):
"""
Get ``args``, ``var args`` and ``kwargs`` for an object ``expr``.
::
>>> class MyObject:
... def __init__(self, arg1, arg2, *var_args, foo=None, bar=None, **kwargs):
... self.arg1 = arg1
... self.arg2 = arg2
... self.var_args = var_args
... self.foo = foo
... self.bar = bar
... self.kwargs = kwargs
...
>>> my_object = MyObject('a', 'b', 'c', 'd', foo='x', quux=['y', 'z'])
::
>>> import uqbar
>>> args, var_args, kwargs = uqbar.objects.get_vars(my_object)
::
>>> args
OrderedDict([('arg1', 'a'), ('arg2', 'b')])
::
>>> var_args
['c', 'd']
::
>>> kwargs
{'foo': 'x', 'quux': ['y', 'z']}
"""
# print('TYPE?', type(expr))
signature = _get_object_signature(expr)
if signature is None:
return ({}, [], {})
# print('SIG?', signature)
args = collections.OrderedDict()
var_args = []
kwargs = {}
if expr is None:
return args, var_args, kwargs
for i, (name, parameter) in enumerate(signature.parameters.items()):
# print(' ', parameter)
if i == 0 and name in ("self", "cls", "class_", "klass"):
continue
if parameter.kind is inspect._POSITIONAL_ONLY:
try:
args[name] = getattr(expr, name)
except AttributeError:
args[name] = expr[name]
elif (
parameter.kind is inspect._POSITIONAL_OR_KEYWORD
or parameter.kind is inspect._KEYWORD_ONLY
):
found = False
for x in (name, "_" + name):
try:
value = getattr(expr, x)
found = True
break
except AttributeError:
try:
value = expr[x]
found = True
break
except (KeyError, TypeError):
pass
if not found:
raise ValueError("Cannot find value for {!r}".format(name))
if parameter.default is inspect._empty:
args[name] = value
elif parameter.default != value:
kwargs[name] = value
elif parameter.kind is inspect._VAR_POSITIONAL:
value = None
try:
value = expr[:]
except TypeError:
value = getattr(expr, name)
if value:
var_args.extend(value)
elif parameter.kind is inspect._VAR_KEYWORD:
items = {}
if hasattr(expr, "items"):
items = expr.items()
elif hasattr(expr, name):
mapping = getattr(expr, name)
if not isinstance(mapping, dict):
mapping = dict(mapping)
items = mapping.items()
elif hasattr(expr, "_" + name):
mapping = getattr(expr, "_" + name)
if not isinstance(mapping, dict):
mapping = dict(mapping)
items = mapping.items()
for key, value in items:
if key not in args:
kwargs[key] = value
return args, var_args, kwargs | Get ``args``, ``var args`` and ``kwargs`` for an object ``expr``.
::
>>> class MyObject:
... def __init__(self, arg1, arg2, *var_args, foo=None, bar=None, **kwargs):
... self.arg1 = arg1
... self.arg2 = arg2
... self.var_args = var_args
... self.foo = foo
... self.bar = bar
... self.kwargs = kwargs
...
>>> my_object = MyObject('a', 'b', 'c', 'd', foo='x', quux=['y', 'z'])
::
>>> import uqbar
>>> args, var_args, kwargs = uqbar.objects.get_vars(my_object)
::
>>> args
OrderedDict([('arg1', 'a'), ('arg2', 'b')])
::
>>> var_args
['c', 'd']
::
>>> kwargs
{'foo': 'x', 'quux': ['y', 'z']} | entailment |
def new(expr, *args, **kwargs):
"""
Template an object.
::
>>> class MyObject:
... def __init__(self, arg1, arg2, *var_args, foo=None, bar=None, **kwargs):
... self.arg1 = arg1
... self.arg2 = arg2
... self.var_args = var_args
... self.foo = foo
... self.bar = bar
... self.kwargs = kwargs
...
>>> my_object = MyObject('a', 'b', 'c', 'd', foo='x', quux=['y', 'z'])
::
>>> import uqbar
>>> new_object = uqbar.objects.new(my_object, foo=666, bar=1234)
>>> print(uqbar.objects.get_repr(new_object))
MyObject(
'a',
'b',
'c',
'd',
bar=1234,
foo=666,
quux=['y', 'z'],
)
Original object is unchanged:
::
>>> print(uqbar.objects.get_repr(my_object))
MyObject(
'a',
'b',
'c',
'd',
foo='x',
quux=['y', 'z'],
)
"""
# TODO: Clarify old vs. new variable naming here.
current_args, current_var_args, current_kwargs = get_vars(expr)
new_kwargs = current_kwargs.copy()
recursive_arguments = {}
for key in tuple(kwargs):
if "__" in key:
value = kwargs.pop(key)
key, _, subkey = key.partition("__")
recursive_arguments.setdefault(key, []).append((subkey, value))
for key, pairs in recursive_arguments.items():
recursed_object = current_args.get(key, current_kwargs.get(key))
if recursed_object is None:
continue
kwargs[key] = new(recursed_object, **dict(pairs))
if args:
current_var_args = args
for key, value in kwargs.items():
if key in current_args:
current_args[key] = value
else:
new_kwargs[key] = value
new_args = list(current_args.values()) + list(current_var_args)
return type(expr)(*new_args, **new_kwargs) | Template an object.
::
>>> class MyObject:
... def __init__(self, arg1, arg2, *var_args, foo=None, bar=None, **kwargs):
... self.arg1 = arg1
... self.arg2 = arg2
... self.var_args = var_args
... self.foo = foo
... self.bar = bar
... self.kwargs = kwargs
...
>>> my_object = MyObject('a', 'b', 'c', 'd', foo='x', quux=['y', 'z'])
::
>>> import uqbar
>>> new_object = uqbar.objects.new(my_object, foo=666, bar=1234)
>>> print(uqbar.objects.get_repr(new_object))
MyObject(
'a',
'b',
'c',
'd',
bar=1234,
foo=666,
quux=['y', 'z'],
)
Original object is unchanged:
::
>>> print(uqbar.objects.get_repr(my_object))
MyObject(
'a',
'b',
'c',
'd',
foo='x',
quux=['y', 'z'],
) | entailment |
def on_builder_inited(app):
"""
Hooks into Sphinx's ``builder-inited`` event.
Builds out the ReST API source.
"""
config = app.builder.config
target_directory = (
pathlib.Path(app.builder.env.srcdir) / config.uqbar_api_directory_name
)
initial_source_paths: List[str] = []
source_paths = config.uqbar_api_source_paths
for source_path in source_paths:
if isinstance(source_path, types.ModuleType):
if hasattr(source_path, "__path__"):
initial_source_paths.extend(getattr(source_path, "__path__"))
else:
initial_source_paths.extend(source_path.__file__)
continue
try:
module = importlib.import_module(source_path)
if hasattr(module, "__path__"):
initial_source_paths.extend(getattr(module, "__path__"))
else:
initial_source_paths.append(module.__file__)
except ImportError:
initial_source_paths.append(source_path)
root_documenter_class = config.uqbar_api_root_documenter_class
if isinstance(root_documenter_class, str):
module_name, _, class_name = root_documenter_class.rpartition(".")
module = importlib.import_module(module_name)
root_documenter_class = getattr(module, class_name)
module_documenter_class = config.uqbar_api_module_documenter_class
if isinstance(module_documenter_class, str):
module_name, _, class_name = module_documenter_class.rpartition(".")
module = importlib.import_module(module_name)
module_documenter_class = getattr(module, class_name)
# Don't modify the list in Sphinx's config. Sphinx won't pickle class
# references, and strips them from the saved config. That leads to Sphinx
# believing that the config has changed on every run.
member_documenter_classes = list(config.uqbar_api_member_documenter_classes or [])
for i, member_documenter_class in enumerate(member_documenter_classes):
if isinstance(member_documenter_class, str):
module_name, _, class_name = member_documenter_class.rpartition(".")
module = importlib.import_module(module_name)
member_documenter_classes[i] = getattr(module, class_name)
api_builder = uqbar.apis.APIBuilder(
initial_source_paths=initial_source_paths,
target_directory=target_directory,
document_empty_modules=config.uqbar_api_document_empty_modules,
document_private_members=config.uqbar_api_document_private_members,
document_private_modules=config.uqbar_api_document_private_modules,
member_documenter_classes=member_documenter_classes or None,
module_documenter_class=module_documenter_class,
root_documenter_class=root_documenter_class,
title=config.uqbar_api_title,
logger_func=logger_func,
)
api_builder() | Hooks into Sphinx's ``builder-inited`` event.
Builds out the ReST API source. | entailment |
def setup(app) -> Dict[str, Any]:
"""
Sets up Sphinx extension.
"""
app.add_config_value("uqbar_api_directory_name", "api", "env")
app.add_config_value("uqbar_api_document_empty_modules", False, "env")
app.add_config_value("uqbar_api_document_private_members", False, "env")
app.add_config_value("uqbar_api_document_private_modules", False, "env")
app.add_config_value("uqbar_api_member_documenter_classes", None, "env")
app.add_config_value("uqbar_api_module_documenter_class", None, "env")
app.add_config_value("uqbar_api_root_documenter_class", None, "env")
app.add_config_value("uqbar_api_source_paths", None, "env")
app.add_config_value("uqbar_api_title", "API", "html")
app.connect("builder-inited", on_builder_inited)
return {
"version": uqbar.__version__,
"parallel_read_safe": True,
"parallel_write_safe": True,
} | Sets up Sphinx extension. | entailment |
def get_glitter_app(self, glitter_app_name):
"""
Retrieve the Glitter App config for a specific Glitter App.
"""
if not self.discovered:
self.discover_glitter_apps()
try:
glitter_app = self.glitter_apps[glitter_app_name]
return glitter_app
except KeyError:
return None | Retrieve the Glitter App config for a specific Glitter App. | entailment |
def discover_glitter_apps(self):
"""
Find all the Glitter App configurations in the current project.
"""
for app_name in settings.INSTALLED_APPS:
module_name = '{app_name}.glitter_apps'.format(app_name=app_name)
try:
glitter_apps_module = import_module(module_name)
if hasattr(glitter_apps_module, 'apps'):
self.glitter_apps.update(glitter_apps_module.apps)
except ImportError:
pass
self.discovered = True | Find all the Glitter App configurations in the current project. | entailment |
Subsets and Splits