text_prompt
stringlengths 100
17.7k
⌀ | code_prompt
stringlengths 7
9.86k
⌀ |
---|---|
<SYSTEM_TASK:>
Returns a list of ComponentResult from the json_data
<END_TASK>
<USER_TASK:>
Description:
def _create_component_results(json_data, result_key):
    """Build a list of ComponentResult objects from an API response dict.

    Every key in ``json_data`` other than ``result_key`` and ``"meta"``
    is treated as a component entry with ``result``, ``api_code`` and
    ``api_code_description`` fields.
    """
    skipped_keys = (result_key, "meta")
    results = []
    for name, details in json_data.items():
        if name in skipped_keys:
            continue
        results.append(ComponentResult(
            name,
            details["result"],
            details["api_code"],
            details["api_code_description"],
        ))
    return results
<SYSTEM_TASK:>
Returns whether there was a business logic error when fetching data
<END_TASK>
<USER_TASK:>
Description:
def has_error(self):
    """Returns whether there was a business logic error when fetching data
    for any components for this property.
    Returns:
        boolean
    """
    return any(cr.has_error() for cr in self.component_results)
<SYSTEM_TASK:>
If there were any business errors fetching data for this property,
<END_TASK>
<USER_TASK:>
Description:
def get_errors(self):
    """Collect the business-logic error of every component that failed.

    Returns:
        list of dict - one ``{component_name: error_message}`` entry per
        component whose fetch produced an error. Empty list when no
        component had an error.
    """
    # Docstring previously claimed a string/None return; the code has
    # always returned a list of per-component dicts.
    return [{cr.component_name: cr.get_error()}
            for cr in self.component_results if cr.has_error()]
<SYSTEM_TASK:>
Deserialize property json data into a Property object
<END_TASK>
<USER_TASK:>
Description:
def create_from_json(cls, json_data):
    """Deserialize property json data into a Property object
    Args:
        json_data (dict): The json data for this property
    Returns:
        Property object
    """
    # All address fields are copied verbatim from the "address_info" dict.
    address_fields = (
        "address", "block_id", "zipcode", "zipcode_plus4", "address_full",
        "city", "county_fips", "geo_precision", "lat", "lng", "slug",
        "state", "unit",
    )
    prop = Property()
    address_info = json_data["address_info"]
    for field in address_fields:
        setattr(prop, field, address_info[field])
    prop.meta = json_data.get("meta")
    prop.component_results = _create_component_results(json_data, "address_info")
    return prop
<SYSTEM_TASK:>
Deserialize block json data into a Block object
<END_TASK>
<USER_TASK:>
Description:
def create_from_json(cls, json_data):
    """Deserialize block json data into a Block object
    Args:
        json_data (dict): The json data for this block
    Returns:
        Block object
    """
    block = Block()
    block_info = json_data["block_info"]
    block.block_id = block_info["block_id"]
    # Optional fields default to None when absent from the payload.
    block.num_bins = block_info.get("num_bins")
    block.property_type = block_info.get("property_type")
    block.meta = json_data.get("meta")
    block.component_results = _create_component_results(json_data, "block_info")
    return block
<SYSTEM_TASK:>
Start yielding items when a condition arise.
<END_TASK>
<USER_TASK:>
Description:
def starts_when(iterable, condition):
    # type: (Iterable, Union[Callable, Any]) -> Iterable
    """Start yielding items when a condition arise.
    Args:
        iterable: the iterable to filter.
        condition: if the callable returns True once, start yielding
                   items. If it's not a callable, it will be converted
                   to one as `lambda condition: condition == item`.
    Example:
        >>> list(starts_when(range(10), lambda x: x > 5))
        [6, 7, 8, 9]
        >>> list(starts_when(range(10), 7))
        [7, 8, 9]
    """
    if callable(condition):
        predicate = condition
    else:
        sentinel = condition

        def predicate(item):
            return item == sentinel
    # Drop everything until the predicate first fires, then yield the rest.
    return itertools.dropwhile(lambda item: not predicate(item), iterable)
<SYSTEM_TASK:>
Stop yielding items when a condition arise.
<END_TASK>
<USER_TASK:>
Description:
def stops_when(iterable, condition):
    # type: (Iterable, Union[Callable, Any]) -> Iterable
    """Stop yielding items when a condition arise.
    Args:
        iterable: the iterable to filter.
        condition: if the callable returns True once, stop yielding
                   items. If it's not a callable, it will be converted
                   to one as `lambda condition: condition == item`.
    Example:
        >>> list(stops_when(range(10), lambda x: x > 5))
        [0, 1, 2, 3, 4, 5]
        >>> list(stops_when(range(10), 7))
        [0, 1, 2, 3, 4, 5, 6]
    """
    if callable(condition):
        predicate = condition
    else:
        sentinel = condition

        def predicate(item):
            return item == sentinel
    # Yield everything up to (and excluding) the first item that matches.
    return itertools.takewhile(lambda item: not predicate(item), iterable)
<SYSTEM_TASK:>
Returns a generator that will yield all objects from iterable, skipping
<END_TASK>
<USER_TASK:>
Description:
def skip_duplicates(iterable, key=None, fingerprints=()):
    # type: (Iterable, Callable, Any) -> Iterable
    """
    Returns a generator that will yield all objects from iterable, skipping
    duplicates.
    Duplicates are identified using the `key` function to calculate a
    unique fingerprint. This does not use natural equality, but the
    result uses a set() to remove duplicates, so defining __eq__
    on your objects would have no effect.
    By default the fingerprint is the object itself,
    which ensures the function works as-is with an iterable of primitives
    such as int, str or tuple.
    :Example:
    >>> list(skip_duplicates([1, 2, 3, 4, 4, 2, 1, 3 , 4]))
    [1, 2, 3, 4]
    The return value of `key` MUST be hashable, which means for
    non hashable objects such as dict, set or list, you need to specify
    a function that returns a hashable fingerprint.
    :Example:
    >>> list(skip_duplicates(([], [], (), [1, 2], (1, 2)),
    ... lambda x: tuple(x)))
    [[], [1, 2]]
    >>> list(skip_duplicates(([], [], (), [1, 2], (1, 2)),
    ... lambda x: (type(x), tuple(x))))
    [[], (), [1, 2], (1, 2)]
    For more complex types, such as custom classes, the default behavior
    is to remove nothing. You MUST provide a `key` function if you wish
    to filter those.
    :Example:
    >>> class Test(object):
    ...    def __init__(self, foo='bar'):
    ...        self.foo = foo
    ...    def __repr__(self):
    ...        return "Test('%s')" % self.foo
    ...
    >>> list(skip_duplicates([Test(), Test(), Test('other')]))
    [Test('bar'), Test('bar'), Test('other')]
    >>> list(skip_duplicates([Test(), Test(), Test('other')],\
              lambda x: x.foo))
    [Test('bar'), Test('other')]
    """
    fingerprints = fingerprints or set()
    fingerprint = None  # needed on type errors unrelated to hashing
    try:
        # duplicate some code to gain perf in the most common case
        if key is None:
            for x in iterable:
                if x not in fingerprints:
                    yield x
                    fingerprints.add(x)
        else:
            for x in iterable:
                fingerprint = key(x)
                if fingerprint not in fingerprints:
                    yield x
                    fingerprints.add(fingerprint)
    except TypeError:
        try:
            hash(fingerprint)
        except TypeError:
            # Fixed: the implicit string concatenation below previously
            # produced "likeint" (missing space after "like").
            raise TypeError(
                "The 'key' function returned a non hashable object of type "
                "'%s' when receiving '%s'. Make sure this function always "
                "returns a hashable object. Hint: immutable primitives like "
                "int, str or tuple, are hashable while dict, set and list are "
                "not." % (type(fingerprint), x))
        else:
            raise
<SYSTEM_TASK:>
Yields items from an iterator in iterable chunks.
<END_TASK>
<USER_TASK:>
Description:
def chunks(iterable, chunksize, cast=tuple):
    # type: (Iterable, int, Callable) -> Iterable
    """
    Yields items from an iterator in iterable chunks.

    Args:
        iterable: the source of items.
        chunksize: maximum number of items per chunk.
        cast: callable applied to each chunk before yielding (tuple by
              default). Pass ``iter`` or similar for a lazy chunk.
    """
    it = iter(iterable)
    while True:
        try:
            # Take the first item of the next chunk. When the source is
            # exhausted, end the generator explicitly: under PEP 479
            # (Python 3.7+) a StopIteration escaping from next() would
            # become a RuntimeError instead of ending iteration.
            head = next(it)
        except StopIteration:
            return
        yield cast(itertools.chain([head],
                                   itertools.islice(it, chunksize - 1)))
<SYSTEM_TASK:>
Yields iterms by bunch of a given size, but rolling only one item
<END_TASK>
<USER_TASK:>
Description:
def window(iterable, size=2, cast=tuple):
    # type: (Iterable, int, Callable) -> Iterable
    """
    Yields iterms by bunch of a given size, but rolling only one item
    in and out at a time when iterating.
    >>> list(window([1, 2, 3]))
    [(1, 2), (2, 3)]
    By default, this will cast the window to a tuple before yielding it;
    however, any function that will accept an iterable as its argument
    is a valid target.
    If you pass None as a cast value, the deque will be returned as-is,
    which is more performant. However, since only one deque is used
    for the entire iteration, you'll get the same reference everytime,
    only the deque will contains different items. The result might not
    be what you want :
    >>> list(window([1, 2, 3], cast=None))
    [deque([2, 3], maxlen=2), deque([2, 3], maxlen=2)]
    """
    source = iter(iterable)
    buf = deque(itertools.islice(source, size), size)
    # With a falsy cast the single shared deque is yielded directly
    # (faster, but every yielded value aliases the same buffer).
    emit = cast if cast else (lambda current: current)
    yield emit(buf)
    for item in source:
        buf.append(item)
        yield emit(buf)
<SYSTEM_TASK:>
Return the item at the index of this iterable or raises IndexError.
<END_TASK>
<USER_TASK:>
Description:
def at_index(iterable, index):
    # type: (Iterable[T], int) -> T
    """ Return the item at the index of this iterable or raises IndexError.
    WARNING: this will consume generators.
    Negative indices are allowed but be aware they will cause n items to
    be held in memory, where n = abs(index)
    """
    try:
        if index < 0:
            # Keep only the last abs(index) items; the wanted item is the
            # oldest one in that window. If the iterable held fewer than
            # abs(index) items the index is out of range -- previously this
            # case silently returned the first item instead of raising.
            tail = deque(iterable, maxlen=abs(index))
            if len(tail) < abs(index):
                raise IndexError
            return tail.popleft()
        return next(itertools.islice(iterable, index, index + 1))
    except (StopIteration, IndexError) as e:
        raise_from(IndexError('Index "%d" out of range' % index), e)
<SYSTEM_TASK:>
Like itertools.islice, but accept int and callables.
<END_TASK>
<USER_TASK:>
Description:
def iterslice(iterable, start=0, stop=None, step=1):
    # type: (Iterable[T], int, int, int) -> Iterable[T]
    """ Like itertools.islice, but accept int and callables.
    If `start` is a callable, start the slice after the first time
    start(item) == True.
    If `stop` is a callable, stop the slice after the first time
    stop(item) == True.
    """
    if step < 0:
        raise ValueError("The step can not be negative: '%s' given" % step)
    # A falsy stop (None, 0) is handled by islice; only a truthy non-int
    # is treated as a stop predicate.
    start_is_predicate = not isinstance(start, int)
    stop_is_predicate = bool(stop) and not isinstance(stop, int)
    if start_is_predicate and stop_is_predicate:
        # [Callable:Callable]
        return stops_when(starts_when(iterable, start), stop)
    if start_is_predicate:
        # [Callable:int]
        return starts_when(itertools.islice(iterable, None, stop, step), start)
    if stop_is_predicate:
        # [int:Callable]
        return stops_when(itertools.islice(iterable, start, None, step), stop)
    # [int:int]
    return itertools.islice(iterable, start, stop, step)
<SYSTEM_TASK:>
Lazily return the first x items from this iterable or default.
<END_TASK>
<USER_TASK:>
Description:
def firsts(iterable, items=1, default=None):
    # type: (Iterable[T], int, T) -> Iterable[T]
    """ Lazily return the first x items from this iterable or default.

    Always yields exactly `items` values: the leading items of
    `iterable`, padded with `default` when the iterable is too short.
    """
    try:
        items = int(items)
    except (ValueError, TypeError):
        raise ValueError("items should be usable as an int but is currently "
                         "'{}' of type '{}'".format(items, type(items)))
    # TODO: replace this so that it returns lasts()
    if items < 0:
        raise ValueError(ww.f("items is {items} but should "
                              "be greater than 0. If you wish to get the last "
                              "items, use the lasts() function."))
    # Count actual yields instead of reusing the loop index: the old
    # `i = 0` + `range(items - (i + 1))` padding short-changed an empty
    # iterable by one default value.
    yielded = 0
    for _, item in zip(range(items), iterable):
        yield item
        yielded += 1
    for _ in range(items - yielded):
        yield default
<SYSTEM_TASK:>
Lazily return the last x items from this iterable or default.
<END_TASK>
<USER_TASK:>
Description:
def lasts(iterable, items=1, default=None):
    # type: (Iterable[T], int, T) -> Iterable[T]
    """ Lazily return the last x items from this iterable or default. """
    # A bounded deque consumes the whole iterable but retains only the tail.
    tail = deque(iterable, maxlen=items)
    padding = items - len(tail)
    for _ in range(padding):
        yield default
    for value in tail:
        yield value
<SYSTEM_TASK:>
The callback for when a PUBLISH message is received from the server.
<END_TASK>
<USER_TASK:>
Description:
def _message(self, mqttc, userdata, msg):
    """
    The callback for when a PUBLISH message is received from the server.
    :param mqttc: The client instance for this callback
    :param userdata: The private userdata for the mqtt client. Not used in Polyglot
    :param msg: MQTT message object. Uses: msg.topic, msg.qos, msg.payload
    """
    input_commands = ('query', 'command', 'result', 'status',
                      'shortPoll', 'longPoll', 'delete')
    try:
        parsed_msg = json.loads(msg.payload.decode('utf-8'))
        # Ignore messages addressed to anything but polyglot itself.
        if 'node' in parsed_msg:
            if parsed_msg['node'] != 'polyglot':
                return
            del parsed_msg['node']
        for key in parsed_msg:
            if key == 'config':
                self.inConfig(parsed_msg[key])
            elif key == 'connected':
                self.polyglotConnected = parsed_msg[key]
            elif key == 'stop':
                LOGGER.debug('Received stop from Polyglot... Shutting Down.')
                self.stop()
            elif key in input_commands:
                self.input(parsed_msg)
            else:
                LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
    except (ValueError) as err:
        LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
<SYSTEM_TASK:>
The callback for when a DISCONNECT occurs.
<END_TASK>
<USER_TASK:>
Description:
def _disconnect(self, mqttc, userdata, rc):
    """
    The callback for when a DISCONNECT occurs.
    :param mqttc: The client instance for this callback
    :param userdata: The private userdata for the mqtt client. Not used in Polyglot
    :param rc: Result code of connection, 0 = Graceful, anything else is unclean
    """
    self.connected = False
    if rc != 0:
        # Unclean disconnect: attempt to re-establish the connection.
        LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
        try:
            self._mqttc.reconnect()
        except Exception as ex:
            # "occurred" spelling fixed to match _startMqtt's template.
            template = "An exception of type {0} occurred. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            LOGGER.error("MQTT Connection error: " + message)
    else:
        LOGGER.info("MQTT Graceful disconnection.")
<SYSTEM_TASK:>
The client start method. Starts the thread for the MQTT Client
<END_TASK>
<USER_TASK:>
Description:
def _startMqtt(self):
    """
    The client start method. Starts the thread for the MQTT Client
    and publishes the connected message.
    """
    LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
    try:
        self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
        # loop_forever blocks this thread and handles reconnects itself.
        self._mqttc.loop_forever()
    except Exception as ex:
        message = "An exception of type {0} occurred. Arguments:\n{1!r}".format(
            type(ex).__name__, ex.args)
        LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
<SYSTEM_TASK:>
The client stop method. If the client is currently connected
<END_TASK>
<USER_TASK:>
Description:
def stop(self):
    """
    The client stop method. If the client is currently connected
    stop the thread and disconnect. Publish the disconnected
    message if clean shutdown.
    """
    if self.connected:
        LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
        # Let Polyglot know we are going away before tearing down the loop.
        self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
        self._mqttc.loop_stop()
        self._mqttc.disconnect()
    try:
        # Notify everything registered for shutdown.
        for watcher in self.__stopObservers:
            watcher()
    except KeyError as e:
        # Message previously said "gotConfig" -- a copy/paste error.
        LOGGER.exception('KeyError in stop: {}'.format(e), exc_info=True)
<SYSTEM_TASK:>
Add a node to the NodeServer
<END_TASK>
<USER_TASK:>
Description:
def addNode(self, node):
    """
    Add a node to the NodeServer
    :param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
    """
    LOGGER.info('Adding node {}({})'.format(node.name, node.address))
    node_payload = {
        'address': node.address,
        'name': node.name,
        'node_def_id': node.id,
        'primary': node.primary,
        'drivers': node.drivers,
        'hint': node.hint
    }
    self.send({'addnode': {'nodes': [node_payload]}})
<SYSTEM_TASK:>
Delete a node from the NodeServer
<END_TASK>
<USER_TASK:>
Description:
def delNode(self, address):
    """
    Delete a node from the NodeServer
    :param address: Address of the node to remove.
    """
    LOGGER.info('Removing node {}'.format(address))
    self.send({'removenode': {'address': address}})
<SYSTEM_TASK:>
Get Node by Address of existing nodes.
<END_TASK>
<USER_TASK:>
Description:
def getNode(self, address):
    """
    Get Node by Address of existing nodes.

    Returns the matching node dict, or False when not found or when the
    config has not been received yet.
    """
    try:
        return next(
            (node for node in self.config['nodes']
             if node['address'] == address),
            False)
    except KeyError:
        LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
        return False
<SYSTEM_TASK:>
Save incoming config received from Polyglot to Interface.config and then do any functions
<END_TASK>
<USER_TASK:>
Description:
def inConfig(self, config):
    """
    Save incoming config received from Polyglot to Interface.config and then do any functions
    that are waiting on the config to be received.

    :param config: dict of configuration data from Polyglot; must contain
        an 'isyVersion' key.
    """
    self.config = config
    self.isyVersion = config['isyVersion']
    try:
        # Notify every registered config observer, then push the custom
        # config docs back out.
        for watcher in self.__configObservers:
            watcher(config)
        self.send_custom_config_docs()
    except KeyError as e:
        # NOTE(review): message says "gotConfig" but this method is
        # inConfig -- looks like a copy/paste; confirm before changing.
        LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
<SYSTEM_TASK:>
Just send it along if requested, should be able to delete the node even if it isn't
<END_TASK>
<USER_TASK:>
Description:
def delNode(self, address):
    """
    Just send it along if requested, should be able to delete the node even if it isn't
    in our config anywhere. Usually used for normalization.
    """
    # Drop our local record if present, then always forward the request.
    self.nodes.pop(address, None)
    self.poly.delNode(address)
<SYSTEM_TASK:>
Returns human readable string from number of seconds
<END_TASK>
<USER_TASK:>
Description:
def get_readable_time_string(seconds):
    """Returns human readable string from number of seconds"""
    seconds = int(seconds)
    minutes, seconds = divmod(seconds, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    parts = []
    for amount, unit in ((days, "Day"), (hours, "Hour"),
                         (minutes, "Minute"), (seconds, "Second")):
        if amount > 0:
            # Pluralize the unit for anything other than exactly 1.
            parts.append("%d %s" % (amount, unit if amount == 1 else unit + "s"))
    return " ".join(parts)
<SYSTEM_TASK:>
Returns a list of rate limit information from a given response's headers.
<END_TASK>
<USER_TASK:>
Description:
def get_rate_limits(response):
    """Returns a list of rate limit information from a given response's headers."""
    period_header = response.headers['X-RateLimit-Period']
    if not period_header:
        return []
    periods = period_header.split(',')
    limits = response.headers['X-RateLimit-Limit'].split(',')
    remaining = response.headers['X-RateLimit-Remaining'].split(',')
    resets = response.headers['X-RateLimit-Reset'].split(',')
    rate_limits = []
    for idx, period in enumerate(periods):
        reset_datetime = get_datetime_from_timestamp(resets[idx])
        right_now = datetime.now()
        if (reset_datetime is not None) and (right_now < reset_datetime):
            # add 1 second because of rounding
            seconds_remaining = (reset_datetime - right_now).seconds + 1
        else:
            seconds_remaining = 0
        rate_limits.append({
            "period": get_readable_time_string(period),
            "period_seconds": period,
            "request_limit": limits[idx],
            "requests_remaining": remaining[idx],
            "reset": reset_datetime,
            "reset_in_seconds": seconds_remaining,
            "time_to_reset": get_readable_time_string(seconds_remaining),
        })
    return rate_limits
<SYSTEM_TASK:>
Swap key and value
<END_TASK>
<USER_TASK:>
Description:
def swap(self):
    # type: () -> DictWrapper
    """Swap key and value
    /!\ Be carreful, if there are duplicate values, only one will
    survive /!\
    Example:
        >>> from ww import d
        >>> d({1: 2, 2: 2, 3: 3}).swap()
        {2: 2, 3: 3}
    """
    inverted_pairs = ((value, key) for key, value in self.items())
    return self.__class__(inverted_pairs)
<SYSTEM_TASK:>
Create a new d from
<END_TASK>
<USER_TASK:>
Description:
def fromkeys(cls, iterable, value=None):
    # TODO : type: (Iterable, Union[Any, Callable]) -> DictWrapper
    # https://github.com/python/mypy/issues/2254
    """Create a new d from
    Args:
        iterable: Iterable containing keys
        value: value to associate with each key.
               If callable, will be value[key]
    Returns: new DictWrapper
    Example:
        >>> from ww import d
        >>> sorted(d.fromkeys('123', value=4).items())
        [('1', 4), ('2', 4), ('3', 4)]
        >>> sorted(d.fromkeys(range(3), value=lambda e:e**2).items())
        [(0, 0), (1, 1), (2, 4)]
    """
    if callable(value):
        # Compute each value from its key.
        return cls((key, value(key)) for key in iterable)
    return cls(dict.fromkeys(iterable, value))
<SYSTEM_TASK:>
Generates an Excel workbook object given api_data returned by the Analytics API
<END_TASK>
<USER_TASK:>
Description:
def get_excel_workbook(api_data, result_info_key, identifier_keys):
    """Generates an Excel workbook object given api_data returned by the Analytics API
    Args:
        api_data: Analytics API data as a list of dicts (one per identifier)
        result_info_key: the key in api_data dicts that contains the data results
        identifier_keys: the list of keys used as requested identifiers
                         (address, zipcode, block_id, etc)
    Returns:
        raw excel file data
    """
    cleaned_data = []
    for item_data in api_data:
        result_info = item_data.pop(result_info_key, {})
        cleaned = {}
        if 'meta' in item_data:
            cleaned['meta'] = item_data.pop('meta')
        # Each remaining entry wraps its payload in a 'result' key.
        for field in item_data:
            cleaned[field] = item_data[field]['result']
        cleaned[result_info_key] = result_info
        cleaned_data.append(cleaned)
    data_list = copy.deepcopy(cleaned_data)
    workbook = openpyxl.Workbook()
    write_worksheets(workbook, data_list, result_info_key, identifier_keys)
    return workbook
<SYSTEM_TASK:>
Writes rest of the worksheets to workbook.
<END_TASK>
<USER_TASK:>
Description:
def write_worksheets(workbook, data_list, result_info_key, identifier_keys):
    """Writes rest of the worksheets to workbook.
    Args:
        workbook: workbook to write into
        data_list: Analytics API data as a list of dicts
        result_info_key: the key in api_data dicts that contains the data results
        identifier_keys: the list of keys used as requested identifiers
                         (address, zipcode, block_id, etc)
    """
    # The first item is representative: all items share the same endpoints.
    for key in get_worksheet_keys(data_list[0], result_info_key):
        sheet_name = utilities.convert_snake_to_title_case(key.split('/')[1])
        sheet_name = KEY_TO_WORKSHEET_MAP.get(sheet_name, sheet_name)
        if key == 'property/nod':
            # the property/nod endpoint needs to be split into two worksheets
            create_property_nod_worksheets(workbook, data_list, result_info_key, identifier_keys)
        else:
            # all other endpoints are written to a single worksheet;
            # Excel allows at most 31 characters in a sheet title.
            worksheet = workbook.create_sheet(title=sheet_name[:31])
            write_data(worksheet,
                       process_data(key, data_list, result_info_key, identifier_keys))
    # remove the first, unused empty sheet
    workbook.remove_sheet(workbook.active)
<SYSTEM_TASK:>
Gets all possible keys from a list of dicts, sorting by leading_columns first
<END_TASK>
<USER_TASK:>
Description:
def get_keys(data_list, leading_columns=LEADING_COLUMNS):
    """Gets all possible keys from a list of dicts, sorting by leading_columns first
    Args:
        data_list: list of dicts to pull keys from
        leading_columns: list of keys to put first in the result
    Returns:
        list of keys to be included as columns in excel worksheet
    """
    remaining = set()
    for row in data_list:
        remaining.update(row.keys())
    # Preserve leading_columns order for whichever of them are present.
    leading_keys = [col for col in leading_columns if col in remaining]
    for col in leading_keys:
        remaining.discard(col)
    return leading_keys + sorted(remaining)
<SYSTEM_TASK:>
Writes data into worksheet.
<END_TASK>
<USER_TASK:>
Description:
def write_data(worksheet, data):
    """Writes data into worksheet.
    Args:
        worksheet: worksheet to write into
        data: data to be written
    """
    if not data:
        return
    rows = data if isinstance(data, list) else [data]
    first_row = rows[0]
    if isinstance(first_row, dict):
        # header row built from the union of keys, then one row per dict
        keys = get_keys(rows)
        worksheet.append([utilities.convert_snake_to_title_case(key) for key in keys])
        for row in rows:
            worksheet.append([get_value_from_row(row, key) for key in keys])
    elif isinstance(first_row, list):
        for row in rows:
            worksheet.append([utilities.normalize_cell_value(value) for value in row])
    else:
        # scalar rows: one value per line
        for row in rows:
            worksheet.append([utilities.normalize_cell_value(row)])
<SYSTEM_TASK:>
Given a key as the endpoint name, pulls the data for that endpoint out
<END_TASK>
<USER_TASK:>
Description:
def process_data(key, data_list, result_info_key, identifier_keys):
    """ Given a key as the endpoint name, pulls the data for that endpoint out
    of the data_list for each address, processes the data into a more
    excel-friendly format and returns that data.
    Args:
        key: the endpoint name of the data to process
        data_list: the main data list to take the data from
        result_info_key: the key in api_data dicts that contains the data results
        identifier_keys: the list of keys used as requested identifiers
                         (address, zipcode, block_id, etc)
    Returns:
        A list of dicts (rows) to be written to a worksheet
    """
    master_data = []
    for item_data in data_list:
        data = item_data[key]
        if data is None:
            # No data for this endpoint/item: emit an identifier-only row.
            current_item_data = {}
        else:
            if key == 'property/value':
                current_item_data = data['value']
            elif key == 'property/details':
                # Flatten the nested 'property'/'assessment' dicts into one row.
                top_level_keys = ['property', 'assessment']
                current_item_data = flatten_top_level_keys(data, top_level_keys)
            elif key == 'property/school':
                # One row per school, tagged with its type; the school's own
                # address/zipcode are renamed so they don't collide with the
                # identifier fields added below.
                current_item_data = data['school']
                school_list = []
                for school_type_key in current_item_data:
                    schools = current_item_data[school_type_key]
                    for school in schools:
                        school['school_type'] = school_type_key
                        school['school_address'] = school['address']
                        school['school_zipcode'] = school['zipcode']
                        school_list.append(school)
                current_item_data = school_list
            elif key == 'property/value_forecast':
                # One column per forecast month.
                current_item_data = {}
                for month_key in data:
                    current_item_data[month_key] = data[month_key]['value']
            elif key in ['property/value_within_block', 'property/rental_value_within_block']:
                current_item_data = flatten_top_level_keys(data, [
                    'housecanary_value_percentile_range',
                    'housecanary_value_sqft_percentile_range',
                    'client_value_percentile_range',
                    'client_value_sqft_percentile_range'
                ])
            elif key in ['property/zip_details', 'zip/details']:
                top_level_keys = ['multi_family', 'single_family']
                current_item_data = flatten_top_level_keys(data, top_level_keys)
            else:
                # Default: pass the endpoint data through unchanged.
                current_item_data = data
        if isinstance(current_item_data, dict):
            _set_identifier_fields(current_item_data, item_data, result_info_key, identifier_keys)
            master_data.append(current_item_data)
        else:
            # it's a list
            for item in current_item_data:
                _set_identifier_fields(item, item_data, result_info_key, identifier_keys)
            master_data.extend(current_item_data)
    return master_data
<SYSTEM_TASK:>
A Django check to see if connecting to the configured default
<END_TASK>
<USER_TASK:>
Description:
def check_database_connected(app_configs, **kwargs):
    """
    A Django check to see if connecting to the configured default
    database backend succeeds.

    Returns:
        list of checks.Error - empty when the connection is healthy.
    """
    errors = []
    try:
        connection.ensure_connection()
    except OperationalError as e:
        msg = 'Could not connect to database: {!s}'.format(e)
        errors.append(checks.Error(msg,
                                   id=health.ERROR_CANNOT_CONNECT_DATABASE))
    except ImproperlyConfigured as e:
        # typo fix: message previously read "Datbase misconfigured"
        msg = 'Database misconfigured: "{!s}"'.format(e)
        errors.append(checks.Error(msg,
                                   id=health.ERROR_MISCONFIGURED_DATABASE))
    else:
        if not connection.is_usable():
            errors.append(checks.Error('Database connection is not usable',
                                       id=health.ERROR_UNUSABLE_DATABASE))
    return errors
<SYSTEM_TASK:>
A Django check to see if all migrations have been applied correctly.
<END_TASK>
<USER_TASK:>
Description:
def check_migrations_applied(app_configs, **kwargs):
    """
    A Django check to see if all migrations have been applied correctly.
    """
    from django.db.migrations.loader import MigrationLoader
    errors = []
    # Load migrations from disk/DB
    try:
        loader = MigrationLoader(connection, ignore_no_migrations=True)
    except (ImproperlyConfigured, ProgrammingError, OperationalError):
        return [checks.Info("Can't connect to database to check migrations",
                            id=health.INFO_CANT_CHECK_MIGRATIONS)]
    if app_configs:
        app_labels = [app.label for app in app_configs]
    else:
        app_labels = loader.migrated_apps
    for node, migration in loader.graph.nodes.items():
        if migration.app_label in app_labels and node not in loader.applied_migrations:
            # NB: This *must* be a Warning, not an Error, because Errors
            # prevent migrations from being run.
            errors.append(checks.Warning(
                'Unapplied migration {}'.format(migration),
                id=health.WARNING_UNAPPLIED_MIGRATION))
    return errors
<SYSTEM_TASK:>
A Django check to connect to the default redis connection
<END_TASK>
<USER_TASK:>
Description:
def check_redis_connected(app_configs, **kwargs):
    """
    A Django check to connect to the default redis connection
    using ``django_redis.get_redis_connection`` and see if Redis
    responds to a ``PING`` command.
    """
    import redis
    from django_redis import get_redis_connection
    errors = []
    try:
        redis_connection = get_redis_connection('default')
    except redis.ConnectionError as e:
        errors.append(checks.Error(
            'Could not connect to redis: {!s}'.format(e),
            id=health.ERROR_CANNOT_CONNECT_REDIS))
    except NotImplementedError as e:
        errors.append(checks.Error(
            'Redis client not available: {!s}'.format(e),
            id=health.ERROR_MISSING_REDIS_CLIENT))
    except ImproperlyConfigured as e:
        errors.append(checks.Error(
            'Redis misconfigured: "{!s}"'.format(e),
            id=health.ERROR_MISCONFIGURED_REDIS))
    else:
        if not redis_connection.ping():
            errors.append(checks.Error('Redis ping failed',
                                       id=health.ERROR_REDIS_PING_FAILED))
    return errors
<SYSTEM_TASK:>
function takes the devId of a specific device and issues a RESTFUL call to get the current
<END_TASK>
<USER_TASK:>
Description:
def get_dev_alarms(auth, url, devid=None, devip=None):
    """
    function takes the devId of a specific device and issues a RESTFUL call to get the current
    alarms for the target device.
    :param devid: int or str value of the target device
    :param devip: str of ipv4 address of the target device
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :return:list of dictionaries containing the alarms for this device
    :rtype: list
    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.plat.alarms import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> dev_alarms = get_dev_alarms(auth.creds, auth.url, devip='10.101.0.221')
    >>> assert 'ackStatus' in dev_alarms[0]
    """
    # checks to see if the imc credentials are already available
    if devip is not None:
        devid = get_dev_details(devip, auth, url)['id']
    f_url = url + "/imcrs/fault/alarm?operatorName=admin&deviceId=" + \
            str(devid) + "&desc=false"
    try:
        # The GET itself must be inside the try block; it was previously
        # outside, which made the RequestException handler unreachable.
        response = requests.get(f_url, auth=auth, headers=HEADERS)
        if response.status_code == 200:
            dev_alarm = json.loads(response.text)
            if 'alarm' in dev_alarm:
                return dev_alarm['alarm']
            return "Device has no alarms"
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + ' get_dev_alarms: An Error has occured'
<SYSTEM_TASK:>
List files in a path of a virtual folder.
<END_TASK>
<USER_TASK:>
Description:
def ls(name, path):
    """
    List files in a path of a virtual folder.
    \b
    NAME: Name of a virtual folder.
    PATH: Path inside vfolder.
    """
    with Session() as session:
        try:
            print_wait('Retrieving list of files in "{}"...'.format(path))
            result = session.VFolder(name).list_files(path)
            if 'error_msg' in result and result['error_msg']:
                print_fail(result['error_msg'])
                return
            files = json.loads(result['files'])
            table = []
            headers = ['file name', 'size', 'modified', 'mode']
            for file_info in files:
                # mtime is a unix timestamp; render it human-readable.
                mdt = datetime.fromtimestamp(file_info['mtime'])
                mtime = mdt.strftime('%b %d %Y %H:%M:%S')
                table.append([file_info['filename'], file_info['size'],
                              mtime, file_info['mode']])
            # typo fix: message previously read "Retrived."
            print_done('Retrieved.')
            print(tabulate(table, headers=headers))
        except Exception as e:
            print_error(e)
<SYSTEM_TASK:>
Invite other users to access the virtual folder.
<END_TASK>
<USER_TASK:>
Description:
def invite(name, emails, perm):
"""Invite other users to access the virtual folder.
\b
NAME: Name of a virtual folder.
EMAIL: Emails to invite.
""" |
with Session() as session:
try:
assert perm in ['rw', 'ro'], \
'Invalid permission: {}'.format(perm)
result = session.VFolder(name).invite(perm, emails)
invited_ids = result.get('invited_ids', [])
if len(invited_ids) > 0:
print('Invitation sent to:')
for invitee in invited_ids:
print('\t- ' + invitee)
else:
print('No users found. Invitation was not sent.')
except Exception as e:
print_error(e)
sys.exit(1) |
<SYSTEM_TASK:>
List and manage received invitations.
<END_TASK>
<USER_TASK:>
Description:
def invitations():
    """List and manage received invitations.

    Interactively lists pending virtual-folder invitations, then lets
    the user accept, reject, or skip one of them.
    """ |
    with Session() as session:
        try:
            result = session.VFolder.invitations()
            invitations = result.get('invitations', [])
            if len(invitations) < 1:
                print('No invitations.')
                return
            print('List of invitations (inviter, vfolder id, permission):')
            for cnt, inv in enumerate(invitations):
                # Translate the stored permission code into a readable label.
                if inv['perm'] == 'rw':
                    perm = 'read-write'
                elif inv['perm'] == 'ro':
                    perm = 'read-only'
                else:
                    perm = inv['perm']
                print('[{}] {}, {}, {}'.format(cnt + 1, inv['inviter'],
                                               inv['vfolder_id'], perm))
            selection = input('Choose invitation number to manage: ')
            # Any non-numeric input silently aborts the command.
            if selection.isdigit():
                selection = int(selection) - 1
            else:
                return
            if 0 <= selection < len(invitations):
                # Loop until the user picks a valid action.
                while True:
                    action = input('Choose action. (a)ccept, (r)eject, (c)ancel: ')
                    if action.lower() == 'a':
                        # TODO: Let user can select access_key among many.
                        # Currently, the config objects holds only one key.
                        config = get_config()
                        result = session.VFolder.accept_invitation(
                            invitations[selection]['id'], config.access_key)
                        print(result['msg'])
                        break
                    elif action.lower() == 'r':
                        result = session.VFolder.delete_invitation(
                            invitations[selection]['id'])
                        print(result['msg'])
                        break
                    elif action.lower() == 'c':
                        break
        except Exception as e:
            print_error(e)
            sys.exit(1)
<SYSTEM_TASK:>
Adds a given check callback with the provided object to the list
<END_TASK>
<USER_TASK:>
Description:
def init_check(self, check, obj):
"""
Adds a given check callback with the provided object to the list
of checks. Useful for built-ins but also advanced custom checks.
""" |
self.logger.info('Adding extension check %s' % check.__name__)
check = functools.wraps(check)(functools.partial(check, obj))
self.check(func=check) |
<SYSTEM_TASK:>
Initializes the extension with the given app, registers the
<END_TASK>
<USER_TASK:>
Description:
def init_app(self, app):
    """
    Initializes the extension with the given app, registers the
    built-in views with an own blueprint and hooks up our signal
    callbacks.
    """ |
    # If no version path was provided in the init of the Dockerflow
    # class we'll use the parent directory of the app root path.
    if self.version_path is None:
        self.version_path = os.path.dirname(app.root_path)
    # Register the three Dockerflow endpoints on the private blueprint.
    for view in (
        ('/__version__', 'version', self._version_view),
        ('/__heartbeat__', 'heartbeat', self._heartbeat_view),
        ('/__lbheartbeat__', 'lbheartbeat', self._lbheartbeat_view),
    ):
        self._blueprint.add_url_rule(*view)
    # Request lifecycle hooks used for summary logging (request id / timing).
    self._blueprint.before_app_request(self._before_request)
    self._blueprint.after_app_request(self._after_request)
    # Convert HeartbeatFailure into its prepared 500 response.
    self._blueprint.app_errorhandler(HeartbeatFailure)(self._heartbeat_exception_handler)
    app.register_blueprint(self._blueprint)
    # Track unhandled exceptions so the after-request hook can skip logging.
    got_request_exception.connect(self._got_request_exception, sender=app)
    if not hasattr(app, 'extensions'):  # pragma: nocover
        app.extensions = {}
    app.extensions['dockerflow'] = self
<SYSTEM_TASK:>
The before_request callback.
<END_TASK>
<USER_TASK:>
Description:
def _before_request(self):
"""
The before_request callback.
""" |
g._request_id = str(uuid.uuid4())
g._start_timestamp = time.time() |
<SYSTEM_TASK:>
The signal handler for the request_finished signal.
<END_TASK>
<USER_TASK:>
Description:
def _after_request(self, response):
"""
The signal handler for the request_finished signal.
""" |
if not getattr(g, '_has_exception', False):
extra = self.summary_extra()
self.summary_logger.info('', extra=extra)
return response |
<SYSTEM_TASK:>
Return the ID of the current request's user
<END_TASK>
<USER_TASK:>
Description:
def user_id(self):
    """
    Return the ID of the current request's user

    Returns ``None`` when Flask-Login is unavailable, when the app has
    no login manager configured, or when the current user is not
    authenticated.
    """ |
    # This needs flask-login to be installed
    if not has_flask_login:
        return
    # and the actual login manager installed
    if not hasattr(current_app, 'login_manager'):
        return
    # fail if no current_user was attached to the request context
    try:
        is_authenticated = current_user.is_authenticated
    except AttributeError:
        return
    # because is_authenticated could be a callable, call it
    if callable(is_authenticated):
        is_authenticated = is_authenticated()
    # and fail if the user isn't authenticated
    if not is_authenticated:
        return
    # finally return the user id
    return current_user.get_id()
<SYSTEM_TASK:>
View that returns the contents of version.json or a 404.
<END_TASK>
<USER_TASK:>
Description:
def _version_view(self):
"""
View that returns the contents of version.json or a 404.
""" |
version_json = self._version_callback(self.version_path)
if version_json is None:
return 'version.json not found', 404
else:
return jsonify(version_json) |
<SYSTEM_TASK:>
Runs all the registered checks and returns a JSON response with either
<END_TASK>
<USER_TASK:>
Description:
def _heartbeat_view(self):
    """
    Runs all the registered checks and returns a JSON response with either
    a status code of 200 or 500 depending on the results of the checks.
    Any check that returns a warning or worse (error, critical) will
    return a 500 response.
    """ |
    details = {}
    statuses = {}
    level = 0
    # Run every registered check and remember the worst severity seen.
    for name, check in self.checks.items():
        detail = self._heartbeat_check_detail(check)
        statuses[name] = detail['status']
        level = max(level, detail['level'])
        # Only non-OK checks get their full detail in the payload.
        if detail['level'] > 0:
            details[name] = detail
    payload = {
        'status': checks.level_to_text(level),
        'checks': statuses,
        'details': details,
    }
    def render(status_code):
        return make_response(jsonify(payload), status_code)
    if level < checks.WARNING:
        status_code = 200
        heartbeat_passed.send(self, level=level)
        return render(status_code)
    else:
        status_code = 500
        heartbeat_failed.send(self, level=level)
        # Raise so the registered error handler emits the prepared 500.
        raise HeartbeatFailure(response=render(status_code))
<SYSTEM_TASK:>
function takes a list of devIDs from devices discovered in the HPE IMC platform and issues a RESTFUL call to
<END_TASK>
<USER_TASK:>
Description:
def add_devs_custom_views(custom_view_name, dev_list, auth, url):
    """
    function takes a list of devIDs from devices discovered in the HPE IMC platform and issues a RESTFUL call to
    add the list of devices to a specific custom views from HPE IMC.
    :param dev_list: list containing the devID of all devices to be contained in this custom view.
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :return: str of creation results ( "view " + name + "created successfully"
    :rtype: str
    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.plat.groups import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    """ |
    # NOTE(review): raises IndexError/TypeError if the view doesn't exist;
    # the newer variant of this function checks the lookup result first.
    view_id = get_custom_views(auth, url, name=custom_view_name)[0]['symbolId']
    add_devs_custom_views_url = '/imcrs/plat/res/view/custom/'+str(view_id)
    # NOTE(review): dev_list is serialized as-is; the newer variant wraps
    # each id as {"id": dev} -- confirm which payload shape IMC expects.
    payload = '''{"device" : '''+ json.dumps(dev_list) + '''}'''
    f_url = url + add_devs_custom_views_url
    # NOTE(review): the PUT happens outside the try below, so the
    # RequestException handler can never fire; non-204 responses fall
    # through and return None.
    r = requests.put(f_url, data = payload, auth=auth, headers=HEADERS) # creates the URL using the payload variable as the contents
    try:
        if r.status_code == 204:
            print ('View ' + custom_view_name +' : Devices Successfully Added')
            return r.status_code
    except requests.exceptions.RequestException as e:
        return "Error:\n" + str(e) + ' get_custom_views: An Error has occured'
<SYSTEM_TASK:>
Function takes input of devID to issue RESTFUL call to HP IMC
<END_TASK>
<USER_TASK:>
Description:
def get_dev_vlans(auth, url, devid=None, devip=None):
"""Function takes input of devID to issue RESTUL call to HP IMC
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:param devid: str requires devId as the only input parameter
:param devip: str of ipv4 address of the target device
:return: list of dictionaries where each element of the list represents one vlan on the
target device
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.vlanm import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> vlans = get_dev_vlans('350', auth.creds, auth.url)
>>> assert type(vlans) is list
>>> assert 'vlanId' in vlans[0]
""" |
if devip is not None:
devid = get_dev_details(devip, auth, url)['id']
get_dev_vlans_url = "/imcrs/vlan?devId=" + str(devid) + "&start=0&size=5000&total=false"
f_url = url + get_dev_vlans_url
response = requests.get(f_url, auth=auth, headers=HEADERS)
try:
if response.status_code == 200:
dev_vlans = (json.loads(response.text))
return dev_vlans['vlan']
elif response.status_code == 409:
return {'vlan': 'no vlans'}
except requests.exceptions.RequestException as error:
return "Error:\n" + str(error) + ' get_dev_vlans: An Error has occured' |
<SYSTEM_TASK:>
Function takes devId as input to RESTFULL call to HP IMC platform
<END_TASK>
<USER_TASK:>
Description:
def get_trunk_interfaces(auth, url, devid=None, devip=None):
    """Function takes devId as input to RESTFULL call to HP IMC platform
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :param devid: str requires devid of the target device
    :param devip: str of ipv4 address of the target device
    :return: list of dictionaries where each element of the list represents an interface which
    has been configured as a
    VLAN trunk port
    :rtype: list
    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.plat.vlanm import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> trunk_interfaces = get_trunk_interfaces('10', auth.creds, auth.url)
    >>> assert type(trunk_interfaces) is list
    >>> assert len(trunk_interfaces[0]) == 3
    >>> assert 'allowedVlans' in trunk_interfaces[0]
    >>> assert 'ifIndex' in trunk_interfaces[0]
    >>> assert 'pvid' in trunk_interfaces[0]
    >>> get_trunk_interfaces('350', auth.creds, auth.url)
    ['No trunk inteface']
    """ |
    # Resolve the device id from its IP address when only the IP is given.
    if devip is not None:
        devid = get_dev_details(devip, auth, url)['id']
    get_trunk_interfaces_url = "/imcrs/vlan/trunk?devId=" + str(devid) + \
                               "&start=1&size=5000&total=false"
    f_url = url + get_trunk_interfaces_url
    response = requests.get(f_url, auth=auth, headers=HEADERS)
    try:
        if response.status_code == 200:
            dev_trunk_interfaces = (json.loads(response.text))
            # Two keys in the payload indicate trunk data is present.
            if len(dev_trunk_interfaces) == 2:
                # A single trunk port comes back as a bare dict; wrap it
                # so callers always receive a list.
                if isinstance(dev_trunk_interfaces['trunkIf'], list):
                    return dev_trunk_interfaces['trunkIf']
                elif isinstance(dev_trunk_interfaces['trunkIf'], dict):
                    return [dev_trunk_interfaces['trunkIf']]
            else:
                # Historical sentinel (typo included) -- the doctest in the
                # docstring above depends on this exact string.
                dev_trunk_interfaces['trunkIf'] = ["No trunk inteface"]
                return dev_trunk_interfaces['trunkIf']
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + ' get_trunk_interfaces: An Error has occured'
<SYSTEM_TASK:>
Function takes devid pr devip as input to RESTFUL call to HP IMC platform
<END_TASK>
<USER_TASK:>
Description:
def get_device_access_interfaces(auth, url, devid=None, devip=None):
"""
Function takes devid pr devip as input to RESTFUL call to HP IMC platform
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:param devid: str requires devid of the target device
:param devip: str of ipv4 address of the target device
:return: list of dictionaries where each element of the list represents an interface which
has been configured as a
VLAN access port
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.vlanm import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> access_interfaces = get_device_access_interfaces('10', auth.creds, auth.url)
>>> assert type(access_interfaces) is list
>>> assert (len(access_interfaces[0])) is 2
>>> assert 'ifIndex' in access_interfaces[0]
>>> assert 'pvid' in access_interfaces[0]
""" |
if devip is not None:
devid = get_dev_details(devip, auth, url)['id']
get_access_interface_vlan_url = "/imcrs/vlan/access?devId=" + str(devid) + \
"&start=1&size=500&total=false"
f_url = url + get_access_interface_vlan_url
response = requests.get(f_url, auth=auth, headers=HEADERS)
try:
if response.status_code == 200:
dev_access_interfaces = (json.loads(response.text))
if type(dev_access_interfaces['accessIf']) is dict:
return [dev_access_interfaces['accessIf']]
if len(dev_access_interfaces) == 2:
return dev_access_interfaces['accessIf']
else:
dev_access_interfaces['accessIf'] = ["No access inteface"]
return dev_access_interfaces['accessIf']
except requests.exceptions.RequestException as error:
return "Error:\n" + str(error) + " get_device_access_interfaces: An Error has occured" |
<SYSTEM_TASK:>
Function takes devId as input to RESTFUL call to HP IMC platform
<END_TASK>
<USER_TASK:>
Description:
def get_device_hybrid_interfaces(auth, url, devid=None, devip=None):
    """
    Function takes devId as input to RESTFUL call to HP IMC platform
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :param devid: str requires devid of the target device
    :param devip: str of ipv4 address of the target device
    :return: list of dictionaries where each element of the list represents an interface which
    has been configured as a
    VLAN hybrid port
    :rtype: list
    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.plat.vlanm import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> hybrid_interfaces = get_device_hybrid_interfaces('10', auth.creds, auth.url)
    >>> assert type(hybrid_interfaces) is list
    >>> assert (len(hybrid_interfaces[0])) is 2
    >>> assert 'ifIndex' in hybrid_interfaces[0]
    >>> assert 'pvid' in hybrid_interfaces[0]
    """ |
    # Resolve the device id from its IP address when only the IP is given.
    if devip is not None:
        devid = get_dev_details(devip, auth, url)['id']
    get_hybrid_interface_vlan_url = "/imcrs/vlan/hybrid?devId=" + str(devid) + \
                                    "&start=1&size=500&total=false"
    f_url = url + get_hybrid_interface_vlan_url
    response = requests.get(f_url, auth=auth, headers=HEADERS)
    try:
        if response.status_code == 200:
            dev_hybrid_interfaces = (json.loads(response.text))
            # Two keys in the payload indicate hybrid data is present.
            if len(dev_hybrid_interfaces) == 2:
                dev_hybrid = dev_hybrid_interfaces['hybridIf']
                # A single interface comes back as a bare dict; wrap it
                # so callers always receive a list.
                if isinstance(dev_hybrid, dict):
                    dev_hybrid = [dev_hybrid]
                return dev_hybrid
            else:
                dev_hybrid_interfaces['hybridIf'] = ["No hybrid inteface"]
                return dev_hybrid_interfaces['hybridIf']
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + " get_device_hybrid_interfaces: An Error has occured"
<SYSTEM_TASK:>
Quick aliases for run command.
<END_TASK>
<USER_TASK:>
Description:
def run_alias():
"""
Quick aliases for run command.
""" |
mode = Path(sys.argv[0]).stem
help = True if len(sys.argv) <= 1 else False
if mode == 'lcc':
sys.argv.insert(1, 'c')
elif mode == 'lpython':
sys.argv.insert(1, 'python')
sys.argv.insert(1, 'run')
if help:
sys.argv.append('--help')
main.main(prog_name='backend.ai') |
<SYSTEM_TASK:>
Returns list of rate limit information from the response
<END_TASK>
<USER_TASK:>
Description:
def rate_limits(self):
    """Returns list of rate limit information from the response

    The parsed value is cached on the instance after the first call.
    NOTE(review): the cache test uses truthiness, so an empty rate-limit
    list would be re-parsed on every call -- confirm that is intended.
    """ |
    if not self._rate_limits:
        self._rate_limits = utilities.get_rate_limits(self._response)
    return self._rate_limits
<SYSTEM_TASK:>
function takes no input and issues a RESTFUL call to get a list of custom views from HPE IMC.
<END_TASK>
<USER_TASK:>
Description:
def create_custom_views(auth, url, name=None, upperview=None):
"""
function takes no input and issues a RESTFUL call to get a list of custom views from HPE IMC.
Optional Name input will return only the specified view.
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:param name: string containg the name of the desired custom view
:param upperview: str contraining the name of the desired parent custom view
:return: str of creation results ( "view " + name + "created successfully"
:rtype: str
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.groups import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
#Create L1 custom view
>>> create_custom_views(auth.creds, auth.url, name='L1 View')
'View L1 View created successfully'
>>> view_1 =get_custom_views( auth.creds, auth.url, name = 'L1 View')
>>> assert type(view_1) is list
>>> assert view_1[0]['name'] == 'L1 View'
#Create Nested custome view
>>> create_custom_views(auth.creds, auth.url, name='L2 View', upperview='L1 View')
'View L2 View created successfully'
>>> view_2 = get_custom_views( auth.creds, auth.url, name = 'L2 View')
>>> assert type(view_2) is list
>>> assert view_2[0]['name'] == 'L2 View'
""" |
create_custom_views_url = '/imcrs/plat/res/view/custom?resPrivilegeFilter=false&desc=false' \
'&total=false'
f_url = url + create_custom_views_url
if upperview is None:
payload = '''{ "name": "''' + name + '''",
"upLevelSymbolId" : ""}'''
else:
parentviewid = get_custom_views(auth, url, upperview)[0]['symbolId']
payload = '''{ "name": "''' + name + '''",
"upLevelSymbolId" : "''' + str(parentviewid) + '''"}'''
response = requests.post(f_url, data=payload, auth=auth, headers=HEADERS)
try:
if response.status_code == 201:
print('View ' + name + ' created successfully')
return response.status_code
elif response.status_code == 409:
print("View " + name + " already exists")
return response.status_code
else:
return response.status_code
except requests.exceptions.RequestException as error:
return "Error:\n" + str(error) + ' get_custom_views: An Error has occured' |
<SYSTEM_TASK:>
function takes a list of devIDs from devices discovered in the HPE IMC platform and issues a
<END_TASK>
<USER_TASK:>
Description:
def add_devs_custom_views(custom_view_name, dev_list, auth, url):
"""
function takes a list of devIDs from devices discovered in the HPE IMC platform and issues a
RESTFUL call to add the list of devices to a specific custom views from HPE IMC.
:param custom_view_name: str of the target custom view name
:param dev_list: list containing the devID of all devices to be contained in this custom view.
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: str of creation results ( "view " + name + "created successfully"
:rtype: str
>>> from pyhpeimc.auth import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
""" |
view_id = get_custom_views(auth, url, name=custom_view_name)
if view_id is None:
print("View " + custom_view_name + " doesn't exist")
return view_id
view_id = get_custom_views(auth, url, name=custom_view_name)[0]['symbolId']
add_devs_custom_views_url = '/imcrs/plat/res/view/custom/' + str(view_id)
device_list = []
for dev in dev_list:
new_dev = {"id": dev}
device_list.append(new_dev)
payload = '''{"device" : ''' + json.dumps(device_list) + '''}'''
print(payload)
f_url = url + add_devs_custom_views_url
response = requests.put(f_url, data=payload, auth=auth, headers=HEADERS)
try:
if response.status_code == 204:
print('View ' + custom_view_name + ' : Devices Successfully Added')
return response.status_code
except requests.exceptions.RequestException as error:
return "Error:\n" + str(error) + ' get_custom_views: An Error has occured' |
<SYSTEM_TASK:>
Format and encode dict for make authentication on microsoft
<END_TASK>
<USER_TASK:>
Description:
def _set_format_oauth(self):
"""
Format and encode dict for make authentication on microsoft
servers.
""" |
format_oauth = urllib.parse.urlencode({
'client_id': self._client_id,
'client_secret': self._client_secret,
'scope': self._url_request,
'grant_type': self._grant_type
}).encode("utf-8")
return format_oauth |
<SYSTEM_TASK:>
This is the final step, where the request is made, the data is
<END_TASK>
<USER_TASK:>
Description:
def _make_request(self, params, translation_url, headers):
"""
This is the final step, where the request is made, the data is
retrieved and returned.
""" |
resp = requests.get(translation_url, params=params, headers=headers)
resp.encoding = "UTF-8-sig"
result = resp.json()
return result |
<SYSTEM_TASK:>
Upload files to user's home folder.
<END_TASK>
<USER_TASK:>
Description:
def upload(sess_id_or_alias, files):
"""
Upload files to user's home folder.
\b
SESSID: Session ID or its alias given when creating the session.
FILES: Path to upload.
""" |
if len(files) < 1:
return
with Session() as session:
try:
print_wait('Uploading files...')
kernel = session.Kernel(sess_id_or_alias)
kernel.upload(files, show_progress=True)
print_done('Uploaded.')
except Exception as e:
print_error(e)
sys.exit(1) |
<SYSTEM_TASK:>
Download files from a running container.
<END_TASK>
<USER_TASK:>
Description:
def download(sess_id_or_alias, files, dest):
"""
Download files from a running container.
\b
SESSID: Session ID or its alias given when creating the session.
FILES: Paths inside container.
""" |
if len(files) < 1:
return
with Session() as session:
try:
print_wait('Downloading file(s) from {}...'
.format(sess_id_or_alias))
kernel = session.Kernel(sess_id_or_alias)
kernel.download(files, dest, show_progress=True)
print_done('Downloaded to {}.'.format(dest.resolve()))
except Exception as e:
print_error(e)
sys.exit(1) |
<SYSTEM_TASK:>
List files in a path of a running container.
<END_TASK>
<USER_TASK:>
Description:
def ls(sess_id_or_alias, path):
"""
List files in a path of a running container.
\b
SESSID: Session ID or its alias given when creating the session.
PATH: Path inside container.
""" |
with Session() as session:
try:
print_wait('Retrieving list of files in "{}"...'.format(path))
kernel = session.Kernel(sess_id_or_alias)
result = kernel.list_files(path)
if 'errors' in result and result['errors']:
print_fail(result['errors'])
sys.exit(1)
files = json.loads(result['files'])
table = []
headers = ['file name', 'size', 'modified', 'mode']
for file in files:
mdt = datetime.fromtimestamp(file['mtime'])
mtime = mdt.strftime('%b %d %Y %H:%M:%S')
row = [file['filename'], file['size'], mtime, file['mode']]
table.append(row)
print_done('Retrived.')
print('Path in container:', result['abspath'], end='')
print(tabulate(table, headers=headers))
except Exception as e:
print_error(e)
sys.exit(1) |
<SYSTEM_TASK:>
Function to set the password of an existing operator
<END_TASK>
<USER_TASK:>
Description:
def set_operator_password(operator, password, auth, url):
    """
    Function to set the password of an existing operator
    :param operator: str Name of the operator account
    :param password: str New password
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :return: int of 204 if successfull,
    :rtype: int
    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.plat.operator import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> operator = { "fullName" : "test administrator", "sessionTimeout" : "30",
    "password" : "password","operatorGroupId" : "1",
    "name" : "testadmin","desc" : "test admin account",
    "defaultAcl" : "","authType" : "0"}
    >>> new_operator = create_operator(operator, auth.creds, auth.url)
    >>> set_new_password = set_operator_password('testadmin', 'newpassword', auth.creds, auth.url)
    >>> assert type(set_new_password) is int
    >>> assert set_new_password == 204
    """ |
    # Fall back to an interactive prompt when no operator name was given.
    if operator is None:
        operator = input(
            '''\n What is the username you wish to change the password?''')
    oper_id = ''
    authtype = None
    # Look up the operator's internal id and auth type by name.
    plat_oper_list = get_plat_operator(auth, url)
    for i in plat_oper_list:
        if i['name'] == operator:
            oper_id = i['id']
            authtype = i['authType']
    if oper_id == '':
        return "User does not exist"
    change_pw_url = "/imcrs/plat/operator/"
    f_url = url + change_pw_url + oper_id
    # Fall back to an interactive prompt when no password was given.
    if password is None:
        password = input(
            '''\n ============ Please input the operators new password:\n ============ ''')
    # authType must be resent alongside the new password or IMC rejects it.
    payload = json.dumps({'password': password, 'authType': authtype})
    response = requests.put(f_url, data=payload, auth=auth, headers=HEADERS)
    try:
        if response.status_code == 204:
            # print("Operator:" + operator +
            #      " password was successfully changed")
            return response.status_code
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + ' set_operator_password: An Error has occured'
<SYSTEM_TASK:>
Function takes no inputs and returns a list of dictionaries of all of the operators currently
<END_TASK>
<USER_TASK:>
Description:
def get_plat_operator(auth, url):
"""
Funtion takes no inputs and returns a list of dictionaties of all of the operators currently
configured on the HPE IMC system
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: list of dictionaries where each element represents one operator
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.operator import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> plat_operators = get_plat_operator(auth.creds, auth.url)
>>> assert type(plat_operators) is list
>>> assert 'name' in plat_operators[0]
""" |
f_url = url + '/imcrs/plat/operator?start=0&size=1000&orderBy=id&desc=false&total=false'
try:
response = requests.get(f_url, auth=auth, headers=HEADERS)
plat_oper_list = json.loads(response.text)['operator']
if isinstance(plat_oper_list, dict):
oper_list = [plat_oper_list]
return oper_list
return plat_oper_list
except requests.exceptions.RequestException as error:
print("Error:\n" + str(error) + ' get_plat_operator: An Error has occured')
return "Error:\n" + str(error) + ' get_plat_operator: An Error has occured' |
<SYSTEM_TASK:>
Makes a request to the specified url endpoint with the
<END_TASK>
<USER_TASK:>
Description:
def execute_request(self, url, http_method, query_params, post_data):
"""Makes a request to the specified url endpoint with the
specified http method, params and post data.
Args:
url (string): The url to the API without query params.
Example: "https://api.housecanary.com/v2/property/value"
http_method (string): The http method to use for the request.
query_params (dict): Dictionary of query params to add to the request.
post_data: Json post data to send in the body of the request.
Returns:
The result of calling this instance's OutputGenerator process_response method
on the requests.Response object.
If no OutputGenerator is specified for this instance, returns the requests.Response.
""" |
response = requests.request(http_method, url, params=query_params,
auth=self._auth, json=post_data,
headers={'User-Agent': USER_AGENT})
if isinstance(self._output_generator, str) and self._output_generator.lower() == "json":
# shortcut for just getting json back
return response.json()
elif self._output_generator is not None:
return self._output_generator.process_response(response)
else:
return response |
<SYSTEM_TASK:>
Makes a POST request to the specified url endpoint.
<END_TASK>
<USER_TASK:>
Description:
def post(self, url, post_data, query_params=None):
"""Makes a POST request to the specified url endpoint.
Args:
url (string): The url to the API without query params.
Example: "https://api.housecanary.com/v2/property/value"
post_data: Json post data to send in the body of the request.
query_params (dict): Optional. Dictionary of query params to add to the request.
Returns:
The result of calling this instance's OutputGenerator process_response method
on the requests.Response object.
If no OutputGenerator is specified for this instance, returns the requests.Response.
""" |
if query_params is None:
query_params = {}
return self.execute_request(url, "POST", query_params, post_data) |
<SYSTEM_TASK:>
function requires no inputs and returns all IP address scopes currently configured on the HPE
<END_TASK>
<USER_TASK:>
Description:
def get_ip_scope(auth, url, scopeid=None, ):
"""
function requires no inputs and returns all IP address scopes currently configured on the HPE
IMC server. If the optional scopeid parameter is included, this will automatically return
only the desired scope id.
:param scopeid: integer of the desired scope id ( optional )
:param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
:param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
:return: list of dictionary objects where each element of the list represents one IP scope
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.termaccess import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> ip_scope_list = get_ip_scope(auth.creds, auth.url)
>>> assert type(ip_scope_list) is list
>>> assert 'ip' in ip_scope_list[0]
""" |
if scopeid is None:
get_ip_scope_url = "/imcrs/res/access/assignedIpScope"
else:
get_ip_scope_url = "/imcrs/res/access/assignedIpScope/ip?ipScopeId=" + str(scopeid)
f_url = url + get_ip_scope_url
response = requests.get(f_url, auth=auth, headers=HEADERS)
try:
if response.status_code == 200:
ipscopelist = (json.loads(response.text))['assignedIpScope']
if isinstance(ipscopelist, list):
return ipscopelist
elif isinstance(ipscopelist, dict):
return [ipscopelist]
except requests.exceptions.RequestException as error:
return "Error:\n" + str(error) + " get_ip_scope: An Error has occured" |
<SYSTEM_TASK:>
A built-in check to see if connecting to the configured default
<END_TASK>
<USER_TASK:>
Description:
def check_database_connected(db):
"""
A built-in check to see if connecting to the configured default
database backend succeeds.
It's automatically added to the list of Dockerflow checks if a
:class:`~flask_sqlalchemy.SQLAlchemy` object is passed
to the :class:`~dockerflow.flask.app.Dockerflow` class during
instantiation, e.g.::
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from dockerflow.flask import Dockerflow
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/test.db'
db = SQLAlchemy(app)
dockerflow = Dockerflow(app, db=db)
""" |
from sqlalchemy.exc import DBAPIError, SQLAlchemyError
errors = []
try:
with db.engine.connect() as connection:
connection.execute('SELECT 1;')
except DBAPIError as e:
msg = 'DB-API error: {!s}'.format(e)
errors.append(Error(msg, id=health.ERROR_DB_API_EXCEPTION))
except SQLAlchemyError as e:
msg = 'Database misconfigured: "{!s}"'.format(e)
errors.append(Error(msg, id=health.ERROR_SQLALCHEMY_EXCEPTION))
return errors |
<SYSTEM_TASK:>
A built-in check to see if all migrations have been applied correctly.
<END_TASK>
<USER_TASK:>
Description:
def check_migrations_applied(migrate):
    """
    A built-in check to see if all migrations have been applied correctly.
    It's automatically added to the list of Dockerflow checks if a
    `flask_migrate.Migrate <https://flask-migrate.readthedocs.io/>`_ object
    is passed to the :class:`~dockerflow.flask.app.Dockerflow` class during
    instantiation, e.g.::
        from flask import Flask
        from flask_migrate import Migrate
        from flask_sqlalchemy import SQLAlchemy
        from dockerflow.flask import Dockerflow
        app = Flask(__name__)
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/test.db'
        db = SQLAlchemy(app)
        migrate = Migrate(app, db)
        dockerflow = Dockerflow(app, db=db, migrate=migrate)
    """ |
    errors = []
    from alembic.migration import MigrationContext
    from alembic.script import ScriptDirectory
    from sqlalchemy.exc import DBAPIError, SQLAlchemyError
    # pass in Migrate.directory here explicitly to be compatible with
    # older versions of Flask-Migrate that required the directory to be passed
    config = migrate.get_config(directory=migrate.directory)
    script = ScriptDirectory.from_config(config)
    try:
        # Compare the revisions recorded in the DB with the script heads.
        with migrate.db.engine.connect() as connection:
            context = MigrationContext.configure(connection)
            db_heads = set(context.get_current_heads())
            script_heads = set(script.get_heads())
    except (DBAPIError, SQLAlchemyError) as e:
        # Can't reach the DB at all -- report as informational, not an error.
        msg = "Can't connect to database to check migrations: {!s}".format(e)
        return [Info(msg, id=health.INFO_CANT_CHECK_MIGRATIONS)]
    if db_heads != script_heads:
        # NOTE(review): the message lists all script heads, not only the
        # unapplied ones -- confirm that is the intended wording.
        msg = "Unapplied migrations found: {}".format(', '.join(script_heads))
        errors.append(Warning(msg, id=health.WARNING_UNAPPLIED_MIGRATION))
    return errors
<SYSTEM_TASK:>
A built-in check to connect to Redis using the given client and see
<END_TASK>
<USER_TASK:>
Description:
def check_redis_connected(client):
    """
    A built-in Dockerflow health check that connects to Redis with the given
    client and verifies it answers the ``PING`` command.

    It's automatically added to the list of Dockerflow checks if a
    :class:`~redis.StrictRedis` instance is passed
    to the :class:`~dockerflow.flask.app.Dockerflow` class during
    instantiation, e.g.::

        import redis
        from flask import Flask
        from dockerflow.flask import Dockerflow

        app = Flask(__name__)
        redis_client = redis.StrictRedis(host='localhost', port=6379, db=0)
        dockerflow = Dockerflow(app, redis=redis_client)

    An alternative approach to instantiating a Redis client directly
    would be using the `Flask-Redis <https://github.com/underyx/flask-redis>`_
    Flask extension::

        from flask import Flask
        from flask_redis import FlaskRedis
        from dockerflow.flask import Dockerflow

        app = Flask(__name__)
        app.config['REDIS_URL'] = 'redis://:password@localhost:6379/0'
        redis_store = FlaskRedis(app)
        dockerflow = Dockerflow(app, redis=redis_store)

    :param client: a Redis client object exposing ``ping()``.
    :returns: a list of check ``Error`` objects; empty when Redis responds.
    """
    import redis

    problems = []
    try:
        pong = client.ping()
    except redis.ConnectionError as exc:
        problems.append(Error('Could not connect to redis: {!s}'.format(exc),
                              id=health.ERROR_CANNOT_CONNECT_REDIS))
    except redis.RedisError as exc:
        problems.append(Error('Redis error: "{!s}"'.format(exc),
                              id=health.ERROR_REDIS_EXCEPTION))
    else:
        # PING returned but with a falsy payload -- treat as failure.
        if not pong:
            problems.append(Error('Redis ping failed',
                                  id=health.ERROR_REDIS_PING_FAILED))
    return problems
<SYSTEM_TASK:>
Creates a new keypair resource policy with the given options.
<END_TASK>
<USER_TASK:>
Description:
async def create(cls, name: str,
                 default_for_unspecified: int,
                 total_resource_slots: int,
                 max_concurrent_sessions: int,
                 max_containers_per_session: int,
                 max_vfolder_count: int,
                 max_vfolder_size: int,
                 idle_timeout: int,
                 allowed_vfolder_hosts: Sequence[str],
                 fields: Iterable[str] = None) -> dict:
    """
    Creates a new keypair resource policy with the given options.
    You need an admin privilege for this operation.

    :param name: unique name of the new resource policy.
    :param fields: GraphQL fields of the created policy to return;
        defaults to ``('name',)`` when not given.
    :returns: the ``create_keypair_resource_policy`` payload of the
        GraphQL response (contains ``ok``, ``msg`` and the requested
        ``resource_policy`` fields).
    """ |
    # NOTE: default is applied here rather than in the signature so the
    # published interface keeps `fields=None`.
    if fields is None:
        fields = ('name',)
    # Build the mutation; `$fields` is a placeholder substituted below,
    # not a GraphQL variable.
    q = 'mutation($name: String!, $input: CreateKeyPairResourcePolicyInput!) {' \
        + \
        ' create_keypair_resource_policy(name: $name, props: $input) {' \
        ' ok msg resource_policy { $fields }' \
        ' }' \
        '}'
    q = q.replace('$fields', ' '.join(fields))
    variables = {
        'name': name,
        'input': {
            'default_for_unspecified': default_for_unspecified,
            'total_resource_slots': total_resource_slots,
            'max_concurrent_sessions': max_concurrent_sessions,
            'max_containers_per_session': max_containers_per_session,
            'max_vfolder_count': max_vfolder_count,
            'max_vfolder_size': max_vfolder_size,
            'idle_timeout': idle_timeout,
            'allowed_vfolder_hosts': allowed_vfolder_hosts,
        },
    }
    # All admin GraphQL calls go through the shared /admin/graphql endpoint.
    rqst = Request(cls.session, 'POST', '/admin/graphql')
    rqst.set_json({
        'query': q,
        'variables': variables,
    })
    async with rqst.fetch() as resp:
        data = await resp.json()
        return data['create_keypair_resource_policy'] |
<SYSTEM_TASK:>
perform API auth test returning user and team
<END_TASK>
<USER_TASK:>
Description:
def login(self):
    """Run the API auth test and return the team name plus calling user.

    :returns: tuple of ``(team, User)`` taken from the auth-test response.
    """
    log.debug('performing auth test')
    response = self._get(urls['test'])
    me = User({'name': response['user'], 'id': response['user_id']})
    # Warm the directory cache so subsequent lookups resolve immediately.
    self._refresh()
    return response['team'], me
<SYSTEM_TASK:>
Return Channel object for a given Slack ID or name
<END_TASK>
<USER_TASK:>
Description:
def channel(self, match):
    """Return the Channel object matching a Slack ID or a channel name.

    A 9-character string starting with C, G or D is treated as a Slack
    channel/group/DM ID; anything else is looked up by name.
    """
    looks_like_id = len(match) == 9 and match[0] in ('C', 'G', 'D')
    attr = 'id' if looks_like_id else 'name'
    return self._lookup(Channel, attr, match)
<SYSTEM_TASK:>
refresh internal directory cache
<END_TASK>
<USER_TASK:>
Description:
def _refresh(self):
    """Rebuild the internal user and channel directory caches."""
    log.debug('refreshing directory cache')
    fresh_users = list(self._user_gen())
    fresh_channels = list(self._channel_gen())
    self._users.update(fresh_users)
    self._channels.update(fresh_channels)
<SYSTEM_TASK:>
lookup object in directory with attribute matching value
<END_TASK>
<USER_TASK:>
Description:
def match(self, attr, val):
    """Return the first directory entry whose `attr` equals `val`.

    Returns None when nothing matches. Iteration happens under the
    directory lock so a concurrent refresh cannot interleave.
    """
    with self._lock:
        for entry in self:
            if getattr(entry, attr) == val:
                return entry
<SYSTEM_TASK:>
A function that lazily evaluates a biggus.Chunk. This is useful for
<END_TASK>
<USER_TASK:>
Description:
def biggus_chunk(chunk_key, biggus_array, masked):
    """
    Lazily realise a biggus array section into a biggus.Chunk.

    Deferring the realisation lets this be used as a dask task, so the
    chunk data is not computed while merely building the graph.
    """
    realise = biggus_array.masked_array if masked else biggus_array.ndarray
    return biggus._init.Chunk(chunk_key, realise())
<SYSTEM_TASK:>
Create a lazy chunk creating function with a nice name that is suitable
<END_TASK>
<USER_TASK:>
Description:
def lazy_chunk_creator(name):
    """
    Return a lazy chunk-creating function whose ``__name__`` is `name`,
    so it renders nicely as a node label in a dask graph.
    """
    # TODO: Could this become a LazyChunk class?
    def biggus_chunk(chunk_key, biggus_array, masked):
        """
        Lazily realise a biggus array section into a biggus.Chunk when the
        dask task is actually executed, not when the graph is built.
        """
        data = biggus_array.masked_array() if masked else biggus_array.ndarray()
        return biggus._init.Chunk(chunk_key, data)

    biggus_chunk.__name__ = name
    return biggus_chunk
<SYSTEM_TASK:>
Recursive function that returns the dask items for the given array.
<END_TASK>
<USER_TASK:>
Description:
def _make_nodes(self, dsk_graph, array, iteration_order, masked,
                top=False):
    """
    Recursive function that returns the dask items for the given array.
    NOTE: Currently assuming that all tasks are a tuple, with the second
    item being the keys used to index the source of the respective input
    array.
    """ |
    # Memoise per (array, iteration order, masked) so shared sub-arrays
    # are only expanded into chunk tasks once.
    cache_key = _array_id(array, iteration_order, masked)
    # By the end of this function Nodes will be a dictionary with one item
    # per chunk to be processed for this array.
    nodes = self._node_cache.get(cache_key, None)
    if nodes is None:
        if hasattr(array, 'streams_handler'):
            # Aggregations/elementwise ops delegate to the stream handler
            # path, which wires their source arrays recursively.
            nodes = self._make_stream_handler_nodes(dsk_graph, array,
                                                    iteration_order,
                                                    masked)
        else:
            # Leaf array: emit one lazy-chunk task per chunk index.
            nodes = {}
            chunks = []
            name = '{}\n{}'.format(array.__class__.__name__, array.shape)
            biggus_chunk_func = self.lazy_chunk_creator(name)
            chunk_index_gen = biggus._init.ProducerNode.chunk_index_gen
            for chunk_key in chunk_index_gen(array.shape,
                                             iteration_order[::-1]):
                biggus_array = array[chunk_key]
                pretty_key = ', '.join(map(slice_repr, chunk_key))
                # uuid4 guarantees a unique task id even when two chunks
                # share shape and source key.
                chunk_id = ('chunk shape: {}\nsource key: [{}]\n\n{}'
                            ''.format(biggus_array.shape, pretty_key,
                                      uuid.uuid4()))
                task = (biggus_chunk_func, chunk_key, biggus_array, masked)
                chunks.append(task)
                assert chunk_id not in dsk_graph
                dsk_graph[chunk_id] = task
                nodes[chunk_id] = task
        self._node_cache[cache_key] = nodes
    return nodes |
<SYSTEM_TASK:>
Waits until the element identified by `locator` has focus.
<END_TASK>
<USER_TASK:>
Description:
def wait_until_element_has_focus(self, locator, timeout=None):
    """Waits until the element identified by `locator` has focus.

    You might rather want to use `Element Focus Should Be Set`

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | timeout | maximum time to wait before the function throws an element not found error (default=None) | 5s |"""
    message = "Waiting for focus on '%s'" % locator
    self._info(message)
    # Retry the focus check until it passes or the timeout elapses.
    self._wait_until_no_error(timeout, self._check_element_focus_exp, True, locator, timeout)
<SYSTEM_TASK:>
Waits until the element identified by `locator` doesn't have focus.
<END_TASK>
<USER_TASK:>
Description:
def wait_until_element_does_not_have_focus(self, locator, timeout=None):
    """Waits until the element identified by `locator` doesn't have focus.

    You might rather want to use `Element Focus Should Not Be Set`

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | timeout | maximum time to wait before the function throws an element not found error (default=None) | 5s |"""
    message = "Waiting until '%s' does not have focus" % locator
    self._info(message)
    # Retry the (negated) focus check until it passes or the timeout elapses.
    self._wait_until_no_error(timeout, self._check_element_focus_exp, False, locator, timeout)
<SYSTEM_TASK:>
Waits until the element identified by `locator` value is exactly the
<END_TASK>
<USER_TASK:>
Description:
def wait_until_element_value_is(self, locator, expected, strip=False, timeout=None):
    """Waits until the element identified by `locator` value is exactly the
    expected value. You might want to use `Element Value Should Be` instead.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | expected | expected value | My Name Is Slim Shady |
    | strip | boolean, determines whether it should strip the value of the field before comparison | ${True} / ${False} |
    | timeout | maximum time to wait before the function throws an element not found error (default=None) | 5s |"""
    message = "Waiting for '%s' value to be '%s'" % (locator, expected)
    self._info(message)
    # False => exact-match mode of the value check.
    self._wait_until_no_error(timeout, self._check_element_value_exp, False, locator, expected, strip, timeout)
<SYSTEM_TASK:>
Waits until the element identified by `locator` contains
<END_TASK>
<USER_TASK:>
Description:
def wait_until_element_value_contains(self, locator, expected, timeout=None):
    """Waits until the element identified by `locator` contains
    the expected value. You might want to use `Element Value Should Contain` instead.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | expected | expected value | Slim Shady |
    | timeout | maximum time to wait before the function throws an element not found error (default=None) | 5s |"""
    message = "Waiting for '%s' value to contain '%s'" % (locator, expected)
    self._info(message)
    # True => substring mode of the value check; strip is forced off.
    self._wait_until_no_error(timeout, self._check_element_value_exp, True, locator, expected, False, timeout)
<SYSTEM_TASK:>
Sets focus on the element identified by `locator`. Should
<END_TASK>
<USER_TASK:>
Description:
def set_element_focus(self, locator):
    """Sets focus on the element identified by `locator`. Should
    be used with elements meant to have focus only, such as
    text fields. This keyword also waits for the focus to become
    active by calling the `Wait Until Element Has Focus` logic.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |"""
    self._info("Setting focus on element '%s'" % (locator))
    target = self._element_find(locator, True, True)
    # Sending the NULL key focuses the element without typing anything.
    target.send_keys(Keys.NULL)
    self._wait_until_no_error(None, self._check_element_focus, True, locator)
<SYSTEM_TASK:>
Clears the text field identified by `locator`
<END_TASK>
<USER_TASK:>
Description:
def clear_input_field(self, locator, method=0):
    """Clears the text field identified by `locator`

    The element.clear() method doesn't seem to work properly on
    all browsers, so this keyword was created to offer alternatives.

    The `method` argument defines the method it should use in order
    to clear the target field.

    0 = Uses the selenium method by doing element.clear \n
    1 = Sets focus on the field and presses CTRL + A, and then DELETE \n
    2 = Repeatedly presses BACKSPACE until the field is empty

    This keyword, when using a method other than '2' does not validate it
    successfully cleared the field, you should handle this verification by yourself.
    When using the method '2', it presses delete until the field's value is empty.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | method | the clearing method that should be used | no example provided |"""
    element = self._element_find(locator, True, True)
    mode = int(method)
    if mode == 0:
        self._info("Clearing input on element '%s'" % (locator))
        element.clear()
    elif mode == 1:
        self._info("Clearing input on element '%s' by pressing 'CTRL + A + DELETE'" % (locator))
        element.send_keys(Keys.CONTROL + 'a')
        element.send_keys(Keys.DELETE)
    elif mode == 2:
        self._info("Clearing input on element '%s' by repeatedly pressing BACKSPACE" % (locator))
        while len(element.get_attribute('value')) != 0:
            element.send_keys(Keys.BACKSPACE)
    else:
        # Unknown mode: fall back to the plain selenium clear.
        element.clear()
<SYSTEM_TASK:>
Verifies the element identified by `locator` has the expected
<END_TASK>
<USER_TASK:>
Description:
def element_width_should_be(self, locator, expected):
    """Verifies the element identified by `locator` has the expected
    width. Expected width should be in pixels.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | expected | expected width | 800 |"""
    message = "Verifying element '%s' width is '%s'" % (locator, expected)
    self._info(message)
    self._check_element_size(locator, 'width', expected)
<SYSTEM_TASK:>
Verifies the element identified by `locator` has the expected
<END_TASK>
<USER_TASK:>
Description:
def element_height_should_be(self, locator, expected):
    """Verifies the element identified by `locator` has the expected
    height. Expected height should be in pixels.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | expected | expected height | 600 |"""
    message = "Verifying element '%s' height is '%s'" % (locator, expected)
    self._info(message)
    self._check_element_size(locator, 'height', expected)
<SYSTEM_TASK:>
Verifies the element identified by `locator` has the expected value.
<END_TASK>
<USER_TASK:>
Description:
def element_value_should_be(self, locator, expected, strip=False):
    """Verifies the element identified by `locator` has the expected value.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | expected | expected value | My Name Is Slim Shady |
    | strip | Boolean, determines whether it should strip the field's value before comparison or not | ${True} / ${False} |"""
    self._info("Verifying element '%s' value is '%s'" % (locator, expected))
    element = self._element_find(locator, True, True)
    value = element.get_attribute('value')
    if strip:
        value = value.strip()
    # Guard clause: only raise on mismatch, success falls through.
    if str(value) != expected:
        raise AssertionError("Element '%s' value was not '%s', it was '%s'" % (locator, expected, value))
<SYSTEM_TASK:>
Verifies the element identified by `locator` is not the specified value.
<END_TASK>
<USER_TASK:>
Description:
def element_value_should_not_be(self, locator, value, strip=False):
    """Verifies the element identified by `locator` is not the specified value.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | value | value it should not be | My Name Is Slim Shady |
    | strip | Boolean, determines whether it should strip the field's value before comparison or not | ${True} / ${False} |"""
    self._info("Verifying element '%s' value is not '%s'" % (locator, value))
    element = self._element_find(locator, True, True)
    actual = str(element.get_attribute('value'))
    if strip:
        actual = actual.strip()
    if actual == value:
        raise AssertionError("Value was '%s' for element '%s' while it shouldn't have" % (actual, locator))
<SYSTEM_TASK:>
Verifies the element identified by `locator` contains the expected value.
<END_TASK>
<USER_TASK:>
Description:
def element_value_should_contain(self, locator, expected):
    """Verifies the element identified by `locator` contains the expected value.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | expected | expected value | Slim Shady |"""
    self._info("Verifying element '%s' value contains '%s'" % (locator, expected))
    element = self._element_find(locator, True, True)
    value = str(element.get_attribute('value'))
    # Guard clause: only raise when the substring is missing.
    if expected not in value:
        raise AssertionError("Value '%s' did not appear in element '%s'. It's value was '%s'" % (expected, locator, value))
<SYSTEM_TASK:>
Verifies the element identified by `locator` does not contain the specified value.
<END_TASK>
<USER_TASK:>
Description:
def element_value_should_not_contain(self, locator, value):
    """Verifies the element identified by `locator` does not contain the specified value.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | value | value it should not contain | Slim Shady |"""
    self._info("Verifying element '%s' value does not contain '%s'" % (locator, value))
    element = self._element_find(locator, True, True)
    actual = str(element.get_attribute('value'))
    if value in actual:
        raise AssertionError("Value '%s' was found in element '%s' while it shouldn't have" % (value, locator))
<SYSTEM_TASK:>
Verifies the element identified by `locator` has focus.
<END_TASK>
<USER_TASK:>
Description:
def element_focus_should_be_set(self, locator):
    """Verifies the element identified by `locator` has focus.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |"""
    message = "Verifying element '%s' focus is set" % locator
    self._info(message)
    self._check_element_focus(True, locator)
<SYSTEM_TASK:>
Verifies the element identified by `locator` does not have focus.
<END_TASK>
<USER_TASK:>
Description:
def element_focus_should_not_be_set(self, locator):
    """Verifies the element identified by `locator` does not have focus.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |"""
    message = "Verifying element '%s' focus is not set" % locator
    self._info(message)
    self._check_element_focus(False, locator)
<SYSTEM_TASK:>
Verifies the element identified by `locator` has the expected
<END_TASK>
<USER_TASK:>
Description:
def element_css_attribute_should_be(self, locator, prop, expected):
    """Verifies the element identified by `locator` has the expected
    value for the targeted `prop`.

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | prop | targeted css attribute | background-color |
    | expected | expected value | rgba(0, 128, 0, 1) |"""
    message = "Verifying element '%s' has css attribute '%s' with a value of '%s'" % (locator, prop, expected)
    self._info(message)
    self._check_element_css_value(locator, prop, expected)
<SYSTEM_TASK:>
Waits until all of the specified elements are not found on the page.
<END_TASK>
<USER_TASK:>
Description:
def wait_until_page_does_not_contain_these_elements(self, timeout, *locators):
    """Waits until all of the specified elements are not found on the page.

    | *Argument* | *Description* | *Example* |
    | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s |
    | *locators | Selenium 2 element locator(s) | id=MyId |"""
    # Keep retrying until none of the locators resolve to an element.
    self._wait_until_no_error(timeout, self._wait_for_elements_to_go_away, locators)
<SYSTEM_TASK:>
Clicks the element specified by `locator` until the operation succeeds. This should be
<END_TASK>
<USER_TASK:>
Description:
def wait_until_element_is_clickable(self, locator, timeout=None):
    """Clicks the element specified by `locator` until the operation succeeds. This should be
    used with buttons that are generated in real-time and that don't have their click handling available
    immediately. This keyword avoids unclickable element exceptions.

    | =Argument= | =Description= | =Example= |
    | locator | Selenium 2 element locator(s) | id=MyId |
    | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s |"""
    # Retry the click itself until it no longer raises.
    self._wait_until_no_error(timeout, self._wait_for_click_to_succeed, locator)
<SYSTEM_TASK:>
Decorator that creates a visitor method.
<END_TASK>
<USER_TASK:>
Description:
def visitor(arg_type):
    """Decorator that registers a visitor method for `arg_type`.

    The decorated function is recorded in the module-level `_methods`
    table keyed by (declaring class, argument type), and the attribute
    itself is replaced by the shared dispatching implementation.
    """
    def register(fn):
        key = (_declaring_class(fn), arg_type)
        _methods[key] = fn
        # Every decorated method is swapped for the generic dispatcher.
        return _visitor_impl
    return register
<SYSTEM_TASK:>
Returns a dictionary of callable methods of object `obj`.
<END_TASK>
<USER_TASK:>
Description:
def get_callable_method_dict(obj):
    """Return a dict mapping public method names of `obj` to bound callables.

    @param obj: ZOS API Python COM object (any Python object works)
    @return: a dictionary of callable methods

    Notes:
        Only attributes listed by dir() that are callable and do not start
        with an underscore are returned; properties and data attributes
        are skipped.
    """
    # getattr default guards against attributes that dir() lists but
    # that raise on access; a non-callable default is simply filtered out.
    return {
        attr_name: getattr(obj, attr_name, 'none')
        for attr_name in dir(obj)
        if not attr_name.startswith('_')
        and callable(getattr(obj, attr_name, 'none'))
    }
<SYSTEM_TASK:>
Returns a lists of properties bound to the object `zos_obj`
<END_TASK>
<USER_TASK:>
Description:
def get_properties(zos_obj):
    """Return the getter-only and getter+setter property names of `zos_obj`.

    @param zos_obj: ZOS API Python COM object
    @return prop_get: list of properties that are only getters
    @return prop_set: list of properties that are both getters and setters

    Raises NotImplementedError if some setter is not also a getter, which
    would break the getter/setter partitioning assumed here.
    """
    getters = set(zos_obj._prop_map_get_.keys())
    setters = set(zos_obj._prop_map_put_.keys())
    if not setters.issubset(getters):
        raise NotImplementedError(
            'Assumption all getters are also setters is incorrect!')
    # Getter-only properties are those without a matching setter.
    return list(getters - setters), list(setters)
<SYSTEM_TASK:>
Helper function to wrap ZOS API COM objects.
<END_TASK>
<USER_TASK:>
Description:
def wrapped_zos_object(zos_obj):
    """Wrap a ZOS API COM object in its dynamically generated wrapper class.

    @param zos_obj: ZOS API Python COM object
    @return: instance of the wrapped ZOS API class. If the input object is
             not a ZOS-API COM object (no CLSID) or is already wrapped,
             the object is returned unchanged.

    Notes:
        The wrapper class is produced by `managed_wrapper_class_factory`,
        which monkey-patches methods, properties and custom helpers.
    """
    already_wrapped = hasattr(zos_obj, '_wrapped')
    is_com_object = 'CLSID' in dir(zos_obj)
    if already_wrapped or not is_com_object:
        return zos_obj
    wrapper_cls = managed_wrapper_class_factory(zos_obj)
    return wrapper_cls(zos_obj)
<SYSTEM_TASK:>
Clean any processing data, and prepare object for reuse
<END_TASK>
<USER_TASK:>
Description:
def reset(self):
    """Clean any processing data, and prepare object for reuse
    """ |
    self.current_table = None      # table currently being parsed
    self.tables = []               # all tables parsed so far
    self.data = [{}]               # per-table metadata; slot 0 is document-level
    self.additional_data = {}
    self.lines = []                # raw input lines of the current file
    # Return the state machine to its initial document-level state.
    self.set_state('document')
    self.current_file = None
    self.set_of_energies = set() |
<SYSTEM_TASK:>
Set current parsing state to 'table',
<END_TASK>
<USER_TASK:>
Description:
def _set_table(self, data):
    """Switch the parser into 'table' state and open a fresh table.

    A new HEPTable (1-based index) becomes the current table, is added to
    the table collection, and its metadata dict joins the data list.
    """
    self.set_state('table')
    table = HEPTable(index=len(self.tables) + 1)
    self.current_table = table
    self.tables.append(table)
    self.data.append(table.metadata)
<SYSTEM_TASK:>
Transform a square matrix into a format with two independent variables and one dependent variable.
<END_TASK>
<USER_TASK:>
Description:
def _reformat_matrix(self):
    """Transform a square matrix into a format with two independent variables and one dependent variable.

    Returns True when the current table was reshaped in place, False when
    the table does not look like a square matrix (not exactly one x-axis,
    or the number of y-axes does not equal the number of points, or fewer
    than two points).
    """ |
    nxax = len(self.current_table.data['independent_variables'])
    nyax = len(self.current_table.data['dependent_variables'])
    npts = len(self.current_table.data['dependent_variables'][0]['values'])

    # check if 1 x-axis, and npts (>=2) equals number of y-axes
    if nxax != 1 or nyax != npts or npts < 2:
        return False

    # add second independent variable with each value duplicated npts times
    if len(self.current_table.xheaders) == 2:
        xheader = self.current_table.xheaders[1]
    else:
        # no explicit second x-header: reuse a copy of the first one
        xheader = copy.deepcopy(self.current_table.data['independent_variables'][0]['header'])
    self.current_table.data['independent_variables'].append({'header': xheader, 'values': []})
    for value in self.current_table.data['independent_variables'][0]['values']:
        self.current_table.data['independent_variables'][1]['values'].extend([copy.deepcopy(value) for npt in range(npts)])

    # duplicate values of first independent variable npts times
    self.current_table.data['independent_variables'][0]['values'] \
        = [copy.deepcopy(value) for npt in range(npts) for value in self.current_table.data['independent_variables'][0]['values']]

    # suppress header if different for second y-axis
    if self.current_table.data['dependent_variables'][0]['header'] != \
            self.current_table.data['dependent_variables'][1]['header']:
        self.current_table.data['dependent_variables'][0]['header'] = {'name': ''}

    # remove qualifier if different for second y-axis
    iqdel = []  # list of qualifier indices to be deleted
    for iq, qualifier in enumerate(self.current_table.data['dependent_variables'][0]['qualifiers']):
        if qualifier != self.current_table.data['dependent_variables'][1]['qualifiers'][iq]:
            iqdel.append(iq)
    for iq in iqdel[::-1]:  # need to delete in reverse order so indices stay valid
        del self.current_table.data['dependent_variables'][0]['qualifiers'][iq]

    # append values of second and subsequent y-axes to first dependent variable
    for iy in range(1, nyax):
        for value in self.current_table.data['dependent_variables'][iy]['values']:
            self.current_table.data['dependent_variables'][0]['values'].append(value)

    # finally, delete the second and subsequent y-axes in reverse order
    for iy in range(nyax-1, 0, -1):
        del self.current_table.data['dependent_variables'][iy]

    return True |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.