code (strings, 52 to 7.75k chars) | docs (strings, 1 to 5.85k chars)
---|---|
def stream_info(self, stream_id):
''' get stream info '''
response, status_code = self.__pod__.Streams.get_v2_room_id_info(
sessionToken=self.__session__,
id=stream_id
).result()
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | get stream info |
def create_stream(self, uidList=[]):
''' create a stream '''
req_hook = 'pod/v1/im/create'
req_args = json.dumps(uidList)
status_code, response = self.__rest__.POST_query(req_hook, req_args)
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | create a stream |
def update_room(self, stream_id, room_definition):
''' update a room definition '''
req_hook = 'pod/v2/room/' + str(stream_id) + '/update'
req_args = json.dumps(room_definition)
status_code, response = self.__rest__.POST_query(req_hook, req_args)
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | update a room definition |
def activate_stream(self, stream_id, status):
''' de/reactivate a stream '''
req_hook = 'pod/v1/room/' + str(stream_id) + '/setActive?active=' + self.__rest__.bool2str(status)
req_args = None
status_code, response = self.__rest__.POST_query(req_hook, req_args)
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | de/reactivate a stream |
def room_members(self, stream_id):
''' get list of room members '''
req_hook = 'pod/v2/room/' + str(stream_id) + '/membership/list'
req_args = None
status_code, response = self.__rest__.GET_query(req_hook, req_args)
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | get list of room members |
def promote_owner(self, stream_id, user_id):
''' promote user to owner in stream '''
req_hook = 'pod/v1/room/' + stream_id + '/membership/promoteOwner'
req_args = '{ "id": %s }' % user_id
status_code, response = self.__rest__.POST_query(req_hook, req_args)
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | promote user to owner in stream |
def search_rooms(self, query, labels=None, active=True, creator=None, skip=0, limit=25):
''' search rooms '''
req_hook = 'pod/v2/room/search?skip=' + str(skip) + '&limit=' + str(limit)
json_query = {
"query": query,
"labels": labels,
"active": active,
"creator": creator
}
req_args = json.dumps(json_query)
status_code, response = self.__rest__.POST_query(req_hook, req_args)
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | search rooms |
def list_streams(self, types=[], inactive=False):
''' list user streams '''
req_hook = 'pod/v1/streams/list'
json_query = {
"streamTypes": types,
"includeInactiveStreams": inactive
}
req_args = json.dumps(json_query)
status_code, response = self.__rest__.POST_query(req_hook, req_args)
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | list user streams |
def get_unverified_claims(token):
try:
claims = jws.get_unverified_claims(token)
except:
raise JWTError('Error decoding token claims.')
try:
claims = json.loads(claims.decode('utf-8'))
except ValueError as e:
raise JWTError('Invalid claims string: %s' % e)
if not isinstance(claims, Mapping):
raise JWTError('Invalid claims string: must be a json object')
return claims | Returns the decoded claims without verification of any kind.
Args:
token (str): A signed JWT to decode the headers from.
Returns:
dict: The dict representation of the token claims.
Raises:
JWTError: If there is an exception decoding the token. |
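The `jws.get_unverified_claims` call above delegates payload extraction to python-jose. For context, here is a minimal self-contained sketch of what decoding claims without verification amounts to: base64url-decoding the middle segment of the compact JWT, with no signature check. `decode_claims_unverified` is a hypothetical helper name, not part of the library.

```python
import base64
import json

def decode_claims_unverified(token):
    # A compact JWT is header.payload.signature; the claims live in the payload segment.
    payload_b64 = token.split('.')[1]
    # base64url decode, restoring any stripped '=' padding
    padded = payload_b64 + '=' * (-len(payload_b64) % 4)
    return json.loads(base64.urlsafe_b64decode(padded))
```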
def _validate_at_hash(claims, access_token, algorithm):
if 'at_hash' not in claims and not access_token:
return
elif 'at_hash' in claims and not access_token:
msg = 'No access_token provided to compare against at_hash claim.'
raise JWTClaimsError(msg)
elif access_token and 'at_hash' not in claims:
msg = 'at_hash claim missing from token.'
raise JWTClaimsError(msg)
try:
expected_hash = calculate_at_hash(access_token,
ALGORITHMS.HASHES[algorithm])
except (TypeError, ValueError):
msg = 'Unable to calculate at_hash to verify against token claims.'
raise JWTClaimsError(msg)
if claims['at_hash'] != expected_hash:
raise JWTClaimsError('at_hash claim does not match access_token.') | Validates that the 'at_hash' parameter included in the claims matches
with the access_token returned alongside the id token as part of
the authorization_code flow.
Args:
claims (dict): The claims dictionary to validate.
access_token (str): The access token returned by the OpenID Provider.
algorithm (str): The algorithm used to sign the JWT, as specified by
the token headers. |
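`calculate_at_hash` is referenced above but not shown. As a point of reference, the OpenID Connect `at_hash` value is conventionally the base64url encoding (without padding) of the left half of the access token's hash; a sketch under that assumption, not necessarily the library's exact implementation:

```python
import base64
import hashlib

def calculate_at_hash(access_token, hash_alg=hashlib.sha256):
    # Hash the access token with the algorithm matching the ID token signature,
    # keep the left half of the digest, and base64url-encode it without padding.
    digest = hash_alg(access_token.encode('ascii')).digest()
    left_half = digest[:len(digest) // 2]
    return base64.urlsafe_b64encode(left_half).rstrip(b'=').decode('ascii')
```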
def get_session_token(self):
''' get session token '''
# HTTP POST query to session authenticate API
try:
response = requests.post(self.__session_url__ + 'sessionauth/v1/authenticate',
cert=(self.__crt__, self.__key__), verify=True)
except requests.exceptions.RequestException as err:
self.logger.error(err)
raise
if response.status_code == 200:
# load json response as list
data = json.loads(response.text)
self.logger.debug(data)
# grab token from list
session_token = data['token']
else:
raise Exception('BAD HTTP STATUS: %s' % str(response.status_code))
# return the token
self.logger.debug(session_token)
return session_token | get session token |
def main():
''' main program loop '''
conn = symphony.Config('example-bot.cfg')
# connect to pod
agent, pod, symphony_sid = conn.connect()
agent.test_echo('test')
# main loop
msgFormat = 'MESSAGEML'
message = '<messageML> hello world. </messageML>'
# send message
agent.send_message(symphony_sid, msgFormat, message) | main program loop |
def create_datafeed(self):
''' create datafeed '''
response, status_code = self.__agent__.Datafeed.post_v4_datafeed_create(
sessionToken=self.__session__,
keyManagerToken=self.__keymngr__
).result()
# return the token
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response['id'] | create datafeed |
def read_datafeed(self, datafeed_id):
''' get datafeed '''
response, status_code = self.__agent__.Datafeed.get_v4_datafeed_id_read(
sessionToken=self.__session__,
keyManagerToken=self.__keymngr__,
id=datafeed_id
).result()
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | get datafeed |
def send_message(self, threadid, msgFormat, message):
''' send message to threadid/stream '''
# using deprecated v3 message create because of bug in codegen of v4 ( multipart/form-data )
response, status_code = self.__agent__.Messages.post_v3_stream_sid_message_create(
sessionToken=self.__session__,
keyManagerToken=self.__keymngr__,
sid=threadid,
message={"format": msgFormat,
"message": message}
).result()
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | send message to threadid/stream |
def read_stream(self, stream_id, since_epoch):
''' get datafeed '''
response, status_code = self.__agent__.Messages.get_v4_stream_sid_message(
sessionToken=self.__session__,
keyManagerToken=self.__keymngr__,
sid=stream_id,
since=since_epoch
).result()
self.logger.debug('%s: %s' % (status_code, response))
return status_code, response | get datafeed |
def _NTU_max_for_P_solver(data, R1):
'''Private function to calculate the upper bound on the NTU1 value in the
P-NTU method. This value is calculated via a pade approximation obtained
on the result of a global minimizer which calculated the maximum P1
at a given R1 from ~1E-7 to approximately 100. This should suffice for
engineering applications. This value is needed to bound the solver.
'''
offset_max = data['offset'][-1]
for offset, p, q in zip(data['offset'], data['p'], data['q']):
if R1 < offset or offset == offset_max:
x = R1 - offset
return _horner(p, x)/_horner(q, x) | Private function to calculate the upper bound on the NTU1 value in the
P-NTU method. This value is calculated via a pade approximation obtained
on the result of a global minimizer which calculated the maximum P1
at a given R1 from ~1E-7 to approximately 100. This should suffice for
engineering applications. This value is needed to bound the solver. |
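`_horner` is used above but not defined in this snippet. Assuming the usual convention of coefficients ordered from the highest degree down, Horner polynomial evaluation is only a few lines:

```python
def _horner(coeffs, x):
    # Evaluate sum(c_i * x**i) with coefficients given from highest degree to lowest,
    # using Horner's rule to avoid explicit powers.
    total = 0.0
    for c in coeffs:
        total = total * x + c
    return total

print(_horner([2.0, -3.0, 1.0], 4.0))  # 2*16 - 3*4 + 1 = 21.0
```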
def check_tubing_TEMA(NPS=None, BWG=None):
'''
>>> check_tubing_TEMA(2, 22)
False
>>> check_tubing_TEMA(0.375, 22)
True
'''
if NPS in TEMA_tubing:
if BWG in TEMA_tubing[NPS]:
return True
return False | >>> check_tubing_TEMA(2, 22)
False
>>> check_tubing_TEMA(0.375, 22)
True |
def k2g(kml_path, output_dir, separate_folders, style_type,
style_filename):
m.convert(kml_path, output_dir, separate_folders, style_type, style_filename) | Given a path to a KML file, convert it to a GeoJSON FeatureCollection file and save it to the given output directory.
If ``--separate_folders``, then create several GeoJSON files, one for each folder in the KML file that contains geodata or that has a descendant node that contains geodata.
Warning: this can produce GeoJSON files with the same geodata in case the KML file has nested folders with geodata.
If ``--style_type`` is specified, then also build a JSON style file of the given style type and save it to the output directory under the file name given by ``--style_filename``. |
def rm_paths(*paths):
for p in paths:
p = Path(p)
if p.exists():
if p.is_file():
p.unlink()
else:
shutil.rmtree(str(p)) | Delete the given file paths/directory paths, if they exist. |
def gx_coords(node):
els = get(node, 'gx:coord')
coordinates = []
times = []
coordinates = [gx_coords1(val(el)) for el in els]
time_els = get(node, 'when')
times = [val(t) for t in time_els]
return {
'coordinates': coordinates,
'times': times,
} | Given a KML DOM node, grab its <gx:coord> and <gx:timestamp><when> subnodes, and convert them into a dictionary with the keys and values
- ``'coordinates'``: list of lists of float coordinates
- ``'times'``: list of timestamps corresponding to the coordinates |
def disambiguate(names, mark='1'):
names_seen = set()
new_names = []
for name in names:
new_name = name
while new_name in names_seen:
new_name += mark
new_names.append(new_name)
names_seen.add(new_name)
return new_names | Given a list of strings ``names``, return a new list of names where repeated names have been disambiguated by repeatedly appending the given mark.
EXAMPLE::
>>> disambiguate(['sing', 'song', 'sing', 'sing'])
['sing', 'song', 'sing1', 'sing11'] |
def to_filename(s):
s = re.sub(r'(?u)[^-\w. ]', '', s)
s = s.strip().replace(' ', '_')
return s | Based on `django/utils/text.py <https://github.com/django/django/blob/master/django/utils/text.py>`_.
Return the given string converted to a string that can be used for a clean filename.
Specifically, leading and trailing spaces are removed; other spaces are converted to underscores, and anything that is not a unicode alphanumeric, dash, underscore, or dot, is removed.
EXAMPLE::
>>> to_filename("% A d\sbla'{-+\)(ç? ")
'A_dsbla-ç' |
def build_rgb_and_opacity(s):
# Set defaults
color = '000000'
opacity = 1
if s.startswith('#'):
s = s[1:]
if len(s) == 8:
color = s[6:8] + s[4:6] + s[2:4]
opacity = round(int(s[0:2], 16)/256, 2)
elif len(s) == 6:
color = s[4:6] + s[2:4] + s[0:2]
elif len(s) == 3:
color = s[::-1]
return '#' + color, opacity | Given a KML color string, return an equivalent RGB hex color string and an opacity float rounded to 2 decimal places.
EXAMPLE::
>>> build_rgb_and_opacity('ee001122')
('#221100', 0.93) |
def build_geometry(node):
geoms = []
times = []
if get1(node, 'MultiGeometry'):
return build_geometry(get1(node, 'MultiGeometry'))
if get1(node, 'MultiTrack'):
return build_geometry(get1(node, 'MultiTrack'))
if get1(node, 'gx:MultiTrack'):
return build_geometry(get1(node, 'gx:MultiTrack'))
for geotype in GEOTYPES:
geonodes = get(node, geotype)
if not geonodes:
continue
for geonode in geonodes:
if geotype == 'Point':
geoms.append({
'type': 'Point',
'coordinates': coords1(val(get1(
geonode, 'coordinates')))
})
elif geotype == 'LineString':
geoms.append({
'type': 'LineString',
'coordinates': coords(val(get1(
geonode, 'coordinates')))
})
elif geotype == 'Polygon':
rings = get(geonode, 'LinearRing')
coordinates = [coords(val(get1(ring, 'coordinates')))
for ring in rings]
geoms.append({
'type': 'Polygon',
'coordinates': coordinates,
})
elif geotype in ['Track', 'gx:Track']:
track = gx_coords(geonode)
geoms.append({
'type': 'LineString',
'coordinates': track['coordinates'],
})
if track['times']:
times.append(track['times'])
return {'geoms': geoms, 'times': times} | Return a (decoded) GeoJSON geometry dictionary corresponding to the given KML node. |
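The `coords` and `coords1` helpers used above are not shown. In KML, a `<coordinates>` element holds whitespace-separated `lon,lat[,alt]` tuples, so a plausible sketch of the parsers (hypothetical, ignoring altitude for brevity) is:

```python
def coords1(s):
    # Parse a single KML coordinate tuple "lon,lat[,alt]" into [lon, lat] floats.
    return [float(x) for x in s.split(',')][:2]

def coords(s):
    # Parse a whitespace-separated KML <coordinates> string into a list of coordinate pairs.
    return [coords1(chunk) for chunk in s.split()]

print(coords("-122.084,37.422,0 -122.085,37.423,0"))
# [[-122.084, 37.422], [-122.085, 37.423]]
```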
def build_feature_collection(node, name=None):
# Initialize
geojson = {
'type': 'FeatureCollection',
'features': [],
}
# Build features
for placemark in get(node, 'Placemark'):
feature = build_feature(placemark)
if feature is not None:
geojson['features'].append(feature)
# Give the collection a name if requested
if name is not None:
geojson['name'] = name
return geojson | Build and return a (decoded) GeoJSON FeatureCollection corresponding to this KML DOM node (typically a KML Folder).
If a name is given, store it in the FeatureCollection's ``'name'`` attribute. |
def build_layers(node, disambiguate_names=True):
layers = []
names = []
for i, folder in enumerate(get(node, 'Folder')):
name = val(get1(folder, 'name'))
geojson = build_feature_collection(folder, name)
if geojson['features']:
layers.append(geojson)
names.append(name)
if not layers:
# No folders, so use the root node
name = val(get1(node, 'name'))
geojson = build_feature_collection(node, name)
if geojson['features']:
layers.append(geojson)
names.append(name)
if disambiguate_names:
new_names = disambiguate(names)
new_layers = []
for i, layer in enumerate(layers):
layer['name'] = new_names[i]
new_layers.append(layer)
layers = new_layers
return layers | Return a list of GeoJSON FeatureCollections, one for each folder in the given KML DOM node that contains geodata.
Name each FeatureCollection (via a ``'name'`` attribute) according to its corresponding KML folder name.
If ``disambiguate_names == True``, then disambiguate repeated layer names via :func:`disambiguate`.
Warning: this can produce layers with the same geodata in case the KML node has nested folders with geodata. |
def _get_all_permissions(opts, tools):
perms = []
for tool in tools:
perms.append((_get_permission_codename(tool, opts), 'Can %s %s' % \
(tool.name, opts.verbose_name_plural)))
return perms | Returns (codename, name) for all tools. |
def _create_permissions(**kwargs):
from django.contrib.contenttypes.models import ContentType
object_tools.autodiscover()
tools = object_tools.tools._registry
# This will hold the permissions we're looking for as
# (content_type, (codename, name))
searched_perms = list()
# The codenames and ctypes that should exist.
ctypes = set()
for model, tools in tools.items():
ctype = ContentType.objects.get_for_model(model)
ctypes.add(ctype)
for perm in _get_all_permissions(model._meta, tools):
searched_perms.append((ctype, perm))
# Find all the Permissions that have a context_type for a model we're
# looking for. We don't need to check for codenames since we already have
# a list of the ones we're going to create.
all_perms = set(auth_app.Permission.objects.filter(
content_type__in=ctypes,
).values_list(
"content_type", "codename"
))
for ctype, (codename, name) in searched_perms:
# If the permissions exists, move on.
if (ctype.pk, codename) in all_perms:
continue
p = auth_app.Permission.objects.create(
codename=codename,
name=name,
content_type=ctype
)
if kwargs.get("verbosity", 2) >= 2:
print("Adding permission '%s'" % p) | Almost exactly the same as django.contrib.auth.management.__init__.py |
def get_random_user(self):
c = self.db.cursor()
c.execute('''SELECT username, password, fullname FROM users
WHERE rowid >= (abs(random()) % (SELECT max(rowid) FROM users))
LIMIT 1''')
r = c.fetchone()
return {"username": r[0], "password": r[1], "fullname": r[2]} | Gets a random user from the provider
:returns: Dictionary |
def is_element_available(self, locator):
if self.driver.is_element_present(locator):
if self.driver.is_visible(locator):
return True
else:
return False
else:
return False | Synchronization method for making sure the element we're looking for is not only on the page,
but also visible -- since Se will happily deal with things that aren't visible.
Use this instead of is_element_present most of the time. |
def wait_for_available(self, locator):
for i in range(timeout_seconds):
try:
if self.is_element_available(locator):
break
except:
pass
time.sleep(1)
else:
raise ElementVisiblityTimeout("%s availability timed out" % locator)
return True | Synchronization to deal with elements that are present, and are visible
:raises: ElementVisiblityTimeout |
def wait_for_visible(self, locator):
for i in range(timeout_seconds):
try:
if self.driver.is_visible(locator):
break
except:
pass
time.sleep(1)
else:
raise ElementVisiblityTimeout("%s visibility timed out" % locator)
return True | Synchronization to deal with elements that are present, but are disabled until some action
triggers their visibility.
:raises: ElementVisiblityTimeout |
def wait_for_text(self, locator, text):
for i in range(timeout_seconds):
try:
e = self.driver.find_element_by_locator(locator)
if e.text == text:
break
except:
pass
time.sleep(1)
else:
raise ElementTextTimeout("%s value timed out" % locator)
return True | Synchronization on some text being displayed in a particular element.
:raises: ElementVisiblityTimeout |
def wait_for_element_not_present(self, locator):
for i in range(timeout_seconds):
if self.driver.is_element_present(locator):
time.sleep(1)
else:
break
else:
raise ElementVisiblityTimeout("%s presence timed out" % locator)
return True | Synchronization helper to wait until some element is removed from the page
:raises: ElementVisiblityTimeout |
def validate(tool_class, model_class):
if not hasattr(tool_class, 'name'):
raise ImproperlyConfigured("No 'name' attribute found for tool %s." % (
tool_class.__name__
))
if not hasattr(tool_class, 'label'):
raise ImproperlyConfigured("No 'label' attribute found for tool %s." % (
tool_class.__name__
))
if not hasattr(tool_class, 'view'):
raise NotImplementedError("No 'view' method found for tool %s." % (
tool_class.__name__
)) | Does basic ObjectTool option validation. |
def register(self, object_tool_class, model_class=None):
if not object_tool_class:
return None
# Don't validate unless required.
if object_tool_class and settings.DEBUG:
from object_tools.validation import validate
validate(object_tool_class, model_class)
# = lambda model, adminclass: None
if not model_class:
models = get_models()
else:
models = [model_class, ]
for model in models:
if model._meta.abstract:
raise ImproperlyConfigured(
'The model %s is abstract, so it \
cannot be registered with object tools.' % model.__name__)
# Instantiate the object_tools class to save in the registry
if model in self._registry:
self._registry[model].append(object_tool_class(model))
else:
self._registry[model] = [object_tool_class(model), ] | Registers the given model(s) with the given object tool class.
The model(s) should be Model classes, not instances.
If a model class isn't given the object tool class will be registered
for all models.
If a model is already registered, this will raise AlreadyRegistered.
If a model is abstract, this will raise ImproperlyConfigured. |
def random_string(random_length=None):
choices = string.letters + ' '
text = []
if not random_length:
random_length = random.randint(1, 30)
for x in range(random_length):
text.append(random.choice(choices))
return "".join(text) | A generator for creating random string data of letters plus ' ' (whitespace)
:params random_length: how many characters of random string data. if not provided, will be between 1 - 30
:returns: String |
def construct_form(self, request):
if not hasattr(self, 'form_class'):
return None
if request.method == 'POST':
form = self.form_class(self.model, request.POST, request.FILES)
else:
form = self.form_class(self.model)
return form | Constructs form from POST method using self.form_class. |
def has_permission(self, user):
return user.has_perm(
self.model._meta.app_label + '.' + self.get_permission()
) | Returns True if the given request has permission to use the tool.
Can be overridden by the user in subclasses. |
def media(self, form):
js = ['admin/js/core.js', 'admin/js/admin/RelatedObjectLookups.js',
'admin/js/jquery.min.js', 'admin/js/jquery.init.js']
media = forms.Media(
js=['%s%s' % (settings.STATIC_URL, u) for u in js],
)
if form:
for name, field in form.fields.items():
media = media + field.widget.media
return media | Collects admin and form media. |
def _urls(self):
info = (
self.model._meta.app_label, self.model._meta.model_name,
self.name,
)
urlpatterns = [
url(r'^%s/$' % self.name, self._view, name='%s_%s_%s' % info)
]
return urlpatterns | URL patterns for tool linked to _view method. |
def construct_context(self, request):
opts = self.model._meta
app_label = opts.app_label
object_name = opts.object_name.lower()
form = self.construct_form(request)
media = self.media(form)
context = {
'user': request.user,
'title': '%s %s' % (self.label, opts.verbose_name_plural.lower()),
'tool': self,
'opts': opts,
'app_label': app_label,
'media': media,
'form': form,
'changelist_url': reverse('admin:%s_%s_changelist' % (
app_label, object_name
))
}
# Pass along fieldset if specified.
if hasattr(form, 'fieldsets'):
admin_form = helpers.AdminForm(form, form.fieldsets, {})
context['adminform'] = admin_form
return context | Builds context with various required variables. |
def _view(self, request, extra_context=None):
if not self.has_permission(request.user):
raise PermissionDenied
return self.view(request, self.construct_context(request)) | View wrapper taking care of housekeeping for painless form rendering. |
def randomRow(self):
l = []
for row in self.data:
l.append(row)
return random.choice(l) | Gets a random row from the provider
:returns: List |
def get_random_user(self):
from provider.models import User
u = User.objects.order_by('?')[0]
return {"username": u.username, "password": u.password, "fullname": u.fullname} | Gets a random user from the provider
:returns: Dictionary |
def verify_equal(self, first, second, msg=""):
try:
self.assert_equal(first, second, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for equality
:params want: the value to compare against
:params second: the value to compare with
:params msg: (Optional) msg explaining the difference |
def verify_not_equal(self, first, second, msg=""):
try:
self.assert_not_equal(first, second, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for inequality
:params want: the value to compare against
:params second: the value to compare with
:params msg: (Optional) msg explaining the difference |
def verify_true(self, expr, msg=None):
try:
self.assert_true(expr, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether the condition is true
:params expr: the statement to evaluate
:params msg: (Optional) msg explaining the difference |
def verify_false(self, expr, msg=None):
try:
self.assert_false(expr, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether the condition is false
:params expr: the statement to evaluate
:params msg: (Optional) msg explaining the difference |
def verify_is(self, first, second, msg=None):
try:
self.assert_is(first, second, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether the parameters evaluate to the same object
:params want: the object to compare against
:params second: the object to compare with
:params msg: (Optional) msg explaining the difference |
def verify_is_not(self, first, second, msg=None):
try:
self.assert_is_not(first, second, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether the parameters do not evaluate to the same object
:params want: the object to compare against
:params second: the object to compare with
:params msg: (Optional) msg explaining the difference |
def verify_is_none(self, expr, msg=None):
try:
self.assert_is_none(expr, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether the expr is None
:params want: the object to compare against
:params second: the object to compare with
:params msg: (Optional) msg explaining the difference |
def verify_is_not_none(self, expr, msg=None):
try:
self.assert_is_not_none(expr, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether the expr is not None
:params want: the object to compare against
:params second: the object to compare with
:params msg: (Optional) msg explaining the difference |
def verify_in(self, first, second, msg=""):
try:
self.assert_in(first, second, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether the first is in second
:params first: the value to check
:params second: the container to check in
:params msg: (Optional) msg explaining the difference |
def verify_not_in(self, first, second, msg=""):
try:
self.assert_not_in(first, second, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether the first is not in second
:params first: the value to check
:params second: the container to check in
:params msg: (Optional) msg explaining the difference |
def verify_is_instance(self, obj, cls, msg=""):
try:
self.assert_is_instance(obj, cls, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether obj is an instance of cls
:params obj: the object instance
:params cls: the class to compare against
:params msg: (Optional) msg explaining the difference |
def verify_is_not_instance(self, obj, cls, msg=""):
try:
self.assert_is_not_instance(obj, cls, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether obj is not an instance of cls
:params obj: the object instance
:params cls: the class to compare against
:params msg: (Optional) msg explaining the difference |
def assert_text_present(self, text, msg=None):
e = driver.find_element_by_tag_name('body')
assert text in e.text | Hard assert for whether the text is visible in the current window/frame
:params text: the string to search for
:params msg: (Optional) msg explaining the difference |
def verify_text_present(self, text, msg=None):
try:
self.assert_text_present(text, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether the text is visible in the current window/frame
:params text: the string to search for
:params msg: (Optional) msg explaining the difference |
def asset_element_present(self, locator, msg=None):
e = driver.find_elements_by_locator(locator)
if len(e) == 0:
raise AssertionError("Element at %s was not found" % locator) | Hard assert for whether and element is present in the current window/frame
:params locator: the locator of the element to search for
:params msg: (Optional) msg explaining the difference |
def verify_element_present(self, locator, msg=None):
try:
self.asset_element_present(locator, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether an element is present in the current window/frame
:params locator: the locator of the element to search for
:params msg: (Optional) msg explaining the difference |
def assert_visible(self, locator, msg=None):
e = driver.find_elements_by_locator(locator)
if len(e) == 0:
raise AssertionError("Element at %s was not found" % locator)
assert e.is_displayed() | Hard assert for whether an element is present and visible in the current window/frame
:params locator: the locator of the element to search for
:params msg: (Optional) msg explaining the difference |
def verify_visible(self, locator, msg=None):
try:
self.assert_visible(locator, msg)
except AssertionError, e:
if msg:
m = "%s:\n%s" % (msg, str(e))
else:
m = str(e)
self.verification_erorrs.append(m) | Soft assert for whether an element is present and visible in the current window/frame
:params locator: the locator of the element to search for
:params msg: (Optional) msg explaining the difference |
def obj(self):
if self._wrapped is not self.Null:
return self._wrapped
else:
return self.object | Returns passed object but if chain method is used
returns the last processed result |
def _wrap(self, ret):
if self.chained:
self._wrapped = ret
return self
else:
return ret | Returns result but if chain method is used
returns the object itself so we can chain |
def _toOriginal(self, val):
if self._clean.isTuple():
return tuple(val)
elif self._clean.isList():
return list(val)
elif self._clean.isDict():
return dict(val)
else:
return val | Pitty attempt to convert itertools result into a real object |
def each(self, func):
if self._clean.isTuple() or self._clean.isList():
for index, value in enumerate(self.obj):
r = func(value, index, self.obj)
if r is "breaker":
break
else:
for index, key in enumerate(self.obj):
r = func(self.obj[key], key, self.obj, index)
if r is "breaker":
break
return self._wrap(self) | iterates through each item of an object
:Param: func iterator function |
def map(self, func):
ns = self.Namespace()
ns.results = []
def by(value, index, list, *args):
ns.results.append(func(value, index, list))
_(self.obj).each(by)
return self._wrap(ns.results) | Return the results of applying the iterator to each element. |
def reduce(self, func, memo=None):
if memo is None:
memo = []
ns = self.Namespace()
ns.initial = True # arguments.length > 2
ns.memo = memo
obj = self.obj
def by(value, index, *args):
if not ns.initial:
ns.memo = value
ns.initial = True
else:
ns.memo = func(ns.memo, value, index)
_(obj).each(by)
return self._wrap(ns.memo) | **Reduce** builds up a single result from a list of values,
aka `inject`, or foldl |
def reduceRight(self, func):
#foldr = lambda f, i: lambda s: reduce(f, s, i)
x = self.obj[:]
x.reverse()
return self._wrap(functools.reduce(func, x)) | The right-associative version of reduce, also known as `foldr`. |
def find(self, func):
self.ftmp = None
def test(value, index, list):
if func(value, index, list) is True:
self.ftmp = value
return True
self._clean.any(test)
return self._wrap(self.ftmp) | Return the first value which passes a truth test.
Aliased as `detect`. |
def filter(self, func):
return self._wrap(list(filter(func, self.obj))) | Return all the elements that pass a truth test. |
def reject(self, func):
return self._wrap(list(filter(lambda val: not func(val), self.obj))) | Return all the elements for which a truth test fails. |
def all(self, func=None):
if func is None:
func = lambda x, *args: x
self.altmp = True
def testEach(value, index, *args):
if func(value, index, *args) is False:
self.altmp = False
self._clean.each(testEach)
return self._wrap(self.altmp) | Determine whether all of the elements match a truth test. |
def any(self, func=None):
if func is None:
func = lambda x, *args: x
self.antmp = False
def testEach(value, index, *args):
if func(value, index, *args) is True:
self.antmp = True
return "breaker"
self._clean.each(testEach)
return self._wrap(self.antmp) | Determine if at least one element in the object
matches a truth test. |
def include(self, target):
if self._clean.isDict():
return self._wrap(target in self.obj.values())
else:
return self._wrap(target in self.obj) | Determine if a given value is included in the
array or object using `is`. |
def invoke(self, method, *args):
def inv(value, *ar):
if (
_(method).isFunction() or
_(method).isLambda() or
_(method).isMethod()
):
return method(value, *args)
else:
return getattr(value, method)(*args)
return self._wrap(self._clean.map(inv)) | Invoke a method (with arguments) on every item in a collection. |
def pluck(self, key):
return self._wrap([x.get(key) for x in self.obj]) | Convenience version of a common use case of
`map`: fetching a property. |
def where(self, attrs=None, first=False):
if attrs is None:
return None if first is True else []
method = _.find if first else _.filter
def by(val, *args):
for key, value in attrs.items():
try:
if attrs[key] != val[key]:
return False
except KeyError:
return False
return True
return self._wrap(method(self.obj, by)) | Convenience version of a common use case of `filter`:
selecting only objects
containing specific `key:value` pairs. |
def findWhere(self, attrs=None):
return self._wrap(self._clean.where(attrs, True)) | Convenience version of a common use case of `find`:
getting the first object
containing specific `key:value` pairs. |
def max(self):
if(self._clean.isDict()):
return self._wrap(list())
return self._wrap(max(self.obj)) | Return the maximum element or (element-based computation). |
def min(self):
if(self._clean.isDict()):
return self._wrap(list())
return self._wrap(min(self.obj)) | Return the minimum element (or element-based computation). |
def shuffle(self):
if(self._clean.isDict()):
return self._wrap(list())
cloned = self.obj[:]
random.shuffle(cloned)
return self._wrap(cloned) | Shuffle an array. |
def sortBy(self, val=None):
if val is not None:
if _(val).isString():
return self._wrap(sorted(self.obj, key=lambda x,
*args: x.get(val)))
else:
return self._wrap(sorted(self.obj, key=val))
else:
return self._wrap(sorted(self.obj)) | Sort the object's values by a criterion produced by an iterator. |
def _lookupIterator(self, val):
if val is None:
return lambda el, *args: el
return val if _.isCallable(val) else lambda obj, *args: obj[val] | An internal function to generate lookup iterators. |
def _group(self, obj, val, behavior):
ns = self.Namespace()
ns.result = {}
iterator = self._lookupIterator(val)
def e(value, index, *args):
key = iterator(value, index)
behavior(ns.result, key, value)
_.each(obj, e)
if len(ns.result) == 1:
try:
return ns.result[0]
except KeyError:
return list(ns.result.values())[0]
return ns.result | An internal function used for aggregate "group by" operations. |
def groupBy(self, val):
def by(result, key, value):
if key not in result:
result[key] = []
result[key].append(value)
res = self._group(self.obj, val, by)
return self._wrap(res) | Groups the object's values by a criterion. Pass either a string
attribute to group by, or a function that returns the criterion. |
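For readers unfamiliar with the underscore-style API above, the same grouping idea in plain Python (a standalone illustration, not the library's implementation) looks like this:

```python
from collections import defaultdict

def group_by(items, criterion):
    # The criterion is either a key to look up or a callable producing the group key.
    key_fn = criterion if callable(criterion) else (lambda item: item[criterion])
    groups = defaultdict(list)
    for item in items:
        groups[key_fn(item)].append(item)
    return dict(groups)

words = ['one', 'two', 'three', 'four', 'five']
print(group_by(words, len))
# {3: ['one', 'two'], 5: ['three'], 4: ['four', 'five']}
```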
def indexBy(self, val=None):
if val is None:
val = lambda *args: args[0]
def by(result, key, value):
result[key] = value
res = self._group(self.obj, val, by)
return self._wrap(res) | Indexes the object's values by a criterion, similar to
`groupBy`, but for when you know that your index values will be unique. |
def countBy(self, val):
def by(result, key, value):
if key not in result:
result[key] = 0
result[key] += 1
res = self._group(self.obj, val, by)
return self._wrap(res) | Counts instances of an object that group by a certain criterion. Pass
either a string attribute to count by, or a function that returns the
criterion. |
def sortedIndex(self, obj, iterator=lambda x: x):
array = self.obj
value = iterator(obj)
low = 0
high = len(array)
while low < high:
mid = (low + high) >> 1
if iterator(array[mid]) < value:
low = mid + 1
else:
high = mid
return self._wrap(low) | Use a comparator function to figure out the smallest index at which
an object should be inserted so as to maintain order.
Uses binary search. |
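The binary search above has the same contract as the standard library's `bisect.bisect_left`, which can serve as a cross-check:

```python
import bisect

# bisect_left returns the smallest index at which the value can be inserted
# while keeping the array sorted, which is what sortedIndex computes above.
array = [10, 20, 30, 40, 50]
print(bisect.bisect_left(array, 35))  # 3
print(bisect.bisect_left(array, 30))  # 2 (inserts before equal elements)
```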
def first(self, n=1):
res = self.obj[0:n]
if len(res) is 1:
res = res[0]
return self._wrap(res) | Get the first element of an array. Passing **n** will return the
first N values in the array. Aliased as `head` and `take`.
The **guard** check allows it to work with `_.map`. |
def last(self, n=1):
res = self.obj[-n:]
if len(res) is 1:
res = res[0]
return self._wrap(res) | Get the last element of an array. Passing **n** will return the last N
values in the array.
The **guard** check allows it to work with `_.map`. |
def flatten(self, shallow=None):
return self._wrap(self._flatten(self.obj, shallow)) | Return a completely flattened version of an array. |
def without(self, *values):
if self._clean.isDict():
newlist = {}
for i, k in enumerate(self.obj):
# if k not in values: # use indexof to check identity
if _(values).indexOf(k) is -1:
newlist.set(k, self.obj[k])
else:
newlist = []
for i, v in enumerate(self.obj):
# if v not in values: # use indexof to check identity
if _(values).indexOf(v) is -1:
newlist.append(v)
return self._wrap(newlist) | Return a version of the array that does not
contain the specified value(s). |
def partition(self, predicate=None):
predicate = self._lookupIterator(predicate)
pass_list = []
fail_list = []
def by(elem, index, *args):
(pass_list if predicate(elem) else fail_list).append(elem)
_.each(self.obj, by)
return self._wrap([pass_list, fail_list]) | Split an array into two arrays: one whose elements all satisfy the given
predicate, and one whose elements all do not satisfy the predicate. |
def uniq(self, isSorted=False, iterator=None):
ns = self.Namespace()
ns.results = []
ns.array = self.obj
initial = self.obj
if iterator is not None:
initial = _(ns.array).map(iterator)
def by(memo, value, index):
if ((_.last(memo) != value or
not len(memo)) if isSorted else not _.include(memo, value)):
memo.append(value)
ns.results.append(ns.array[index])
return memo
ret = _.reduce(initial, by)
return self._wrap(ret) | Produce a duplicate-free version of the array. If the array has already
been sorted, you have the option of using a faster algorithm.
Aliased as `unique`. |
def union(self, *args):
# setobj = set(self.obj)
# for i, v in enumerate(args):
# setobj = setobj + set(args[i])
# return self._wrap(self._clean._toOriginal(setobj))
args = list(args)
args.insert(0, self.obj)
return self._wrap(_.uniq(self._flatten(args, True, []))) | Produce an array that contains the union: each distinct element
from all of the passed-in arrays. |
def intersection(self, *args):
if type(self.obj[0]) is int:
a = self.obj
else:
a = tuple(self.obj[0])
setobj = set(a)
for i, v in enumerate(args):
setobj = setobj & set(args[i])
return self._wrap(list(setobj)) | Produce an array that contains every item shared between all the
passed-in arrays. |
def difference(self, *args):
setobj = set(self.obj)
for i, v in enumerate(args):
setobj = setobj - set(args[i])
return self._wrap(self._clean._toOriginal(setobj)) | Take the difference between one array and a number of other arrays.
Only the elements present in just the first array will remain. |