text_prompt | code_prompt |
---|---|
<SYSTEM_TASK:>
Expects a window to parent into a dockable Nuke panel.
<END_TASK>
<USER_TASK:>
Description:
def dock(window):
""" Expecting a window to parent into a Nuke panel, that is dockable. """ |
# Deleting existing dock
# There is a bug where existing docks are kept in-memory when closed via UI
if self._dock:
print("Deleting existing dock...")
parent = self._dock
dialog = None
stacked_widget = None
main_windows = []
# Getting dock parents
while parent:
if isinstance(parent, QtWidgets.QDialog):
dialog = parent
if isinstance(parent, QtWidgets.QStackedWidget):
stacked_widget = parent
if isinstance(parent, QtWidgets.QMainWindow):
main_windows.append(parent)
parent = parent.parent()
dialog.deleteLater()
if len(main_windows) > 1:
# Then it's a floating window
if stacked_widget.count() == 1:
# Then it's empty and we can close it,
# as is native Nuke UI behaviour
main_windows[0].deleteLater()
# Creating new dock
pane = nuke.getPaneFor("Properties.1")
widget_path = "pyblish_nuke.lib.pyblish_nuke_dockwidget"
panel = nukescripts.panels.registerWidgetAsPanel(widget_path,
window.windowTitle(),
"pyblish_nuke.dock",
True).addToPane(pane)
panel_widget = panel.customKnob.getObject().widget
panel_widget.layout().addWidget(window)
_nuke_set_zero_margins(panel_widget)
self._dock = panel_widget
return self._dock |
<SYSTEM_TASK:>
Create an attached thread. An attached thread gets a ctx and a PAIR
<END_TASK>
<USER_TASK:>
Description:
def zthread_fork(ctx, func, *args, **kwargs):
"""
Create an attached thread. An attached thread gets a ctx and a PAIR
pipe back to its parent. It must monitor its pipe, and exit if the
pipe becomes unreadable. Returns pipe, or NULL if there was an error.
""" |
a = ctx.socket(zmq.PAIR)
a.setsockopt(zmq.LINGER, 0)
a.setsockopt(zmq.RCVHWM, 100)
a.setsockopt(zmq.SNDHWM, 100)
a.setsockopt(zmq.SNDTIMEO, 5000)
a.setsockopt(zmq.RCVTIMEO, 5000)
b = ctx.socket(zmq.PAIR)
b.setsockopt(zmq.LINGER, 0)
b.setsockopt(zmq.RCVHWM, 100)
b.setsockopt(zmq.SNDHWM, 100)
b.setsockopt(zmq.SNDTIMEO, 5000)
b.setsockopt(zmq.RCVTIMEO, 5000)
iface = "inproc://%s" % binascii.hexlify(os.urandom(8))
a.bind(iface)
b.connect(iface)
thread = threading.Thread(target=func, args=((ctx, b) + args), kwargs=kwargs)
thread.daemon = False
thread.start()
return a |
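A minimal usage sketch, assuming pyzmq is installed and zthread_fork is importable as defined above; the worker receives the context and its end of the PAIR pipe as its first two arguments:

import zmq

def worker(ctx, pipe, greeting):
    # The attached thread monitors its pipe and exits when told to stop.
    while True:
        try:
            msg = pipe.recv_string()
        except zmq.Again:
            continue  # receive timed out (RCVTIMEO); keep polling
        if msg == "STOP":
            break
        pipe.send_string("%s %s" % (greeting, msg))

ctx = zmq.Context()
pipe = zthread_fork(ctx, worker, "hello")  # returns the parent's end of the pipe
pipe.send_string("world")
print(pipe.recv_string())  # -> "hello world"
pipe.send_string("STOP")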
<SYSTEM_TASK:>
Prevent accidental assignment of existing members
<END_TASK>
<USER_TASK:>
Description:
def _remap(object, name, value, safe=True):
"""Prevent accidental assignment of existing members
Arguments:
object (object): Parent of new attribute
name (str): Name of new attribute
value (object): Value of new attribute
safe (bool): Whether or not to guarantee that
the new attribute was not overwritten.
Can be set to False under condition that
it is superseded by extensive testing.
""" |
if os.getenv("QT_TESTING") is not None and safe:
# Cannot alter original binding.
if hasattr(object, name):
raise AttributeError("Cannot override existing name: "
"%s.%s" % (object.__name__, name))
# Cannot alter classes of functions
if type(object).__name__ != "module":
raise AttributeError("%s != 'module': Cannot alter "
"anything but modules" % object)
elif hasattr(object, name):
# Keep track of modifications
self.__modified__.append(name)
self.__remapped__.append(name)
setattr(object, name, value) |
<SYSTEM_TASK:>
Convert dict values of bool type:
<END_TASK>
<USER_TASK:>
Description:
def filter_params(params):
"""
Convert dict values of bool type:
False -> "false"
True -> "true"
""" |
if params is not None:
new_params = copy.deepcopy(params)
new_params = dict((k, v) for k, v in new_params.items() if v is not None)
for key, value in new_params.items():
if isinstance(value, bool):
new_params[key] = "true" if value else "false"
return new_params |
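A quick illustration of the behaviour, assuming filter_params is importable as defined above; keys with a None value are dropped and booleans become lowercase strings:

params = {"verbose": True, "dry_run": False, "name": "job-1", "tag": None}
print(filter_params(params))
# -> {'verbose': 'true', 'dry_run': 'false', 'name': 'job-1'}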
<SYSTEM_TASK:>
Load fixtures using a data migration.
<END_TASK>
<USER_TASK:>
Description:
def fixture(app, fixtures, fixtures_dir='fixtures', raise_does_not_exist=False,
reversible=True, models=[]):
"""
Load fixtures using a data migration.
The migration will by default provide a rollback, deleting items by primary
key. This is not always what you want; you may set reversible=False to
prevent rolling back.
Usage:
import myapp
import anotherapp
operations = [
migrations.RunPython(**fixture(myapp, 'eggs.yaml')),
migrations.RunPython(**fixture(anotherapp, ['sausage.json', 'walks.yaml']))
migrations.RunPython(**fixture(yap, ['foo.json'], reversible=False))
]
""" |
fixture_path = os.path.join(app.__path__[0], fixtures_dir)
if isinstance(fixtures, string_types):
fixtures = [fixtures]
def get_format(fixture):
return os.path.splitext(fixture)[1][1:]
def get_objects():
for fixture in fixtures:
with open(os.path.join(fixture_path, fixture), 'rb') as f:
objects = serializers.deserialize(get_format(fixture),
f,
ignorenonexistent=True)
for obj in objects:
yield obj
def patch_apps(func):
"""
Patch the app registry.
Note that this is necessary so that the Deserializer does not use the
current version of the model, which may not necessarily be representative
of the model the fixture was created for.
"""
@wraps(func)
def inner(apps, schema_editor):
try:
# Firstly patch the serializers registry
original_apps = django.core.serializers.python.apps
django.core.serializers.python.apps = apps
return func(apps, schema_editor)
finally:
# Ensure we always unpatch the serializers registry
django.core.serializers.python.apps = original_apps
return inner
@patch_apps
def load_fixture(apps, schema_editor):
for obj in get_objects():
obj.save()
@patch_apps
def unload_fixture(apps, schema_editor):
for obj in get_objects():
model = apps.get_model(app.__name__, obj.object.__class__.__name__)
kwargs = dict()
if 'id' in obj.object.__dict__:
kwargs.update(id=obj.object.__dict__.get('id'))
elif 'slug' in obj.object.__dict__:
kwargs.update(slug=obj.object.__dict__.get('slug'))
else:
kwargs.update(**obj.object.__dict__)
try:
model.objects.get(**kwargs).delete()
except model.DoesNotExist:
if not raise_does_not_exist:
raise FixtureObjectDoesNotExist(("Model %s instance with "
"kwargs %s does not exist."
% (model, kwargs)))
kwargs = dict(code=load_fixture)
if reversible:
kwargs['reverse_code'] = unload_fixture
return kwargs |
<SYSTEM_TASK:>
Initialize filter just before it will be used.
<END_TASK>
<USER_TASK:>
Description:
def setup(self):
"""Initialize filter just before it will be used.""" |
super(CleanCSSFilter, self).setup()
self.root = current_app.config.get('COLLECT_STATIC_ROOT') |
<SYSTEM_TASK:>
Determine which option name to use.
<END_TASK>
<USER_TASK:>
Description:
def rebase_opt(self):
"""Determine which option name to use.""" |
if not hasattr(self, '_rebase_opt'):
# out = b"MAJOR.MINOR.REVISION" // b"3.4.19" or b"4.0.0"
out, err = Popen(
['cleancss', '--version'], stdout=PIPE).communicate()
ver = int(out[:out.index(b'.')])
self._rebase_opt = ['--root', self.root] if ver == 3 else []
return self._rebase_opt |
<SYSTEM_TASK:>
Wrap translation in Angular module.
<END_TASK>
<USER_TASK:>
Description:
def output(self, _in, out, **kwargs):
"""Wrap translation in Angular module.""" |
out.write(
'angular.module("{0}", ["gettext"]).run('
'["gettextCatalog", function (gettextCatalog) {{'.format(
self.catalog_name
)
)
out.write(_in.read())
out.write('}]);') |
<SYSTEM_TASK:>
Process individual translation file.
<END_TASK>
<USER_TASK:>
Description:
def input(self, _in, out, **kwargs):
"""Process individual translation file.""" |
language_code = _re_language_code.search(_in.read()).group(
'language_code'
)
_in.seek(0)  # move back to the beginning after matching the language
catalog = read_po(_in)
out.write('gettextCatalog.setStrings("{0}", '.format(language_code))
out.write(json.dumps({
key: value.string for key, value in catalog._messages.items()
if key and value.string
}))
out.write(');') |
<SYSTEM_TASK:>
Cache the return value in the correct cache directory. Set 'method' to
<END_TASK>
<USER_TASK:>
Description:
def disk_cache(cls, basename, function, *args, method=True, **kwargs):
"""
Cache the return value in the correct cache directory. Set 'method' to
false for static methods.
""" |
@utility.disk_cache(basename, cls.directory(), method=method)
def wrapper(*args, **kwargs):
return function(*args, **kwargs)
return wrapper(*args, **kwargs) |
<SYSTEM_TASK:>
Download a file into the correct cache directory.
<END_TASK>
<USER_TASK:>
Description:
def download(cls, url, filename=None):
"""
Download a file into the correct cache directory.
""" |
return utility.download(url, cls.directory(), filename) |
<SYSTEM_TASK:>
Path that should be used for caching. Different for all subclasses.
<END_TASK>
<USER_TASK:>
Description:
def directory(cls, prefix=None):
"""
Path that should be used for caching. Different for all subclasses.
""" |
prefix = prefix or utility.read_config().directory
name = cls.__name__.lower()
directory = os.path.expanduser(os.path.join(prefix, name))
utility.ensure_directory(directory)
return directory |
<SYSTEM_TASK:>
Get the rconfiguration_id of the last job run by the remoteci.
<END_TASK>
<USER_TASK:>
Description:
def get_last_rconfiguration_id(topic_id, remoteci_id, db_conn=None):
"""Get the rconfiguration_id of the last job run by the remoteci.
:param topic_id: the topic
:param remoteci_id: the remoteci id
:return: last rconfiguration_id of the remoteci
""" |
db_conn = db_conn or flask.g.db_conn
__TABLE = models.JOBS
query = sql.select([__TABLE.c.rconfiguration_id]). \
order_by(sql.desc(__TABLE.c.created_at)). \
where(sql.and_(__TABLE.c.topic_id == topic_id,
__TABLE.c.remoteci_id == remoteci_id)). \
limit(1)
rconfiguration_id = db_conn.execute(query).fetchone()
if rconfiguration_id is not None:
return str(rconfiguration_id[0])
else:
return None |
<SYSTEM_TASK:>
Get a remoteci configuration. This will iterate over each
<END_TASK>
<USER_TASK:>
Description:
def get_remoteci_configuration(topic_id, remoteci_id, db_conn=None):
"""Get a remoteci configuration. This will iterate over each
configuration in a round robin manner depending on the last
rconfiguration used by the remoteci.""" |
db_conn = db_conn or flask.g.db_conn
last_rconfiguration_id = get_last_rconfiguration_id(
topic_id, remoteci_id, db_conn=db_conn)
_RCONFIGURATIONS = models.REMOTECIS_RCONFIGURATIONS
_J_RCONFIGURATIONS = models.JOIN_REMOTECIS_RCONFIGURATIONS
query = sql.select([_RCONFIGURATIONS]). \
select_from(_J_RCONFIGURATIONS.
join(_RCONFIGURATIONS)). \
where(_J_RCONFIGURATIONS.c.remoteci_id == remoteci_id)
query = query.where(sql.and_(_RCONFIGURATIONS.c.state != 'archived',
_RCONFIGURATIONS.c.topic_id == topic_id))
query = query.order_by(sql.desc(_RCONFIGURATIONS.c.created_at))
query = query.order_by(sql.asc(_RCONFIGURATIONS.c.name))
all_rconfigurations = db_conn.execute(query).fetchall()
if len(all_rconfigurations) > 0:
for i in range(len(all_rconfigurations)):
if str(all_rconfigurations[i]['id']) == last_rconfiguration_id:
# if i == 0, then index -1 is the last element
return all_rconfigurations[i - 1]
return all_rconfigurations[0]
else:
return None |
<SYSTEM_TASK:>
Find and delete any text nodes containing nothing but whitespace in
<END_TASK>
<USER_TASK:>
Description:
def ignore_whitespace_text_nodes(cls, wrapped_node):
"""
Find and delete any text nodes containing nothing but whitespace
in the given node and its descendants.
This is useful for cleaning up excess low-value text nodes in a
document DOM after parsing a pretty-printed XML document.
""" |
for child in wrapped_node.children:
if child.is_text and child.value.strip() == '':
child.delete()
else:
cls.ignore_whitespace_text_nodes(child) |
<SYSTEM_TASK:>
Verify the existence of a resource in the database and then
<END_TASK>
<USER_TASK:>
Description:
def verify_existence_and_get(id, table, name=None, get_id=False):
"""Verify the existence of a resource in the database and then
return it if it exists, according to the condition, or raise an
exception.
:param id: id of the resource
:param table: the table object
:param name: the name of the row to look for
:param get_id: if True, return only the ID
:return:
""" |
where_clause = table.c.id == id
if name:
where_clause = table.c.name == name
if 'state' in table.columns:
where_clause = sql.and_(table.c.state != 'archived', where_clause)
query = sql.select([table]).where(where_clause)
result = flask.g.db_conn.execute(query).fetchone()
if result is None:
raise dci_exc.DCIException('Resource "%s" not found.' % id,
status_code=404)
if get_id:
return result.id
return result |
<SYSTEM_TASK:>
Verify that the user's team belongs to the given topic. If
<END_TASK>
<USER_TASK:>
Description:
def verify_team_in_topic(user, topic_id):
"""Verify that the user's team does belongs to the given topic. If
the user is an admin or read only user then it belongs to all topics.
""" |
if user.is_super_admin() or user.is_read_only_user():
return
if str(topic_id) not in user_topic_ids(user):
raise dci_exc.Unauthorized() |
<SYSTEM_TASK:>
From the _format_level_1 function we have a list of rows. Because of using
<END_TASK>
<USER_TASK:>
Description:
def _format_level_2(rows, list_embeds, embed_many):
"""
From the _format_level_1 function we have a list of rows. Because of using
joins, we have as many rows as there are join results.
For example:
[{'id': 'id1',
  'name': 'name1',
  'b': {'id': 'id2',
        'name': 'name2'}
  },
 {'id': 'id1',
  'name': 'name1',
  'b': {'id': 'id4',
        'name': 'name4'}
  }
]
Here there are two elements which correspond to one row because of the
embed field 'b'. So we should transform it to:
[{'id': 'id1',
  'name': 'name1',
  'b': [{'id': 'id2',
         'name': 'name2'},
        {'id': 'id4',
         'name': 'name4'}]
  }
]
This is the purpose of this function.
""" |
def _uniqify_list(list_of_dicts):
# list() for py34
result = []
set_ids = set()
for v in list_of_dicts:
if v['id'] in set_ids:
continue
set_ids.add(v['id'])
result.append(v)
return result
row_ids_to_embed_values = {}
for row in rows:
# for each row, associate the row's id -> {all embed values}
if row['id'] not in row_ids_to_embed_values:
row_ids_to_embed_values[row['id']] = {}
# add embeds values to the current row
for embd in list_embeds:
if embd not in row:
continue
if embd not in row_ids_to_embed_values[row['id']]:
# create a list or a dict depending on embed_many
if embed_many[embd]:
row_ids_to_embed_values[row['id']][embd] = [row[embd]]
else:
row_ids_to_embed_values[row['id']][embd] = row[embd]
else:
if embed_many[embd]:
row_ids_to_embed_values[row['id']][embd].append(row[embd])
# uniqify each embed list
for embd in list_embeds:
if embd in row_ids_to_embed_values[row['id']]:
embed_values = row_ids_to_embed_values[row['id']][embd]
if isinstance(embed_values, list):
row_ids_to_embed_values[row['id']][embd] = _uniqify_list(embed_values) # noqa
else:
row_ids_to_embed_values[row['id']][embd] = {}
if embed_many[embd]:
row_ids_to_embed_values[row['id']][embd] = []
# last loop over the initial rows in order to keep the ordering
result = []
# if row id in seen set then it means the row has been completely processed
seen = set()
for row in rows:
if row['id'] in seen:
continue
seen.add(row['id'])
new_row = {}
# adds level 1 fields
for field in row:
if field not in list_embeds:
new_row[field] = row[field]
# adds all level 2 fields
# list() for py34
row_ids_to_embed_values_keys = list(row_ids_to_embed_values[new_row['id']].keys()) # noqa
row_ids_to_embed_values_keys.sort()
# adds the nested fields if there are some
for embd in list_embeds:
if embd in row_ids_to_embed_values_keys:
if '.' in embd:
prefix, suffix = embd.split('.', 1)
new_row[prefix][suffix] = row_ids_to_embed_values[new_row['id']][embd] # noqa
else:
new_row[embd] = row_ids_to_embed_values[new_row['id']][embd] # noqa
else:
new_row_embd_value = {}
if embed_many[embd]:
new_row_embd_value = []
if '.' in embd:
prefix, suffix = embd.split('.', 1)
new_row[prefix][suffix] = new_row_embd_value
else:
new_row[embd] = new_row_embd_value
# row is complete !
result.append(new_row)
return result |
<SYSTEM_TASK:>
Build a basic values object used in every create method.
<END_TASK>
<USER_TASK:>
Description:
def common_values_dict():
"""Build a basic values object used in every create method.
All our resources contain the same subset of values. Instead of
duplicating this code every time, this method ensures it is done in only
one place.
""" |
now = datetime.datetime.utcnow().isoformat()
etag = utils.gen_etag()
values = {
'id': utils.gen_uuid(),
'created_at': now,
'updated_at': now,
'etag': etag
}
return values |
<SYSTEM_TASK:>
Returns some information about the currently authenticated identity
<END_TASK>
<USER_TASK:>
Description:
def get_identity(identity):
"""Returns some information about the currently authenticated identity""" |
return flask.Response(
json.dumps(
{
'identity': {
'id': identity.id,
'etag': identity.etag,
'name': identity.name,
'fullname': identity.fullname,
'email': identity.email,
'timezone': identity.timezone,
'teams': _encode_dict(identity.teams)
}
}
), 200,
headers={'ETag': identity.etag},
content_type='application/json'
) |
<SYSTEM_TASK:>
Unattach an issue from a specific job.
<END_TASK>
<USER_TASK:>
Description:
def unattach_issue(resource_id, issue_id, table):
"""Unattach an issue from a specific job.""" |
v1_utils.verify_existence_and_get(issue_id, _TABLE)
if table.name == 'jobs':
join_table = models.JOIN_JOBS_ISSUES
where_clause = sql.and_(join_table.c.job_id == resource_id,
join_table.c.issue_id == issue_id)
else:
join_table = models.JOIN_COMPONENTS_ISSUES
where_clause = sql.and_(join_table.c.component_id == resource_id,
join_table.c.issue_id == issue_id)
query = join_table.delete().where(where_clause)
result = flask.g.db_conn.execute(query)
if not result.rowcount:
raise dci_exc.DCIConflict('%s_issues' % table.name, issue_id)
return flask.Response(None, 204, content_type='application/json') |
<SYSTEM_TASK:>
Attach an issue to a specific job.
<END_TASK>
<USER_TASK:>
Description:
def attach_issue(resource_id, table, user_id):
"""Attach an issue to a specific job.""" |
data = schemas.issue.post(flask.request.json)
issue = _get_or_create_issue(data)
# Second, insert a join record in the JOIN_JOBS_ISSUES or
# JOIN_COMPONENTS_ISSUES database.
if table.name == 'jobs':
join_table = models.JOIN_JOBS_ISSUES
else:
join_table = models.JOIN_COMPONENTS_ISSUES
key = '%s_id' % table.name[0:-1]
query = join_table.insert().values({
'user_id': user_id,
'issue_id': issue['id'],
key: resource_id
})
try:
flask.g.db_conn.execute(query)
except sa_exc.IntegrityError:
raise dci_exc.DCICreationConflict(join_table.name,
'%s, issue_id' % key)
result = json.dumps({'issue': dict(issue)})
return flask.Response(result, 201, content_type='application/json') |
<SYSTEM_TASK:>
Remove collect's static root folder from list.
<END_TASK>
<USER_TASK:>
Description:
def collect_staticroot_removal(app, blueprints):
"""Remove collect's static root folder from list.""" |
collect_root = app.extensions['collect'].static_root
return [bp for bp in blueprints if (
bp.has_static_folder and bp.static_folder != collect_root)] |
<SYSTEM_TASK:>
Create the dci client in the master realm.
<END_TASK>
<USER_TASK:>
Description:
def create_client(access_token):
"""Create the dci client in the master realm.""" |
url = 'http://keycloak:8080/auth/admin/realms/dci-test/clients'
r = requests.post(url,
data=json.dumps(client_data),
headers=get_auth_headers(access_token))
if r.status_code in (201, 409):
print('Keycloak client dci created successfully.')
else:
raise Exception(
'Error while creating Keycloak client dci:\nstatus code %s\n'
'error: %s' % (r.status_code, r.content)
) |
<SYSTEM_TASK:>
Convert all booleans to lowercase strings
<END_TASK>
<USER_TASK:>
Description:
def _serializeBooleans(params):
""""Convert all booleans to lowercase strings""" |
serialized = {}
for name, value in params.items():
if value is True:
value = 'true'
elif value is False:
value = 'false'
serialized[name] = value
return serialized |
<SYSTEM_TASK:>
Requests wrapper function
<END_TASK>
<USER_TASK:>
Description:
def request(self, method, url, parameters=dict()):
"""Requests wrapper function""" |
# The requests library uses urllib, which serializes to "True"/"False" while Pingdom requires lowercase
parameters = self._serializeBooleans(parameters)
headers = {'App-Key': self.apikey}
if self.accountemail:
headers.update({'Account-Email': self.accountemail})
# Method selection handling
if method.upper() == 'GET':
response = requests.get(self.url + url, params=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'POST':
response = requests.post(self.url + url, data=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'PUT':
response = requests.put(self.url + url, data=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'DELETE':
response = requests.delete(self.url + url, params=parameters,
auth=(self.username, self.password),
headers=headers)
else:
raise Exception("Invalid method in pingdom request")
# Store pingdom api limits
self.shortlimit = response.headers.get(
'Req-Limit-Short',
self.shortlimit)
self.longlimit = response.headers.get(
'Req-Limit-Long',
self.longlimit)
# Verify OK response
if response.status_code != 200:
sys.stderr.write('ERROR from %s: %d' % (response.url,
response.status_code))
sys.stderr.write('Returned data: %s\n' % response.json())
response.raise_for_status()
return response |
<SYSTEM_TASK:>
Pulls all checks from pingdom
<END_TASK>
<USER_TASK:>
Description:
def getChecks(self, **parameters):
"""Pulls all checks from pingdom
Optional Parameters:
* limit -- Limits the number of returned probes to the
specified quantity.
Type: Integer (max 25000)
Default: 25000
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
* tags -- Filter listing by tag/s
Type: String
Default: None
""" |
# Warn user about unhandled parameters
for key in parameters:
if key not in ['limit', 'offset', 'tags']:
sys.stderr.write('%s not a valid argument for getChecks()\n'
% key)
response = self.request('GET', 'checks', parameters)
return [PingdomCheck(self, x) for x in response.json()['checks']] |
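A hedged usage sketch; the client constructor arguments below (username, password, API key) are assumed from the attributes referenced in request() above and are not shown in this snippet:

p = Pingdom("user@example.com", "secret", "my-api-key")  # constructor args assumed
production_checks = p.getChecks(limit=10, tags="production")
for check in production_checks:
    print(check)  # each item is a PingdomCheck built from the API response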
<SYSTEM_TASK:>
Returns a detailed description of a specified check.
<END_TASK>
<USER_TASK:>
Description:
def getCheck(self, checkid):
"""Returns a detailed description of a specified check.""" |
check = PingdomCheck(self, {'id': checkid})
check.getDetails()
return check |
<SYSTEM_TASK:>
Returns a list of all Pingdom probe servers
<END_TASK>
<USER_TASK:>
Description:
def probes(self, **kwargs):
"""Returns a list of all Pingdom probe servers
Parameters:
* limit -- Limits the number of returned probes to the specified
quantity
Type: Integer
* offset -- Offset for listing (requires limit).
Type: Integer
Default: 0
* onlyactive -- Return only active probes
Type: Boolean
Default: False
* includedeleted -- Include old probes that are no longer in use
Type: Boolean
Default: False
Returned structure:
[
{
'id' : <Integer> Unique probe id
'country' : <String> Country
'city' : <String> City
'name' : <String> Name
'active' : <Boolean> True if probe is active
'hostname' : <String> DNS name
'ip' : <String> IP address
'countryiso': <String> Country ISO code
},
...
]
""" |
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset', 'onlyactive', 'includedeleted']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of probes()\n')
return self.request("GET", "probes", kwargs).json()['probes'] |
<SYSTEM_TASK:>
Perform a traceroute to a specified target from a specified Pingdom
<END_TASK>
<USER_TASK:>
Description:
def traceroute(self, host, probeid):
"""Perform a traceroute to a specified target from a specified Pingdom
probe.
Provide hostname to check and probeid to check from
Returned structure:
{
'result' : <String> Traceroute output
'probeid' : <Integer> Probe identifier
'probedescription' : <String> Probe description
}
""" |
response = self.request('GET', 'traceroute', {'host': host,
'probeid': probeid})
return response.json()['traceroute'] |
<SYSTEM_TASK:>
Returns a list of all contacts.
<END_TASK>
<USER_TASK:>
Description:
def getContacts(self, **kwargs):
"""Returns a list of all contacts.
Optional Parameters:
* limit -- Limits the number of returned contacts to the specified
quantity.
Type: Integer
Default: 100
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
Returned structure:
[
'id' : <Integer> Contact identifier
'name' : <String> Contact name
'email' : <String> Contact email
'cellphone' : <String> Contact telephone
'countryiso' : <String> Cellphone country ISO code
'defaultsmsprovider' : <String> Default SMS provider
'directtwitter' : <Boolean> Send Tweets as direct messages
'twitteruser' : <String> Twitter username
'paused' : <Boolean> True if contact is paused
'iphonetokens' : <String list> iPhone tokens
'androidtokens' : <String list> android tokens
]
""" |
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of getContacts()\n')
return [PingdomContact(self, x) for x in
self.request("GET", "notification_contacts", kwargs).json()['contacts']] |
<SYSTEM_TASK:>
Modifies a list of contacts.
<END_TASK>
<USER_TASK:>
Description:
def modifyContacts(self, contactids, paused):
"""Modifies a list of contacts.
Provide comma separated list of contact ids and desired paused state
Returns status message
""" |
response = self.request("PUT", "notification_contacts", {'contactids': contactids,
'paused': paused})
return response.json()['message'] |
<SYSTEM_TASK:>
Creates a new email report
<END_TASK>
<USER_TASK:>
Description:
def newEmailReport(self, name, **kwargs):
"""Creates a new email report
Returns status message for operation
Optional parameters:
* checkid -- Check identifier. If omitted, this will be an
overview report
Type: Integer
* frequency -- Report frequency
Type: String ['monthly', 'weekly', 'daily']
* contactids -- Comma separated list of receiving contact
identifiers
Type: String
* additionalemails -- Comma separated list of additional receiving
emails
Type: String
""" |
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['checkid', 'frequency', 'contactids',
'additionalemails']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newEmailReport()\n')
parameters = {'name': name}
for key, value in kwargs.items():
parameters[key] = value
return self.request('POST', 'reports.email',
parameters).json()['message'] |
<SYSTEM_TASK:>
Returns a list of PingdomSharedReport instances
<END_TASK>
<USER_TASK:>
Description:
def getSharedReports(self):
"""Returns a list of PingdomSharedReport instances""" |
response = self.request('GET',
'reports.shared').json()['shared']['banners']
reports = [PingdomSharedReport(self, x) for x in response]
return reports |
<SYSTEM_TASK:>
Download a file and return its filename on the local file system. If the
<END_TASK>
<USER_TASK:>
Description:
def download(url, directory, filename=None):
"""
Download a file and return its filename on the local file system. If the
file is already there, it will not be downloaded again. The filename is
derived from the url if not provided. Return the filepath.
""" |
if not filename:
_, filename = os.path.split(url)
directory = os.path.expanduser(directory)
ensure_directory(directory)
filepath = os.path.join(directory, filename)
if os.path.isfile(filepath):
return filepath
print('Download', filepath)
with urlopen(url) as response, open(filepath, 'wb') as file_:
shutil.copyfileobj(response, file_)
return filepath |
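A small usage sketch; the URL and cache directory below are hypothetical:

path = download("https://example.com/files/dataset.tar.gz", "~/.cache/mydata")
print(path)  # e.g. /home/user/.cache/mydata/dataset.tar.gz; later calls reuse the cached file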
<SYSTEM_TASK:>
Create the directories along the provided directory path that do not exist.
<END_TASK>
<USER_TASK:>
Description:
def ensure_directory(directory):
"""
Create the directories along the provided directory path that do not exist.
""" |
directory = os.path.expanduser(directory)
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise e |
<SYSTEM_TASK:>
This function handles the various ways that users may enter 'validation functions', so as to output a single
<END_TASK>
<USER_TASK:>
Description:
def _process_validation_function_s(validation_func, # type: ValidationFuncs
auto_and_wrapper=True # type: bool
):
# type: (...) -> Union[Callable, List[Callable]]
"""
This function handles the various ways that users may enter 'validation functions', so as to output a single
callable method. Setting "auto_and_wrapper" to False allows callers to get a list of callables instead.
valid8 supports the following expressions for 'validation functions'
* <ValidationFunc>
* List[<ValidationFunc>(s)]. The list must not be empty.
<ValidationFunc> may either be
* a callable or a mini-lambda expression (instance of LambdaExpression - in which case it is automatically
'closed').
* a Tuple[callable or mini-lambda expression ; failure_type]. Where failure type should be a subclass of
valid8.Failure. In which case the tuple will be replaced with a _failure_raiser(callable, failure_type)
When the content provided does not match the above, this function raises a ValueError. Otherwise it produces a
list of callables that will typically be turned into an `and_` in the nominal case, except if this is called
inside `or_` or `xor_`.
:param validation_func: the base validation function or list of base validation functions to use. A callable, a
tuple(callable, help_msg_str), a tuple(callable, failure_type), or a list of several such elements. Nested lists
are supported and indicate an implicit `and_`. Tuples indicate an implicit
`_failure_raiser`. [mini_lambda](https://smarie.github.io/python-mini-lambda/) expressions can be used instead
of callables, they will be transformed to functions automatically.
:param auto_and_wrapper: if True (default), this function returns a single callable that is a and_() of all
functions. Otherwise a list is returned.
:return:
""" |
# handle the case where validation_func is not yet a list or is empty or none
if validation_func is None:
raise ValueError('mandatory validation_func is None')
elif not isinstance(validation_func, list):
# do not use list() because we do not want to convert tuples here.
validation_func = [validation_func]
elif len(validation_func) == 0:
raise ValueError('provided validation_func list is empty')
# now validation_func is a non-empty list
final_list = []
for v in validation_func:
# special case of a LambdaExpression: automatically convert to a function
# note: we have to do it before anything else (such as .index) otherwise we may get failures
v = as_function(v)
if isinstance(v, tuple):
# convert all the tuples to failure raisers
if len(v) == 2:
if isinstance(v[1], str):
final_list.append(_failure_raiser(v[0], help_msg=v[1]))
elif isinstance(v[1], type) and issubclass(v[1], WrappingFailure):
final_list.append(_failure_raiser(v[0], failure_type=v[1]))
else:
raise TypeError('base validation function(s) not compliant with the allowed syntax. Base validation'
' function(s) can be {}. Found [{}].'.format(supported_syntax, str(v)))
else:
raise TypeError('base validation function(s) not compliant with the allowed syntax. Base validation'
' function(s) can be {}. Found [{}].'.format(supported_syntax, str(v)))
elif callable(v):
# use the validator directly
final_list.append(v)
elif isinstance(v, list):
# a list is an implicit and_, make it explicit
final_list.append(and_(*v))
else:
raise TypeError('base validation function(s) not compliant with the allowed syntax. Base validation'
' function(s) can be {}. Found [{}].'.format(supported_syntax, str(v)))
# return what is required:
if auto_and_wrapper:
# a single callable doing the 'and'
return and_(*final_list)
else:
# or the list (typically for use inside or_(), xor_()...)
return final_list |
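A sketch of the accepted input shapes, using hypothetical base validation functions; with the default auto_and_wrapper=True a single and_() callable is returned:

def is_positive(x):
    return x > 0

# a plain callable plus a (callable, help message) tuple, combined with an implicit and_
check = _process_validation_function_s([is_positive,
                                         (lambda x: x % 2 == 0, "x must be even")])
check(4)   # -> True
check(-2)  # raises a composition failure naming the function that failed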
<SYSTEM_TASK:>
Internal utility method to extract optional arguments from kwargs.
<END_TASK>
<USER_TASK:>
Description:
def pop_kwargs(kwargs,
names_with_defaults, # type: List[Tuple[str, Any]]
allow_others=False
):
"""
Internal utility method to extract optional arguments from kwargs.
:param kwargs:
:param names_with_defaults:
:param allow_others: if False (default) then an error will be raised if kwargs still contains something at the end.
:return:
""" |
all_arguments = []
for name, default_ in names_with_defaults:
try:
val = kwargs.pop(name)
except KeyError:
val = default_
all_arguments.append(val)
if not allow_others and len(kwargs) > 0:
raise ValueError("Unsupported arguments: %s" % kwargs)
if len(names_with_defaults) == 1:
return all_arguments[0]
else:
return all_arguments |
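A short usage sketch with hypothetical option names:

def connect(url, **kwargs):
    # extract optional settings, with defaults, and reject anything unexpected
    timeout, retries = pop_kwargs(kwargs, [("timeout", 30), ("retries", 3)])
    return url, timeout, retries

print(connect("http://example.com", retries=5))  # -> ('http://example.com', 30, 5)
connect("http://example.com", bogus=1)           # raises ValueError("Unsupported arguments: ...")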
<SYSTEM_TASK:>
Overrides the base method in order to give details on the various successes and failures
<END_TASK>
<USER_TASK:>
Description:
def get_details(self):
""" Overrides the base method in order to give details on the various successes and failures """ |
# transform the dictionary of failures into a printable form
need_to_print_value = True
failures_for_print = OrderedDict()
for validator, failure in self.failures.items():
name = get_callable_name(validator)
if isinstance(failure, Exception):
if isinstance(failure, WrappingFailure) or isinstance(failure, CompositionFailure):
need_to_print_value = False
failures_for_print[name] = '{exc_type}: {msg}'.format(exc_type=type(failure).__name__, msg=str(failure))
else:
failures_for_print[name] = str(failure)
if need_to_print_value:
value_str = ' for value [{val}]'.format(val=self.wrong_value)
else:
value_str = ''
# OrderedDict does not pretty print...
key_values_str = [repr(key) + ': ' + repr(val) for key, val in failures_for_print.items()]
failures_for_print_str = '{' + ', '.join(key_values_str) + '}'
# Note: we do not cite the value in the message since it is most probably available in inner messages [{val}]
msg = '{what}{possibly_value}. Successes: {success} / Failures: {fails}' \
''.format(what=self.get_what(), possibly_value=value_str,
success=self.successes, fails=failures_for_print_str)
return msg |
<SYSTEM_TASK:>
Utility method to play all the provided validators on the provided value and output the
<END_TASK>
<USER_TASK:>
Description:
def play_all_validators(self, validators, value):
"""
Utility method to play all the provided validators on the provided value and output the resulting successes and failures.
:param validators:
:param value:
:return:
""" |
successes = list()
failures = OrderedDict()
for validator in validators:
name = get_callable_name(validator)
try:
res = validator(value)
if result_is_success(res):
successes.append(name)
else:
failures[validator] = res
except Exception as exc:
failures[validator] = exc
return successes, failures |
<SYSTEM_TASK:>
Generates a secret of given length
<END_TASK>
<USER_TASK:>
Description:
def gen_secret(length=64):
""" Generates a secret of given length
""" |
charset = string.ascii_letters + string.digits
return ''.join(random.SystemRandom().choice(charset)
for _ in range(length)) |
<SYSTEM_TASK:>
Split a sentence while preserving tags.
<END_TASK>
<USER_TASK:>
Description:
def _tokenize(cls, sentence):
"""
Split a sentence while preserving tags.
""" |
while True:
match = cls._regex_tag.search(sentence)
if not match:
yield from cls._split(sentence)
return
chunk = sentence[:match.start()]
yield from cls._split(chunk)
tag = match.group(0)
yield tag
sentence = sentence[(len(chunk) + len(tag)):] |
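A self-contained sketch of the intent; the tag pattern and the whitespace splitter below are illustrative stand-ins, since neither _regex_tag nor _split is shown above:

import re

class Tokenizer:
    _regex_tag = re.compile(r"<[^>]+>")  # assumed tag pattern

    @classmethod
    def _split(cls, chunk):
        yield from chunk.split()  # assumed plain whitespace split

    @classmethod
    def _tokenize(cls, sentence):
        while True:
            match = cls._regex_tag.search(sentence)
            if not match:
                yield from cls._split(sentence)
                return
            chunk = sentence[:match.start()]
            yield from cls._split(chunk)
            tag = match.group(0)
            yield tag
            sentence = sentence[(len(chunk) + len(tag)):]

print(list(Tokenizer._tokenize("Hello <b>world</b> !")))
# -> ['Hello', '<b>', 'world', '</b>', '!']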
<SYSTEM_TASK:>
Ensure the user is a PRODUCT_OWNER.
<END_TASK>
<USER_TASK:>
Description:
def is_product_owner(self, team_id):
"""Ensure the user is a PRODUCT_OWNER.""" |
if self.is_super_admin():
return True
team_id = uuid.UUID(str(team_id))
return team_id in self.child_teams_ids |
<SYSTEM_TASK:>
Test if user is in team
<END_TASK>
<USER_TASK:>
Description:
def is_in_team(self, team_id):
"""Test if user is in team""" |
if self.is_super_admin():
return True
team_id = uuid.UUID(str(team_id))
return team_id in self.teams or team_id in self.child_teams_ids |
<SYSTEM_TASK:>
Ensure the resource has the role REMOTECI.
<END_TASK>
<USER_TASK:>
Description:
def is_remoteci(self, team_id=None):
"""Ensure ther resource has the role REMOTECI.""" |
if team_id is None:
return self._is_remoteci
team_id = uuid.UUID(str(team_id))
if team_id not in self.teams_ids:
return False
return self.teams[team_id]['role'] == 'REMOTECI' |
<SYSTEM_TASK:>
Ensure the resource has the role FEEDER.
<END_TASK>
<USER_TASK:>
Description:
def is_feeder(self, team_id=None):
"""Ensure ther resource has the role FEEDER.""" |
if team_id is None:
return self._is_feeder
team_id = uuid.UUID(str(team_id))
if team_id not in self.teams_ids:
return False
return self.teams[team_id]['role'] == 'FEEDER' |
<SYSTEM_TASK:>
Delete this node from the owning document.
<END_TASK>
<USER_TASK:>
Description:
def delete(self, destroy=True):
"""
Delete this node from the owning document.
:param bool destroy: if True the child node will be destroyed in
addition to being removed from the document.
:returns: the removed child node, or *None* if the child was destroyed.
""" |
removed_child = self.adapter.remove_node_child(
self.adapter.get_node_parent(self.impl_node), self.impl_node,
destroy_node=destroy)
if removed_child is not None:
return self.adapter.wrap_node(removed_child, None, self.adapter)
else:
return None |
<SYSTEM_TASK:>
Perform an XPath query on the current node.
<END_TASK>
<USER_TASK:>
Description:
def xpath(self, xpath, **kwargs):
"""
Perform an XPath query on the current node.
:param string xpath: XPath query.
:param dict kwargs: Optional keyword arguments that are passed through
to the underlying XML library implementation.
:return: results of the query as a list of :class:`Node` objects, or
a list of base type objects if the XPath query does not reference
node objects.
""" |
result = self.adapter.xpath_on_node(self.impl_node, xpath, **kwargs)
if isinstance(result, (list, tuple)):
return [self._maybe_wrap_node(r) for r in result]
else:
return self._maybe_wrap_node(result) |
<SYSTEM_TASK:>
Add or update this element's attributes, where attributes can be
<END_TASK>
<USER_TASK:>
Description:
def set_attributes(self, attr_obj=None, ns_uri=None, **attr_dict):
"""
Add or update this element's attributes, where attributes can be
specified in a number of ways.
:param attr_obj: a dictionary or list of attribute name/value pairs.
:type attr_obj: dict, list, tuple, or None
:param ns_uri: a URI defining a namespace for the new attributes.
:type ns_uri: string or None
:param dict attr_dict: attribute name and values specified as keyword
arguments.
""" |
self._set_element_attributes(self.impl_node,
attr_obj=attr_obj, ns_uri=ns_uri, **attr_dict) |
<SYSTEM_TASK:>
Define a namespace prefix that will serve as shorthand for the given
<END_TASK>
<USER_TASK:>
Description:
def set_ns_prefix(self, prefix, ns_uri):
"""
Define a namespace prefix that will serve as shorthand for the given
namespace URI in element names.
:param string prefix: prefix that will serve as an alias for a
the namespace URI.
:param string ns_uri: namespace URI that will be denoted by the
prefix.
""" |
self._add_ns_prefix_attr(self.impl_node, prefix, ns_uri) |
<SYSTEM_TASK:>
Add a new child element to this element, with an optional namespace
<END_TASK>
<USER_TASK:>
Description:
def add_element(self, name, ns_uri=None, attributes=None,
text=None, before_this_element=False):
"""
Add a new child element to this element, with an optional namespace
definition. If no namespace is provided the child will be assigned
to the default namespace.
:param string name: a name for the child node. The name may be used
to apply a namespace to the child by including:
- a prefix component in the name of the form
``ns_prefix:element_name``, where the prefix has already been
defined for a namespace URI (such as via :meth:`set_ns_prefix`).
- a literal namespace URI value delimited by curly braces, of
the form ``{ns_uri}element_name``.
:param ns_uri: a URI specifying the new element's namespace. If the
``name`` parameter specifies a namespace this parameter is ignored.
:type ns_uri: string or None
:param attributes: collection of attributes to assign to the new child.
:type attributes: dict, list, tuple, or None
:param text: text value to assign to the new child.
:type text: string or None
:param bool before_this_element: if *True* the new element is
added as a sibling preceding this element, instead of as a child.
In other words, the new element will be a child of this element's
parent node, and will immediately precede this element in the DOM.
:return: the new child as an :class:`Element` node.
""" |
# Determine local name, namespace and prefix info from tag name
prefix, local_name, node_ns_uri = \
self.adapter.get_ns_info_from_node_name(name, self.impl_node)
if prefix:
qname = u'%s:%s' % (prefix, local_name)
else:
qname = local_name
# If no name-derived namespace, apply an alternate namespace
if node_ns_uri is None:
if ns_uri is None:
# Default document namespace
node_ns_uri = self.adapter.get_ns_uri_for_prefix(
self.impl_node, None)
else:
# keyword-parameter namespace
node_ns_uri = ns_uri
# Create element
child_elem = self.adapter.new_impl_element(
qname, node_ns_uri, parent=self.impl_node)
# If element's default namespace was defined by literal uri prefix,
# create corresponding xmlns attribute for element...
if not prefix and '}' in name:
self._set_element_attributes(child_elem,
{'xmlns': node_ns_uri}, ns_uri=self.XMLNS_URI)
# ...otherwise define keyword-defined namespace as the default, if any
elif ns_uri is not None:
self._set_element_attributes(child_elem,
{'xmlns': ns_uri}, ns_uri=self.XMLNS_URI)
# Create subordinate nodes
if attributes is not None:
self._set_element_attributes(child_elem, attr_obj=attributes)
if text is not None:
self._add_text(child_elem, text)
# Add new element to its parent before a given node...
if before_this_element:
self.adapter.add_node_child(
self.adapter.get_node_parent(self.impl_node),
child_elem, before_sibling=self.impl_node)
# ...or in the default position, appended after existing nodes
else:
self.adapter.add_node_child(self.impl_node, child_elem)
return self.adapter.wrap_node(
child_elem, self.adapter.impl_document, self.adapter) |
<SYSTEM_TASK:>
Add a text node to this element.
<END_TASK>
<USER_TASK:>
Description:
def add_text(self, text):
"""
Add a text node to this element.
Adding text with this method is subtly different from assigning a new
text value with :meth:`text` accessor, because it "appends" to rather
than replacing this element's set of text nodes.
:param text: text content to add to this element.
:type text: string or anything that can be coerced by :func:`unicode`.
""" |
if not isinstance(text, basestring):
text = unicode(text)
self._add_text(self.impl_node, text) |
<SYSTEM_TASK:>
Add an instruction node to this element.
<END_TASK>
<USER_TASK:>
Description:
def add_instruction(self, target, data):
"""
Add an instruction node to this element.
:param string target: the target of the instruction.
:param string data: the content of the instruction.
""" |
self._add_instruction(self.impl_node, target, data) |
<SYSTEM_TASK:>
Apply filters to the set of nodes in this list.
<END_TASK>
<USER_TASK:>
Description:
def filter(self, local_name=None, name=None, ns_uri=None, node_type=None,
filter_fn=None, first_only=False):
"""
Apply filters to the set of nodes in this list.
:param local_name: a local name used to filter the nodes.
:type local_name: string or None
:param name: a name used to filter the nodes.
:type name: string or None
:param ns_uri: a namespace URI used to filter the nodes.
If *None* all nodes are returned regardless of namespace.
:type ns_uri: string or None
:param node_type: a node type definition used to filter the nodes.
:type node_type: int node type constant, class, or None
:param filter_fn: an arbitrary function to filter nodes in this list.
This function must accept a single :class:`Node` argument and
return a bool indicating whether to include the node in the
filtered results.
.. note:: if ``filter_fn`` is provided all other filter arguments
are ignored.
:type filter_fn: function or None
:return: the type of the return value depends on the value of the
``first_only`` parameter and how many nodes match the filter:
- if ``first_only=False`` return a :class:`NodeList` of filtered
nodes, which will be empty if there are no matching nodes.
- if ``first_only=True`` and at least one node matches,
return the first matching :class:`Node`
- if ``first_only=True`` and there are no matching nodes,
return *None*
""" |
# Build our own filter function unless a custom function is provided
if filter_fn is None:
def filter_fn(n):
# Test node type first in case other tests require this type
if node_type is not None:
# Node type can be specified as an integer constant (e.g.
# ELEMENT_NODE) or a class.
if isinstance(node_type, int):
if not n.is_type(node_type):
return False
elif n.__class__ != node_type:
return False
if name is not None and n.name != name:
return False
if local_name is not None and n.local_name != local_name:
return False
if ns_uri is not None and n.ns_uri != ns_uri:
return False
return True
# Filter nodes
nodelist = filter(filter_fn, self)
# If requested, return just the first node (or None if no nodes)
if first_only:
return nodelist[0] if nodelist else None
else:
return NodeList(nodelist) |
<SYSTEM_TASK:>
Get all analytics of a job.
<END_TASK>
<USER_TASK:>
Description:
def get_all_analytics(user, job_id):
"""Get all analytics of a job.""" |
args = schemas.args(flask.request.args.to_dict())
v1_utils.verify_existence_and_get(job_id, models.JOBS)
query = v1_utils.QueryBuilder(_TABLE, args, _A_COLUMNS)
# If not admin nor rh employee then restrict the view to the team
if user.is_not_super_admin() and not user.is_read_only_user():
query.add_extra_condition(_TABLE.c.team_id.in_(user.teams_ids))
query.add_extra_condition(_TABLE.c.job_id == job_id)
nb_rows = query.get_number_of_rows()
rows = query.execute(fetchall=True)
rows = v1_utils.format_result(rows, _TABLE.name)
return flask.jsonify({'analytics': rows, '_meta': {'count': nb_rows}}) |
<SYSTEM_TASK:>
'Values in' validation_function generator.
<END_TASK>
<USER_TASK:>
Description:
def is_in(allowed_values # type: Set
):
"""
'Values in' validation_function generator.
Returns a validation_function to check that x is in the provided set of allowed values
:param allowed_values: a set of allowed values
:return:
""" |
def is_in_allowed_values(x):
if x in allowed_values:
return True
else:
# raise Failure('is_in: x in ' + str(allowed_values) + ' does not hold for x=' + str(x))
raise NotInAllowedValues(wrong_value=x, allowed_values=allowed_values)
is_in_allowed_values.__name__ = 'is_in_{}'.format(allowed_values)
return is_in_allowed_values |
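Usage is straightforward; the generated function returns True on success and raises NotInAllowedValues otherwise:

check_color = is_in({"red", "green", "blue"})
check_color("red")     # -> True
check_color("purple")  # raises NotInAllowedValues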
<SYSTEM_TASK:>
'Is subset' validation_function generator.
<END_TASK>
<USER_TASK:>
Description:
def is_subset(reference_set # type: Set
):
"""
'Is subset' validation_function generator.
Returns a validation_function to check that x is a subset of reference_set
:param reference_set: the reference set
:return:
""" |
def is_subset_of(x):
missing = x - reference_set
if len(missing) == 0:
return True
else:
# raise Failure('is_subset: len(x - reference_set) == 0 does not hold for x=' + str(x)
# + ' and reference_set=' + str(reference_set) + '. x contains unsupported '
# 'elements ' + str(missing))
raise NotSubset(wrong_value=x, reference_set=reference_set, unsupported=missing)
is_subset_of.__name__ = 'is_subset_of_{}'.format(reference_set)
return is_subset_of |
<SYSTEM_TASK:>
'Contains' validation_function generator.
<END_TASK>
<USER_TASK:>
Description:
def contains(ref_value):
"""
'Contains' validation_function generator.
Returns a validation_function to check that `ref_value in x`
:param ref_value: a value that must be present in x
:return:
""" |
def contains_ref_value(x):
if ref_value in x:
return True
else:
raise DoesNotContainValue(wrong_value=x, ref_value=ref_value)
contains_ref_value.__name__ = 'contains_{}'.format(ref_value)
return contains_ref_value |
<SYSTEM_TASK:>
'Is superset' validation_function generator.
<END_TASK>
<USER_TASK:>
Description:
def is_superset(reference_set # type: Set
):
"""
'Is superset' validation_function generator.
Returns a validation_function to check that x is a superset of reference_set
:param reference_set: the reference set
:return:
""" |
def is_superset_of(x):
missing = reference_set - x
if len(missing) == 0:
return True
else:
# raise Failure('is_superset: len(reference_set - x) == 0 does not hold for x=' + str(x)
# + ' and reference_set=' + str(reference_set) + '. x does not contain required '
# 'elements ' + str(missing))
raise NotSuperset(wrong_value=x, reference_set=reference_set, missing=missing)
is_superset_of.__name__ = 'is_superset_of_{}'.format(reference_set)
return is_superset_of |
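A short sketch of the set-based generators defined above:

only_rgb = is_subset({"red", "green", "blue"})
only_rgb({"red"})            # -> True
only_rgb({"red", "purple"})  # raises NotSubset, reporting the unsupported element

needs_id_and_name = is_superset({"id", "name"})
needs_id_and_name({"id", "name", "extra"})  # -> True
needs_id_and_name({"id"})                   # raises NotSuperset, reporting the missing element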
<SYSTEM_TASK:>
Generates a validation_function for collection inputs where each element of the input will be validated against the
<END_TASK>
<USER_TASK:>
Description:
def on_all_(*validation_func):
"""
Generates a validation_function for collection inputs where each element of the input will be validated against the
validation_functions provided. For convenience, a list of validation_functions can be provided and will be replaced
with an 'and_'.
Note that if you want to apply DIFFERENT validation_functions for each element in the input, you should rather use
on_each_.
:param validation_func: the base validation function or list of base validation functions to use. A callable, a
tuple(callable, help_msg_str), a tuple(callable, failure_type), or a list of several such elements. Nested lists
are supported and indicate an implicit `and_` (such as the main list). Tuples indicate an implicit
`_failure_raiser`. [mini_lambda](https://smarie.github.io/python-mini-lambda/) expressions can be used instead
of callables, they will be transformed to functions automatically.
:return:
""" |
# create the validation functions
validation_function_func = _process_validation_function_s(list(validation_func))
def on_all_val(x):
# validate all elements in x in turn
for idx, x_elt in enumerate(x):
try:
res = validation_function_func(x_elt)
except Exception as e:
raise InvalidItemInSequence(wrong_value=x_elt, wrapped_func=validation_function_func, validation_outcome=e)
if not result_is_success(res):
# one element of x was not valid > raise
# raise Failure('on_all_(' + str(validation_func) + '): failed validation for input '
# 'element [' + str(idx) + ']: ' + str(x_elt))
raise InvalidItemInSequence(wrong_value=x_elt, wrapped_func=validation_function_func, validation_outcome=res)
return True
on_all_val.__name__ = 'apply_<{}>_on_all_elts'.format(get_callable_name(validation_function_func))
return on_all_val |
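A usage sketch with hypothetical element-level checks; every element of the sequence must satisfy all of them:

all_positive_even = on_all_(lambda x: x > 0, (lambda x: x % 2 == 0, "must be even"))
all_positive_even([2, 4, 6])  # -> True
all_positive_even([2, 3, 6])  # raises InvalidItemInSequence for the element 3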
<SYSTEM_TASK:>
Get all the jobs events from a given sequence number.
<END_TASK>
<USER_TASK:>
Description:
def get_jobs_events_from_sequence(user, sequence):
"""Get all the jobs events from a given sequence number.""" |
args = schemas.args(flask.request.args.to_dict())
if user.is_not_super_admin():
raise dci_exc.Unauthorized()
query = sql.select([models.JOBS_EVENTS]). \
select_from(models.JOBS_EVENTS.join(models.JOBS,
models.JOBS.c.id == models.JOBS_EVENTS.c.job_id)). \
where(_TABLE.c.id >= sequence)
sort_list = v1_utils.sort_query(args['sort'], _JOBS_EVENTS_COLUMNS,
default='created_at')
query = v1_utils.add_sort_to_query(query, sort_list)
if args.get('limit', None):
query = query.limit(args.get('limit'))
if args.get('offset', None):
query = query.offset(args.get('offset'))
rows = flask.g.db_conn.execute(query).fetchall()
query_nb_rows = sql.select([func.count(models.JOBS_EVENTS.c.id)])
nb_rows = flask.g.db_conn.execute(query_nb_rows).scalar()
return flask.jsonify({'jobs_events': rows, '_meta': {'count': nb_rows}}) |
<SYSTEM_TASK:>
Return concatenated value of all text node children of this element
<END_TASK>
<USER_TASK:>
Description:
def get_node_text(self, node):
"""
Return concatenated value of all text node children of this element
""" |
text_children = [n.nodeValue for n in self.get_node_children(node)
if n.nodeType == xml.dom.Node.TEXT_NODE]
if text_children:
return u''.join(text_children)
else:
return None |
<SYSTEM_TASK:>
Set text value as sole Text child node of element; any existing
<END_TASK>
<USER_TASK:>
Description:
def set_node_text(self, node, text):
"""
Set text value as sole Text child node of element; any existing
Text nodes are removed
""" |
# Remove any existing Text node children
for child in self.get_node_children(node):
if child.nodeType == xml.dom.Node.TEXT_NODE:
self.remove_node_child(node, child, True)
if text is not None:
text_node = self.new_impl_text(text)
self.add_node_child(node, text_node) |
<SYSTEM_TASK:>
Returns a user-friendly description of a NonePolicy taking into account NoneArgPolicy
<END_TASK>
<USER_TASK:>
Description:
def get_none_policy_text(none_policy, # type: int
verbose=False # type: bool
):
"""
Returns a user-friendly description of a NonePolicy taking into account NoneArgPolicy
:param none_policy:
:param verbose:
:return:
""" |
if none_policy is NonePolicy.SKIP:
return "accept None without performing validation" if verbose else 'SKIP'
elif none_policy is NonePolicy.FAIL:
return "fail on None without performing validation" if verbose else 'FAIL'
elif none_policy is NonePolicy.VALIDATE:
return "validate None as any other values" if verbose else 'VALIDATE'
elif none_policy is NoneArgPolicy.SKIP_IF_NONABLE_ELSE_FAIL:
return "accept None without validation if the argument is optional, otherwise fail on None" if verbose \
else 'SKIP_IF_NONABLE_ELSE_FAIL'
elif none_policy is NoneArgPolicy.SKIP_IF_NONABLE_ELSE_VALIDATE:
return "accept None without validation if the argument is optional, otherwise validate None as any other " \
"values" if verbose else 'SKIP_IF_NONABLE_ELSE_VALIDATE'
else:
raise ValueError('Invalid none_policy ' + str(none_policy)) |
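For example, assuming the NonePolicy enumeration above is importable:

print(get_none_policy_text(NonePolicy.SKIP))                # -> 'SKIP'
print(get_none_policy_text(NonePolicy.SKIP, verbose=True))  # -> 'accept None without performing validation'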
<SYSTEM_TASK:>
Adds a wrapper or nothing around the provided validation_callable, depending on the selected policy
<END_TASK>
<USER_TASK:>
Description:
def _add_none_handler(validation_callable, # type: Callable
none_policy # type: int
):
# type: (...) -> Callable
"""
Adds a wrapper or nothing around the provided validation_callable, depending on the selected policy
:param validation_callable:
:param none_policy: an int representing the None policy, see NonePolicy
:return:
""" |
if none_policy is NonePolicy.SKIP:
return _none_accepter(validation_callable) # accept all None values
elif none_policy is NonePolicy.FAIL:
return _none_rejecter(validation_callable) # reject all None values
elif none_policy is NonePolicy.VALIDATE:
return validation_callable # do not handle None specifically, do not wrap
else:
raise ValueError('Invalid none_policy : ' + str(none_policy)) |
<SYSTEM_TASK:>
Creates an instance without using a Validator.
<END_TASK>
<USER_TASK:>
Description:
def create_manually(cls,
validation_function_name, # type: str
var_name, # type: str
var_value,
validation_outcome=None, # type: Any
help_msg=None, # type: str
append_details=True, # type: bool
**kw_context_args):
"""
Creates an instance without using a Validator.
This method is not the primary way that errors are created - they should rather be created by the validation entry
points. However it can be handy in rare edge cases.
:param validation_function_name:
:param var_name:
:param var_value:
:param validation_outcome:
:param help_msg:
:param append_details:
:param kw_context_args:
:return:
""" |
# create a dummy validator
def val_fun(x):
pass
val_fun.__name__ = validation_function_name
validator = Validator(val_fun, error_type=cls, help_msg=help_msg, **kw_context_args)
# create the exception
# e = cls(validator, var_value, var_name, validation_outcome=validation_outcome, help_msg=help_msg,
# append_details=append_details, **kw_context_args)
e = validator._create_validation_error(var_name, var_value, validation_outcome, error_type=cls,
help_msg=help_msg, **kw_context_args)
return e |
<SYSTEM_TASK:>
Utility method to get the variable value or 'var_name=value' if name is not None.
<END_TASK>
<USER_TASK:>
Description:
def get_variable_str(self):
"""
Utility method to get the variable value or 'var_name=value' if name is not None.
Note that values with large string representations will not get printed
:return:
""" |
if self.var_name is None:
prefix = ''
else:
prefix = self.var_name
suffix = str(self.var_value)
if len(suffix) == 0:
suffix = "''"
elif len(suffix) > self.__max_str_length_displayed__:
suffix = ''
if len(prefix) > 0 and len(suffix) > 0:
return prefix + '=' + suffix
else:
return prefix + suffix |
<SYSTEM_TASK:>
Validates the provided value and returns a boolean indicating success or failure. Any Exception happening in
<END_TASK>
<USER_TASK:>
Description:
def is_valid(self,
value # type: Any
):
# type: (...) -> bool
"""
Validates the provided value and returns a boolean indicating success or failure. Any Exception happening in
the validation process will be silently caught.
:param value: the value to validate
:return: a boolean flag indicating success or failure
""" |
# noinspection PyBroadException
try:
# perform validation
res = self.main_function(value)
# return a boolean indicating if success or failure
return result_is_success(res)
except Exception:
# caught exception means failure > return False
return False |
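A short usage sketch run under Python 3; the top-level import path is an assumption:

from valid8 import Validator  # assumed import path

v = Validator(lambda x: x > 0)
print(v.is_valid(5))    # True
print(v.is_valid(-5))   # False: the check returned a non-success result
print(v.is_valid('a'))  # False: the TypeError raised by 'a' > 0 is caught silently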
<SYSTEM_TASK:>
Load entrypoint.
<END_TASK>
<USER_TASK:>
Description:
def load_entrypoint(self, entry_point_group):
"""Load entrypoint.
:param entry_point_group: A name of entry point group used to load
``webassets`` bundles.
.. versionchanged:: 1.0.0b2
The *entrypoint* has been renamed to *entry_point_group*.
""" |
for ep in pkg_resources.iter_entry_points(entry_point_group):
self.env.register(ep.name, ep.load()) |
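A hypothetical example of a bundle exposed through an entry point that load_entrypoint would register; the group name 'invenio_assets.bundles' and the module path are assumptions:

# mypackage/bundles.py (assumed module)
from webassets import Bundle
css_bundle = Bundle('css/style.css', output='gen/packed.css')

# setup.py excerpt (entry point group name assumed):
# entry_points={'invenio_assets.bundles': ['mycss = mypackage.bundles:css_bundle']}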
<SYSTEM_TASK:>
Sends a 401 reject response that enables basic auth.
<END_TASK>
<USER_TASK:>
Description:
def reject():
"""Sends a 401 reject response that enables basic auth.""" |
auth_message = ('Could not verify your access level for that URL.'
'Please login with proper credentials.')
auth_message = json.dumps({'_status': 'Unauthorized',
'message': auth_message})
headers = {'WWW-Authenticate': 'Basic realm="Login required"'}
return flask.Response(auth_message, 401, headers=headers,
content_type='application/json') |
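A hedged sketch of how reject() could back a Basic-auth guard; the credential check below is a placeholder for illustration only:

import flask

app = flask.Flask(__name__)

def check_credentials(username, password):
    # placeholder check, for the sketch only
    return username == 'admin' and password == 'secret'

@app.route('/private')
def private():
    auth = flask.request.authorization
    if not auth or not check_credentials(auth.username, auth.password):
        return reject()
    return flask.jsonify({'_status': 'OK'})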
<SYSTEM_TASK:>
Modify a contact.
<END_TASK>
<USER_TASK:>
Description:
def modify(self, **kwargs):
"""Modify a contact.
Returns status message
Optional Parameters:
* name -- Contact name
Type: String
* email -- Contact email address
Type: String
* cellphone -- Cellphone number, without the country code part. In
some countries you are supposed to exclude leading zeroes.
(Requires countrycode and countryiso)
Type: String
* countrycode -- Cellphone country code (Requires cellphone and
countryiso)
Type: String
* countryiso -- Cellphone country ISO code. For example: US (USA),
GB (Britain) or SE (Sweden) (Requires cellphone and
countrycode)
Type: String
* defaultsmsprovider -- Default SMS provider
Type: String ['clickatell', 'bulksms', 'esendex',
'cellsynt']
* directtwitter -- Send tweets as direct messages
Type: Boolean
Default: True
* twitteruser -- Twitter user
Type: String
""" |
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['email', 'cellphone', 'countrycode', 'countryiso',
'defaultsmsprovider', 'directtwitter',
'twitteruser', 'name']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of <PingdomContact>.modify()\n')
response = self.pingdom.request('PUT', 'notification_contacts/%s' % self.id, kwargs)
return response.json()['message'] |
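A hypothetical call, assuming contact is an existing PingdomContact obtained from an authenticated client:

message = contact.modify(name='On-call team',
                         email='oncall@example.com',
                         cellphone='5551234',
                         countrycode='1',
                         countryiso='US')
print(message)  # status message returned by the API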
<SYSTEM_TASK:>
If the topic has its export_control set to True then all the teams
<END_TASK>
<USER_TASK:>
Description:
def _check(user, topic):
"""If the topic has it's export_control set to True then all the teams
under the product team can access to the topic's resources.
:param user:
:param topic:
:return: True if check is ok, False otherwise
""" |
# if export_control then check the team is associated to the product, ie.:
# - the current user belongs to the product's team
# OR
# - the product's team belongs to the user's parents teams
if topic['export_control']:
product = v1_utils.verify_existence_and_get(topic['product_id'],
models.PRODUCTS)
return (user.is_in_team(product['team_id']) or
product['team_id'] in user.parent_teams_ids)
return False |
<SYSTEM_TASK:>
Ensure the proper content is uploaded.
<END_TASK>
<USER_TASK:>
Description:
def get_stream_or_content_from_request(request):
"""Ensure the proper content is uploaded.
Stream might be already consumed by authentication process.
Hence flask.request.stream might not be readable and may return an improper value.
This method checks if the stream has already been consumed and if so
retrieve the data from flask.request.data where it has been stored.
""" |
if request.stream.tell():
logger.info('Request stream already consumed. '
'Storing file content using in-memory data.')
return request.data
else:
logger.info('Storing file content using request stream.')
return request.stream |
<SYSTEM_TASK:>
Returns the component types of a topic.
<END_TASK>
<USER_TASK:>
Description:
def get_component_types_from_topic(topic_id, db_conn=None):
"""Returns the component types of a topic.""" |
db_conn = db_conn or flask.g.db_conn
query = sql.select([models.TOPICS]).\
where(models.TOPICS.c.id == topic_id)
topic = db_conn.execute(query).fetchone()
topic = dict(topic)
return topic['component_types'] |
<SYSTEM_TASK:>
Returns either the topic component types or the rconfiguration's
<END_TASK>
<USER_TASK:>
Description:
def get_component_types(topic_id, remoteci_id, db_conn=None):
"""Returns either the topic component types or the rconfigration's
component types.""" |
db_conn = db_conn or flask.g.db_conn
rconfiguration = remotecis.get_remoteci_configuration(topic_id,
remoteci_id,
db_conn=db_conn)
# if there is no rconfiguration associated to the remoteci or no
# component types then use the topic's one.
if (rconfiguration is not None and
rconfiguration['component_types'] is not None):
component_types = rconfiguration['component_types']
else:
component_types = get_component_types_from_topic(topic_id,
db_conn=db_conn)
return component_types, rconfiguration |
<SYSTEM_TASK:>
For each component type of a topic, get the last one.
<END_TASK>
<USER_TASK:>
Description:
def get_last_components_by_type(component_types, topic_id, db_conn=None):
"""For each component type of a topic, get the last one.""" |
db_conn = db_conn or flask.g.db_conn
schedule_components_ids = []
for ct in component_types:
where_clause = sql.and_(models.COMPONENTS.c.type == ct,
models.COMPONENTS.c.topic_id == topic_id,
models.COMPONENTS.c.export_control == True,
models.COMPONENTS.c.state == 'active') # noqa
query = (sql.select([models.COMPONENTS.c.id])
.where(where_clause)
.order_by(sql.desc(models.COMPONENTS.c.created_at)))
cmpt_id = db_conn.execute(query).fetchone()
if cmpt_id is None:
msg = 'Component of type "%s" not found or not exported.' % ct
raise dci_exc.DCIException(msg, status_code=412)
cmpt_id = cmpt_id[0]
if cmpt_id in schedule_components_ids:
msg = ('Component types %s malformed: type %s duplicated.' %
(component_types, ct))
raise dci_exc.DCIException(msg, status_code=412)
schedule_components_ids.append(cmpt_id)
return schedule_components_ids |
<SYSTEM_TASK:>
Process some verifications of the provided components ids.
<END_TASK>
<USER_TASK:>
Description:
def verify_and_get_components_ids(topic_id, components_ids, component_types,
db_conn=None):
"""Process some verifications of the provided components ids.""" |
db_conn = db_conn or flask.g.db_conn
if len(components_ids) != len(component_types):
msg = 'The number of component ids does not match the number ' \
'of component types %s' % component_types
raise dci_exc.DCIException(msg, status_code=412)
# get the components from their ids
schedule_component_types = set()
for c_id in components_ids:
where_clause = sql.and_(models.COMPONENTS.c.id == c_id,
models.COMPONENTS.c.topic_id == topic_id,
models.COMPONENTS.c.export_control == True, # noqa
models.COMPONENTS.c.state == 'active')
query = (sql.select([models.COMPONENTS])
.where(where_clause))
cmpt = db_conn.execute(query).fetchone()
if cmpt is None:
msg = 'Component id %s not found or not exported' % c_id
raise dci_exc.DCIException(msg, status_code=412)
cmpt = dict(cmpt)
if cmpt['type'] in schedule_component_types:
msg = ('Component types malformed: type %s duplicated.' %
cmpt['type'])
raise dci_exc.DCIException(msg, status_code=412)
schedule_component_types.add(cmpt['type'])
return components_ids |
<SYSTEM_TASK:>
Used everywhere to decide if some exception type should be displayed or hidden as the cause of an error
<END_TASK>
<USER_TASK:>
Description:
def should_be_hidden_as_cause(exc):
""" Used everywhere to decide if some exception type should be displayed or hidden as the casue of an error """ |
# reduced traceback in case of HasWrongType (instance_of checks)
from valid8.validation_lib.types import HasWrongType, IsWrongType
return isinstance(exc, (HasWrongType, IsWrongType)) |
<SYSTEM_TASK:>
Wraps the provided validation function so that in case of failure it raises the given failure_type or a WrappingFailure
<END_TASK>
<USER_TASK:>
Description:
def _failure_raiser(validation_callable, # type: Callable
failure_type=None, # type: Type[WrappingFailure]
help_msg=None, # type: str
**kw_context_args):
# type: (...) -> Callable
"""
Wraps the provided validation function so that in case of failure it raises the given failure_type or a WrappingFailure
with the given help message.
:param validation_callable:
:param failure_type: an optional subclass of `WrappingFailure` that should be raised in case of failure, instead of
`WrappingFailure`.
:param help_msg: an optional string help message for the raised `WrappingFailure` (if no failure_type is provided)
:param kw_context_args: optional context arguments for the custom failure message
:return:
""" |
# check failure type
if failure_type is not None and help_msg is not None:
raise ValueError('Only one of failure_type and help_msg can be set at the same time')
# convert mini-lambdas to functions if needed
validation_callable = as_function(validation_callable)
# create wrapper
# option (a) use the `decorate()` helper method to preserve name and signature of the inner object
# ==> NO, we want to support also non-function callable objects
# option (b) simply create a wrapper manually
def raiser(x):
""" Wraps validation_callable to raise a failure_type_or_help_msg in case of failure """
try:
# perform validation
res = validation_callable(x)
except Exception as e:
# no need to raise from e since the __cause__ is already set in the constructor: we can safely commonalize
res = e
if not result_is_success(res):
typ = failure_type or WrappingFailure
exc = typ(wrapped_func=validation_callable, wrong_value=x, validation_outcome=res,
help_msg=help_msg, **kw_context_args)
raise exc
# set a name so that the error messages are more user-friendly
# NO, Do not include the callable type or error message in the name since it is only used in error messages where
# they will appear anyway !
# ---
# if help_msg or failure_type:
# raiser.__name__ = 'failure_raiser({}, {})'.format(get_callable_name(validation_callable),
# help_msg or failure_type.__name__)
# else:
# ---
# raiser.__name__ = 'failure_raiser({})'.format(get_callable_name(validation_callable))
raiser.__name__ = get_callable_name(validation_callable)
# Note: obviously this can hold as long as we do not check the name of this object in any other context than
# raising errors. If we want to support this, then creating a callable object with everything in the fields will be
# probably more appropriate so that error messages will be able to display the inner name, while repr() will still
# say that this is a failure raiser.
# TODO consider transforming failure_raiser into a class (see comment above)
return raiser |
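A minimal sketch, assuming _failure_raiser and WrappingFailure are importable from valid8's internals:

def is_positive(x):
    return x > 0

check = _failure_raiser(is_positive, help_msg='value should be positive')
check(10)          # passes silently
try:
    check(-3)      # raises WrappingFailure carrying is_positive, -3 and the help message
except WrappingFailure as e:
    print(e)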
<SYSTEM_TASK:>
Wraps the given validation callable to accept None values silently. When a None value is received by the wrapper,
<END_TASK>
<USER_TASK:>
Description:
def _none_accepter(validation_callable # type: Callable
):
# type: (...) -> Callable
"""
Wraps the given validation callable to accept None values silently. When a None value is received by the wrapper,
it is not passed to the validation_callable and instead this function will return True. When any other value is
received the validation_callable is called as usual.
Note: the created wrapper has the same name as the validation callable, for more user-friendly error messages
:param validation_callable:
:return:
""" |
# option (a) use the `decorate()` helper method to preserve name and signature of the inner object
# ==> NO, we want to support also non-function callable objects
# option (b) simply create a wrapper manually
def accept_none(x):
if x is not None:
# proceed with validation as usual
return validation_callable(x)
else:
# value is None: skip validation
return True
# set a name so that the error messages are more user-friendly
accept_none.__name__ = 'skip_on_none({})'.format(get_callable_name(validation_callable))
return accept_none |
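A minimal sketch, assuming _none_accepter is importable from valid8's internals:

def is_positive(x):
    return x > 0

lenient = _none_accepter(is_positive)
print(lenient(None))      # True: is_positive is never called
print(lenient(3))         # True: delegated to is_positive
print(lenient.__name__)   # 'skip_on_none(is_positive)'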
<SYSTEM_TASK:>
Wraps the given validation callable to reject None values. When a None value is received by the wrapper,
<END_TASK>
<USER_TASK:>
Description:
def _none_rejecter(validation_callable # type: Callable
):
# type: (...) -> Callable
"""
Wraps the given validation callable to reject None values. When a None value is received by the wrapper,
it is not passed to the validation_callable and instead this function will raise a WrappingFailure. When any other value is
received the validation_callable is called as usual.
:param validation_callable:
:return:
""" |
# option (a) use the `decorate()` helper method to preserve name and signature of the inner object
# ==> NO, we want to support also non-function callable objects
# option (b) simply create a wrapper manually
def reject_none(x):
if x is not None:
return validation_callable(x)
else:
raise ValueIsNone(wrong_value=x)
# set a name so that the error messages are more user-friendly ==> NO ! here we want to see the checker
reject_none.__name__ = 'reject_none({})'.format(get_callable_name(validation_callable))
return reject_none |
<SYSTEM_TASK:>
The method used to get the formatted help message according to kwargs. By default it returns the 'help_msg'
<END_TASK>
<USER_TASK:>
Description:
def get_help_msg(self,
dotspace_ending=False, # type: bool
**kwargs):
# type: (...) -> str
"""
The method used to get the formatted help message according to kwargs. By default it returns the 'help_msg'
attribute, whether it is defined at the instance level or at the class level.
The help message is formatted according to help_msg.format(**kwargs), and may be terminated with a dot
and a space if dotspace_ending is set to True.
:param dotspace_ending: True will append a dot and a space at the end of the message if it is not
empty (default is False)
:param kwargs: keyword arguments to format the help message
:return: the formatted help message
""" |
context = self.get_context_for_help_msgs(kwargs)
if self.help_msg is not None and len(self.help_msg) > 0:
# create a copy because we will modify it
context = copy(context)
# first format if needed
try:
help_msg = self.help_msg
variables = re.findall("{\S+}", help_msg)
for v in set(variables):
v = v[1:-1]
if v in context and len(str(context[v])) > self.__max_str_length_displayed__:
new_name = '@@@@' + v + '@@@@'
help_msg = help_msg.replace('{' + v + '}', '{' + new_name + '}')
context[new_name] = "(too big for display)"
help_msg = help_msg.format(**context)
except KeyError as e:
# no need to raise from e, __cause__ is set in the constructor
raise HelpMsgFormattingException(self.help_msg, e, context)
# then add a trailing dot and space if needed
if dotspace_ending:
return end_with_dot_space(help_msg)
else:
return help_msg
else:
return '' |
<SYSTEM_TASK:>
Overrides the method in Failure so as to add a few details about the wrapped function and outcome
<END_TASK>
<USER_TASK:>
Description:
def get_details(self):
""" Overrides the method in Failure so as to add a few details about the wrapped function and outcome """ |
if isinstance(self.validation_outcome, Exception):
if isinstance(self.validation_outcome, Failure):
# do not say again what was the value, it is already mentioned inside :)
end_str = ''
else:
end_str = ' for value [{value}]'.format(value=self.wrong_value)
contents = 'Function [{wrapped}] raised [{exception}: {details}]{end}.' \
''.format(wrapped=get_callable_name(self.wrapped_func),
exception=type(self.validation_outcome).__name__, details=self.validation_outcome,
end=end_str)
else:
contents = 'Function [{wrapped}] returned [{result}] for value [{value}].' \
''.format(wrapped=get_callable_name(self.wrapped_func), result=self.validation_outcome,
value=self.wrong_value)
return contents |
<SYSTEM_TASK:>
We override this method from HelpMsgMixIn to replace wrapped_func with its name
<END_TASK>
<USER_TASK:>
Description:
def get_context_for_help_msgs(self, context_dict):
""" We override this method from HelpMsgMixIn to replace wrapped_func with its name """ |
context_dict = copy(context_dict)
context_dict['wrapped_func'] = get_callable_name(context_dict['wrapped_func'])
return context_dict |
<SYSTEM_TASK:>
This method is equivalent to applying `decorate_with_validation` once for each of the provided arguments of
<END_TASK>
<USER_TASK:>
Description:
def decorate_several_with_validation(func,
_out_=None, # type: ValidationFuncs
none_policy=None, # type: int
**validation_funcs # type: ValidationFuncs
):
# type: (...) -> Callable
"""
This method is equivalent to applying `decorate_with_validation` once for each of the provided arguments of
the function `func` as well as output `_out_`. validation_funcs keyword arguments are validation functions for each
arg name.
Note that this method is less flexible than decorate_with_validation since
* it does not allow associating a custom error message or error type with each validation.
* the none_policy is the same for all inputs and outputs
:param func:
:param _out_:
:param validation_funcs:
:param none_policy:
:return: a function decorated with validation for all of the listed arguments and output if provided.
""" |
# add validation for output if provided
if _out_ is not None:
func = decorate_with_validation(func, _OUT_KEY, _out_, none_policy=none_policy)
# add validation for each of the listed arguments
for att_name, att_validation_funcs in validation_funcs.items():
func = decorate_with_validation(func, att_name, att_validation_funcs, none_policy=none_policy)
return func |
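A minimal sketch, assuming decorate_several_with_validation is importable; plain lambdas are used as validation functions:

def add(a, b):
    return a + b

add = decorate_several_with_validation(add,
                                       _out_=lambda x: x < 100,
                                       a=lambda x: x >= 0,
                                       b=lambda x: x >= 0)
add(1, 2)      # ok
# add(-1, 2)   # raises a validation error for argument 'a'
# add(60, 60)  # raises a validation error for the output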
<SYSTEM_TASK:>
Depending on none_policy and of the fact that the target parameter is nonable or not, returns a corresponding
<END_TASK>
<USER_TASK:>
Description:
def _get_final_none_policy_for_validator(is_nonable, # type: bool
none_policy # type: NoneArgPolicy
):
"""
Depending on none_policy and of the fact that the target parameter is nonable or not, returns a corresponding
NonePolicy
:param is_nonable:
:param none_policy:
:return:
""" |
if none_policy in {NonePolicy.VALIDATE, NonePolicy.SKIP, NonePolicy.FAIL}:
none_policy_to_use = none_policy
elif none_policy is NoneArgPolicy.SKIP_IF_NONABLE_ELSE_VALIDATE:
none_policy_to_use = NonePolicy.SKIP if is_nonable else NonePolicy.VALIDATE
elif none_policy is NoneArgPolicy.SKIP_IF_NONABLE_ELSE_FAIL:
none_policy_to_use = NonePolicy.SKIP if is_nonable else NonePolicy.FAIL
else:
raise ValueError('Invalid none policy: ' + str(none_policy))
return none_policy_to_use |
<SYSTEM_TASK:>
Utility method to decorate the provided function with the provided input and output Validator objects. Since this
<END_TASK>
<USER_TASK:>
Description:
def decorate_with_validators(func,
func_signature=None, # type: Signature
**validators # type: Validator
):
"""
Utility method to decorate the provided function with the provided input and output Validator objects. Since this
method takes Validator objects as argument, it is for advanced users.
:param func: the function to decorate. It might already be decorated; in that case this method detects it and won't
create another wrapper, simply adding the validators to the existing wrapper
:param func_signature: the function's signature if it is already known (internal calls), otherwise it will be found
again by inspection
:param validators: a dictionary of arg_name (or _out_) => Validator or list of Validator
:return:
""" |
# first turn the dictionary values into lists only
for arg_name, validator in validators.items():
if not isinstance(validator, list):
validators[arg_name] = [validator]
if hasattr(func, '__wrapped__') and hasattr(func.__wrapped__, '__validators__'):
# ---- This function is already wrapped by our validation wrapper ----
# Update the dictionary of validators with the new validator(s)
for arg_name, validator in validators.items():
for v in validator:
if arg_name in func.__wrapped__.__validators__:
func.__wrapped__.__validators__[arg_name].append(v)
else:
func.__wrapped__.__validators__[arg_name] = [v]
# return the function, no need to wrap it further (it is already wrapped)
return func
else:
# ---- This function is not yet wrapped by our validator. ----
# Store the dictionary of validators as an attribute of the function
if hasattr(func, '__validators__'):
raise ValueError('Function ' + str(func) + ' already has a defined __validators__ attribute, valid8 '
'decorators can not be applied on it')
else:
try:
func.__validators__ = validators
except AttributeError:
raise ValueError("Error - Could not add validators list to function '%s'" % func)
# either reuse or recompute function signature
func_signature = func_signature or signature(func)
# create a wrapper with the same signature
@wraps(func)
def validating_wrapper(*args, **kwargs):
""" This is the wrapper that will be called everytime the function is called """
# (a) Perform input validation by applying `_assert_input_is_valid` on all received arguments
apply_on_each_func_args_sig(func, args, kwargs, func_signature,
func_to_apply=_assert_input_is_valid,
func_to_apply_params_dict=func.__validators__)
# (b) execute the function as usual
res = func(*args, **kwargs)
# (c) validate output if needed
if _OUT_KEY in func.__validators__:
for validator in func.__validators__[_OUT_KEY]:
validator.assert_valid(res)
return res
return validating_wrapper |
<SYSTEM_TASK:>
Generate unique nonce with counter, uuid and rng.
<END_TASK>
<USER_TASK:>
Description:
def generate_nonce_timestamp():
""" Generate unique nonce with counter, uuid and rng.""" |
global count
rng = botan.rng().get(30)
uuid4 = uuid.uuid4().bytes # 16 byte
tmpnonce = (bytes(str(count).encode('utf-8'))) + uuid4 + rng
nonce = tmpnonce[:41] # 41 byte (328 bit)
count += 1
return nonce |
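A small sketch of the layout (requires the botan bindings used above); for count < 10 the 41 bytes are 1 counter byte + 16 uuid4 bytes + the first 24 of the 30 random bytes:

n1 = generate_nonce_timestamp()
n2 = generate_nonce_timestamp()
assert len(n1) == len(n2) == 41
assert n1 != n2  # uuid4 plus the RNG makes collisions overwhelmingly unlikely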
<SYSTEM_TASK:>
Dispatch jobs to remotecis.
<END_TASK>
<USER_TASK:>
Description:
def schedule_jobs(user):
"""Dispatch jobs to remotecis.
The remoteci can use this method to request a new job.
Before a job is dispatched, the server will flag as 'killed' all the
running jobs that were associated with the remoteci. This is because they
will never be finished.
""" |
values = schemas.job_schedule.post(flask.request.json)
values.update({
'id': utils.gen_uuid(),
'created_at': datetime.datetime.utcnow().isoformat(),
'updated_at': datetime.datetime.utcnow().isoformat(),
'etag': utils.gen_etag(),
'status': 'new',
'remoteci_id': user.id,
'user_agent': flask.request.environ.get('HTTP_USER_AGENT'),
'client_version': flask.request.environ.get(
'HTTP_CLIENT_VERSION'
),
})
topic_id = values.pop('topic_id')
topic_id_secondary = values.pop('topic_id_secondary')
components_ids = values.pop('components_ids')
# check remoteci
remoteci = v1_utils.verify_existence_and_get(user.id, models.REMOTECIS)
if remoteci['state'] != 'active':
message = 'RemoteCI "%s" is disabled.' % remoteci['id']
raise dci_exc.DCIException(message, status_code=412)
# check primary topic
topic = v1_utils.verify_existence_and_get(topic_id, models.TOPICS)
if topic['state'] != 'active':
msg = 'Topic %s:%s not active.' % (topic_id, topic['name'])
raise dci_exc.DCIException(msg, status_code=412)
v1_utils.verify_team_in_topic(user, topic_id)
# check secondary topic
if topic_id_secondary:
topic_secondary = v1_utils.verify_existence_and_get(
topic_id_secondary, models.TOPICS)
if topic_secondary['state'] != 'active':
msg = 'Topic %s:%s not active.' % (topic_id_secondary,
topic_secondary['name'])
raise dci_exc.DCIException(msg, status_code=412)
v1_utils.verify_team_in_topic(user, topic_id_secondary)
dry_run = values.pop('dry_run')
if dry_run:
component_types = components.get_component_types_from_topic(topic_id)
components_ids = components.get_last_components_by_type(
component_types,
topic_id
)
return flask.Response(
json.dumps({'components_ids': components_ids, 'job': None}),
201,
content_type='application/json'
)
remotecis.kill_existing_jobs(remoteci['id'])
values = _build_job(topic_id, remoteci, components_ids, values,
topic_id_secondary=topic_id_secondary)
return flask.Response(json.dumps({'job': values}), 201,
headers={'ETag': values['etag']},
content_type='application/json') |
<SYSTEM_TASK:>
Create a new job in the same topic as the job_id provided and
<END_TASK>
<USER_TASK:>
Description:
def create_new_update_job_from_an_existing_job(user, job_id):
"""Create a new job in the same topic as the job_id provided and
associate the latest components of this topic.""" |
values = {
'id': utils.gen_uuid(),
'created_at': datetime.datetime.utcnow().isoformat(),
'updated_at': datetime.datetime.utcnow().isoformat(),
'etag': utils.gen_etag(),
'status': 'new'
}
original_job_id = job_id
original_job = v1_utils.verify_existence_and_get(original_job_id,
models.JOBS)
if not user.is_in_team(original_job['team_id']):
raise dci_exc.Unauthorized()
# get the remoteci
remoteci_id = str(original_job['remoteci_id'])
remoteci = v1_utils.verify_existence_and_get(remoteci_id,
models.REMOTECIS)
values.update({'remoteci_id': remoteci_id})
# get the associated topic
topic_id = str(original_job['topic_id'])
v1_utils.verify_existence_and_get(topic_id, models.TOPICS)
values.update({
'user_agent': flask.request.environ.get('HTTP_USER_AGENT'),
'client_version': flask.request.environ.get(
'HTTP_CLIENT_VERSION'
),
})
values = _build_job(topic_id, remoteci, [], values,
update_previous_job_id=original_job_id)
return flask.Response(json.dumps({'job': values}), 201,
headers={'ETag': values['etag']},
content_type='application/json') |
<SYSTEM_TASK:>
Create a new job in the 'next topic' of the topic of
<END_TASK>
<USER_TASK:>
Description:
def create_new_upgrade_job_from_an_existing_job(user):
"""Create a new job in the 'next topic' of the topic of
the provided job_id.""" |
values = schemas.job_upgrade.post(flask.request.json)
values.update({
'id': utils.gen_uuid(),
'created_at': datetime.datetime.utcnow().isoformat(),
'updated_at': datetime.datetime.utcnow().isoformat(),
'etag': utils.gen_etag(),
'status': 'new'
})
original_job_id = values.pop('job_id')
original_job = v1_utils.verify_existence_and_get(original_job_id,
models.JOBS)
if not user.is_in_team(original_job['team_id']):
raise dci_exc.Unauthorized()
# get the remoteci
remoteci_id = str(original_job['remoteci_id'])
remoteci = v1_utils.verify_existence_and_get(remoteci_id,
models.REMOTECIS)
values.update({'remoteci_id': remoteci_id})
# get the associated topic
topic_id = str(original_job['topic_id'])
topic = v1_utils.verify_existence_and_get(topic_id, models.TOPICS)
values.update({
'user_agent': flask.request.environ.get('HTTP_USER_AGENT'),
'client_version': flask.request.environ.get(
'HTTP_CLIENT_VERSION'
),
})
next_topic_id = topic['next_topic_id']
if not next_topic_id:
raise dci_exc.DCIException(
"topic %s does not contains a next topic" % topic_id)
# instantiate a new job in the next_topic_id
# todo(yassine): make possible the upgrade to choose specific components
values = _build_job(next_topic_id, remoteci, [], values,
previous_job_id=original_job_id)
return flask.Response(json.dumps({'job': values}), 201,
headers={'ETag': values['etag']},
content_type='application/json') |
<SYSTEM_TASK:>
Get all jobs.
<END_TASK>
<USER_TASK:>
Description:
def get_all_jobs(user, topic_id=None):
"""Get all jobs.
If topic_id is not None, then return only the jobs belonging to the topic
identified by topic_id.
""" |
# get the diverse parameters
args = schemas.args(flask.request.args.to_dict())
# build the query thanks to the QueryBuilder class
query = v1_utils.QueryBuilder(_TABLE, args, _JOBS_COLUMNS)
# add extra conditions for filtering
# # If not admin nor rh employee then restrict the view to the team
if user.is_not_super_admin() and not user.is_read_only_user():
query.add_extra_condition(
sql.or_(
_TABLE.c.team_id.in_(user.teams_ids),
_TABLE.c.team_id.in_(user.child_teams_ids)))
# # If topic_id not None, then filter by topic_id
if topic_id is not None:
query.add_extra_condition(_TABLE.c.topic_id == topic_id)
# # Get only the non archived jobs
query.add_extra_condition(_TABLE.c.state != 'archived')
nb_rows = query.get_number_of_rows()
rows = query.execute(fetchall=True)
rows = v1_utils.format_result(rows, _TABLE.name, args['embed'],
_EMBED_MANY)
return flask.jsonify({'jobs': rows, '_meta': {'count': nb_rows}}) |
<SYSTEM_TASK:>
Get all results from job.
<END_TASK>
<USER_TASK:>
Description:
def get_all_results_from_jobs(user, j_id):
"""Get all results from job.
""" |
job = v1_utils.verify_existence_and_get(j_id, _TABLE)
if not user.is_in_team(job['team_id']) and not user.is_read_only_user():
raise dci_exc.Unauthorized()
# get testscases from tests_results
query = sql.select([models.TESTS_RESULTS]). \
where(models.TESTS_RESULTS.c.job_id == job['id'])
all_tests_results = flask.g.db_conn.execute(query).fetchall()
results = []
for test_result in all_tests_results:
test_result = dict(test_result)
results.append({'filename': test_result['name'],
'name': test_result['name'],
'total': test_result['total'],
'failures': test_result['failures'],
'errors': test_result['errors'],
'skips': test_result['skips'],
'time': test_result['time'],
'regressions': test_result['regressions'],
'successfixes': test_result['successfixes'],
'success': test_result['success'],
'file_id': test_result['file_id']})
return flask.jsonify({'results': results,
'_meta': {'count': len(results)}}) |
<SYSTEM_TASK:>
Return True if the given node is an ElementTree Element, a fact that
<END_TASK>
<USER_TASK:>
Description:
def _is_node_an_element(self, node):
"""
Return True if the given node is an ElementTree Element, a fact that
can be tricky to determine if the cElementTree implementation is
used.
""" |
# Try the simplest approach first, works for plain old ElementTree
if isinstance(node, BaseET.Element):
return True
# For cElementTree we need to be more cunning (or find a better way)
if hasattr(node, 'makeelement') and isinstance(node.tag, basestring):
return True |
<SYSTEM_TASK:>
List the entries to be purged from the database.
<END_TASK>
<USER_TASK:>
Description:
def get_to_purge_archived_resources(user, table):
"""List the entries to be purged from the database. """ |
if user.is_not_super_admin():
raise dci_exc.Unauthorized()
archived_resources = get_archived_resources(table)
return flask.jsonify({table.name: archived_resources,
'_meta': {'count': len(archived_resources)}}) |
<SYSTEM_TASK:>
Remove the entries to be purged from the database.
<END_TASK>
<USER_TASK:>
Description:
def purge_archived_resources(user, table):
"""Remove the entries to be purged from the database. """ |
if user.is_not_super_admin():
raise dci_exc.Unauthorized()
where_clause = sql.and_(
table.c.state == 'archived'
)
query = table.delete().where(where_clause)
flask.g.db_conn.execute(query)
return flask.Response(None, 204, content_type='application/json') |
<SYSTEM_TASK:>
Returns a list of the latest root cause analysis results for a
<END_TASK>
<USER_TASK:>
Description:
def getAnalyses(self, **kwargs):
"""Returns a list of the latest root cause analysis results for a
specified check.
Optional Parameters:
* limit -- Limits the number of returned results to the
specified quantity.
Type: Integer
Default: 100
* offset -- Offset for listing. (Requires limit.)
Type: Integer
Default: 0
* time_from -- Return only results with timestamp of first test greater
or equal to this value. Format is UNIX timestamp.
Type: Integer
Default: 0
* time_to -- Return only results with timestamp of first test less or
equal to this value. Format is UNIX timestamp.
Type: Integer
Default: Current Time
Returned structure:
[
{
'id' : <Integer> Analysis id
'timefirsttest' : <Integer> Time of test that initiated the
confirmation test
'timeconfrimtest' : <Integer> Time of the confirmation test
that performed the error
analysis
},
...
]
""" |
# 'from' is a reserved word, use time_from instead
if kwargs.get('time_from'):
kwargs['from'] = kwargs.get('time_from')
del kwargs['time_from']
if kwargs.get('time_to'):
kwargs['to'] = kwargs.get('time_to')
del kwargs['time_to']
# Warn user about unhandled kwargs
for key in kwargs:
if key not in ['limit', 'offset', 'from', 'to']:
sys.stderr.write('%s not a valid argument for analysis()\n'
% key)
response = self.pingdom.request('GET', 'analysis/%s' % self.id,
kwargs)
return [PingdomAnalysis(self, x) for x in response.json()['analysis']] |
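A hypothetical call, assuming check is an existing PingdomCheck:

import time

analyses = check.getAnalyses(limit=10, time_from=int(time.time()) - 86400)
print(len(analyses))  # up to 10 PingdomAnalysis objects from the last 24 hours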
<SYSTEM_TASK:>
Modify settings for a check. The provided settings will overwrite
<END_TASK>
<USER_TASK:>
Description:
def modify(self, **kwargs):
"""Modify settings for a check. The provided settings will overwrite
previous values. Settings not provided will stay the same as before
the update. To clear an existing value, provide an empty value.
Please note that you cannot change the type of a check once it has
been created.
General parameters:
* name -- Check name
Type: String
* host - Target host
Type: String
* paused -- Check should be paused
Type: Boolean
* resolution -- Check resolution time (in minutes)
Type: Integer [1, 5, 15, 30, 60]
* contactids -- Comma separated list of contact IDs
Type: String
* sendtoemail -- Send alerts as email
Type: Boolean
* sendtosms -- Send alerts as SMS
Type: Boolean
* sendtotwitter -- Send alerts through Twitter
Type: Boolean
* sendtoiphone -- Send alerts to iPhone
Type: Boolean
* sendtoandroid -- Send alerts to Android
Type: Boolean
* sendnotificationwhendown -- Send notification when check is down
the given number of times
Type: Integer
* notifyagainevery -- Set how many results to wait for in between
notices
Type: Integer
* notifywhenbackup -- Notify when back up again
Type: Boolean
* use_legacy_notifications -- Use old notifications instead of BeepManager
Type: Boolean
* probe_filters -- Can be one of region: NA, region: EU, region: APAC
Type: String
HTTP check options:
* url -- Target path on server
Type: String
* encryption -- Use SSL/TLS
Type: Boolean
* port -- Target server port
Type: Integer
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
* shouldcontain -- Target site should contain this string.
Cannot be combined with 'shouldnotcontain'
Type: String
* shouldnotcontain -- Target site should not contain this string.
Cannot be combined with 'shouldcontain'
Type: String
* postdata -- Data that should be posted to the web page,
for example submission data for a sign-up or login form.
The data needs to be formatted in the same way as a web browser
would send it to the web server
Type: String
* requestheader<NAME> -- Custom HTTP header, replace <NAME> with
desired header name. Header in form: Header:Value
Type: String
HTTPCustom check options:
* url -- Target path on server
Type: String
* encryption -- Use SSL/TLS
Type: Boolean
* port -- Target server port
Type: Integer
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
* additionalurls -- Colon-separated list of additional URLs with
hostname included
Type: String
TCP check options:
* port -- Target server port
Type: Integer
* stringtosend -- String to send
Type: String
* stringtoexpect -- String to expect in response
Type: String
DNS check options:
* expectedip -- Expected IP
Type: String
* nameserver -- Nameserver to check
Type: String
UDP check options:
* port -- Target server port
Type: Integer
* stringtosend -- String to send
Type: String
* stringtoexpect -- String to expect in response
Type: String
SMTP check options:
* port -- Target server port
Type: Integer
* auth -- Username and password for target SMTP authentication.
Example: user:password
Type: String
* stringtoexpect -- String to expect in response
Type: String
* encryption -- Use connection encryption
Type: Boolean
POP3 check options:
* port -- Target server port
Type: Integer
* stringtoexpect -- String to expect in response
Type: String
* encryption -- Use connection encryption
Type: Boolean
IMAP check options:
* port -- Target server port
Type: Integer
* stringtoexpect -- String to expect in response
Type: String
* encryption -- Use connection encryption
Type: Boolean
""" |
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids', 'sendtoemail',
'sendtosms', 'sendtotwitter', 'sendtoiphone',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'created', 'type', 'hostname',
'status', 'lasterrortime', 'lasttesttime', 'url',
'encryption', 'port', 'auth', 'shouldcontain',
'shouldnotcontain', 'postdata', 'additionalurls',
'stringtosend', 'stringtoexpect', 'expectedip',
'nameserver', 'use_legacy_notifications', 'host',
'alert_policy', 'autoresolve', 'probe_filters']:
sys.stderr.write("'%s'" % key + ' is not a valid argument of' +
'<PingdomCheck>.modify()\n')
# If one of the legacy parameters is used, it is required to set the legacy flag.
# https://github.com/KennethWilke/PingdomLib/issues/12
if any([k for k in kwargs if k in legacy_notification_parameters]):
if "use_legacy_notifications" in kwargs and kwargs["use_legacy_notifications"] != True:
raise Exception("Cannot set legacy parameter when use_legacy_notifications is not True")
kwargs["use_legacy_notifications"] = True
response = self.pingdom.request("PUT", 'checks/%s' % self.id, kwargs)
return response.json()['message'] |
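A hypothetical call, assuming check is an existing HTTP PingdomCheck:

message = check.modify(resolution=5,
                       sendnotificationwhendown=3,
                       notifywhenbackup=True,
                       url='/healthz',
                       shouldcontain='OK')
print(message)  # status message returned by the API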
<SYSTEM_TASK:>
Get a list of probes that performed tests for a specified check
<END_TASK>
<USER_TASK:>
Description:
def probes(self, fromtime, totime=None):
"""Get a list of probes that performed tests for a specified check
during a specified period.""" |
args = {'from': fromtime}
if totime:
args['to'] = totime
response = self.pingdom.request('GET', 'summary.probes/%s' % self.id,
args)
return response.json()['probes'] |
<SYSTEM_TASK:>
Activate public report for this check.
<END_TASK>
<USER_TASK:>
Description:
def publishPublicReport(self):
"""Activate public report for this check.
Returns status message""" |
response = self.pingdom.request('PUT', 'reports.public/%s' % self.id)
return response.json()['message'] |