text_prompt
stringlengths 100
17.7k
⌀ | code_prompt
stringlengths 7
9.86k
⌀ |
---|---|
<SYSTEM_TASK:>
Small helper to find all handlers associated to a given event
<END_TASK>
<USER_TASK:>
Description:
def find_handlers(event_name, registry=HANDLER_REGISTRY):
    """Small helper to find all handlers associated to a given event

    If the event can't be found, an empty list will be returned, since
    this is an internal function and all validation against the event
    name and its existence was already performed.
    """
    # event_name can be a BaseEvent or its string representation; the
    # string form supports fnmatch-style wildcards against the registry.
    if not isinstance(event_name, basestring):
        return registry.get(find_event(event_name), [])
    found = []
    for registered_name in registry.keys():
        if fnmatch.fnmatchcase(event_name, registered_name):
            found.extend(registry.get(registered_name))
    return found
<SYSTEM_TASK:>
Return all default values that an event should have
<END_TASK>
<USER_TASK:>
Description:
def get_default_values(data):
    """Return all default values that an event should have

    :param data: dict that may contain a ``request`` key holding the
        current HTTP request.
    :return: dict with ``__datetime__`` (now) and ``__ip_address__``
        (client IP, or '0.0.0.0' when no request is available).
    """
    request = data.get('request')
    result = {}
    result['__datetime__'] = datetime.now()
    # Explicit conditional instead of the `X and Y or Z` trick: the old
    # idiom silently fell back to '0.0.0.0' whenever get_ip() returned a
    # falsy value (e.g. an empty string), discarding the real result.
    result['__ip_address__'] = get_ip(request) if request else '0.0.0.0'
    return result
<SYSTEM_TASK:>
Remove special values that log function can take
<END_TASK>
<USER_TASK:>
Description:
def filter_data_values(data):
    """Remove special values that log function can take

    There are some special values, like "request", that the `log()`
    function can take, but they're not meant to be passed to the celery
    task neither for the event handlers. This function filter these keys
    and return another dict without them.

    :param data: dict of values passed to `log()`.
    :return: a new dict with the banned keys removed.
    """
    banned = ('request',)
    # `key not in banned` is the idiomatic form of `not key in banned`.
    return {key: val for key, val in data.items() if key not in banned}
<SYSTEM_TASK:>
Import all events declared for all currently installed apps
<END_TASK>
<USER_TASK:>
Description:
def import_event_modules():
    """Import all events declared for all currently installed apps

    Walks the INSTALLED_APPS setting and imports each app's
    `EVENTS_MODULE_NAME` module; apps that don't declare one are
    silently skipped.
    """
    for app in getsetting('INSTALLED_APPS'):
        try:
            import_module(u'{}.{}'.format(app, EVENTS_MODULE_NAME))
        except ImportError:
            # The app declares no events module; nothing to do.
            pass
<SYSTEM_TASK:>
Check of expired accounts.
<END_TASK>
<USER_TASK:>
Description:
def handle_expired_accounts():
    """
    Check of expired accounts.

    Iterates over every RegistrationProfile:
      * profiles that were already activated are collected and deleted
        at the end (they are no longer needed);
      * users that never activated within the allowed window are
        deactivated and notified by email.
    """
    ACTIVATED = RegistrationProfile.ACTIVATED
    # Length of the activation window, as configured in settings.
    expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)
    to_delete = []
    print "Processing %s registration profiles..." % str(RegistrationProfile.objects.all().count())
    for profile in RegistrationProfile.objects.all():
        # if registration profile is expired, deactivate user.
        print "Processing %s" % profile.user
        # If profile has been activated already, set it to be removed
        # and move on to next registration profile
        if profile.activation_key == ACTIVATED:
            print "Found Active"
            to_delete.append(profile.pk)
            continue
        # If the user has not activated their account and the activation
        # days have passed, deactivate the user and send an email to user.
        if profile.user.is_active and profile.user.date_joined + expiration_date <= datetime.datetime.now():
            print "Found Expired"
            user = profile.user
            user.is_active = False
            # Send an email notifying the user that their account has
            # become inactive.
            site = Site.objects.get_current()
            ctx_dict = {'site': site,
                        'activation_key': profile.activation_key}
            subject = render_to_string(
                'registration/email/emails/account_expired_subject.txt',
                ctx_dict)
            # Email subjects must not contain newlines.
            subject = ''.join(subject.splitlines())
            message = render_to_string(
                'registration/email/emails/account_expired.txt',
                ctx_dict)
            user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
            # Only save the user instance after the email is sent.
            user.save()
    # Delete the registration profiles that were set to be deleted, aka
    # user has already activated their account.
    print "Deleting %s registration profiles." % str(len(to_delete))
    RegistrationProfile.objects.filter(pk__in=to_delete).delete()
<SYSTEM_TASK:>
Override default activation process. This will activate the user
<END_TASK>
<USER_TASK:>
Description:
def activate(self, request, activation_key):
    """
    Override default activation process. This will activate the user
    even if its passed its expiration date.

    :return: the activated user on success, False otherwise.
    """
    # Reject anything that doesn't look like a SHA1 activation key.
    if not SHA1_RE.search(activation_key):
        return False
    try:
        profile = RegistrationProfile.objects.get(activation_key=activation_key)
    except RegistrationProfile.DoesNotExist:
        return False
    user = profile.user
    user.is_active = True
    user.save()
    # Mark the profile as used so it can't be replayed.
    profile.activation_key = RegistrationProfile.ACTIVATED
    profile.save()
    return user
<SYSTEM_TASK:>
Create and immediately log in a new user.
<END_TASK>
<USER_TASK:>
Description:
def register(self, request, **kwargs):
    """
    Create and immediately log in a new user.

    Only require a email to register, username is generated
    automatically and a password is random generated and emailed
    to the user.
    Activation is still required for account uses after specified number
    of days.

    :param request: the HTTP request carrying the registration data.
    :param kwargs: must contain an 'email' key.
    :return: the newly created (and logged-in) User instance.
    """
    if Site._meta.installed:
        site = Site.objects.get_current()
    else:
        site = RequestSite(request)
    email = kwargs['email']
    # Generate random password
    password = User.objects.make_random_password()
    # Generate username based off of the email supplied
    username = sha_constructor(str(email)).hexdigest()[:30]
    incr = 0
    # Ensure the generated username is in fact unique by salting the
    # email with an increasing counter until no collision remains.
    while User.objects.filter(username=username).count() > 0:
        incr += 1
        username = sha_constructor(str(email + str(incr))).hexdigest()[:30]
    # Create the active user
    new_user = User.objects.create_user(username, email, password)
    new_user.save()
    # Create the registration profile; still needed because the user
    # must activate their account for continued use after the
    # configured number of days.
    registration_profile = RegistrationProfile.objects.create_profile(
        new_user)
    # Authenticate and login the new user automatically
    auth_user = authenticate(username=username, password=password)
    login(request, auth_user)
    # Set the expiration to when the user's browser closes so the user
    # is forced to log in upon the next visit; this should push them to
    # check their email for the generated password.
    request.session.set_expiry(0)
    # Create a profile instance for the new user if
    # AUTH_PROFILE_MODULE is specified in settings
    if hasattr(settings, 'AUTH_PROFILE_MODULE') and getattr(settings, 'AUTH_PROFILE_MODULE'):
        app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.')
        model = models.get_model(app_label, model_name)
        try:
            profile = new_user.get_profile()
        except model.DoesNotExist:
            # No profile yet: create and persist an empty one.
            profile = model(user=new_user)
            profile.save()
    # Custom send activation email
    self.send_activation_email(
        new_user, registration_profile, password, site)
    # Send user_registered signal
    signals.user_registered.send(sender=self.__class__,
                                 user=new_user,
                                 request=request)
    return new_user
<SYSTEM_TASK:>
Custom send email method to supplied the activation link and
<END_TASK>
<USER_TASK:>
Description:
def send_activation_email(self, user, profile, password, site):
    """
    Custom send email method to supply the activation link and
    new generated password.

    :param user: User the email is addressed to.
    :param profile: RegistrationProfile carrying the activation key.
    :param password: generated plain-text password to include.
    :param site: current site, passed to the templates.
    """
    ctx_dict = {'password': password,
                'site': site,
                'activation_key': profile.activation_key,
                'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS}
    subject = render_to_string(
        'registration/email/emails/password_subject.txt',
        ctx_dict)
    # Email subject *must not* contain newlines
    subject = ''.join(subject.splitlines())
    message = render_to_string('registration/email/emails/password.txt',
                               ctx_dict)
    try:
        user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
    except Exception:
        # Sending is deliberately best-effort, but the previous bare
        # `except:` also swallowed SystemExit/KeyboardInterrupt;
        # narrowed to Exception.
        pass
<SYSTEM_TASK:>
After registration, redirect to the home page or supplied "next"
<END_TASK>
<USER_TASK:>
Description:
def post_registration_redirect(self, request, user):
    """
    After registration, redirect to the home page or supplied "next"
    query string or hidden field value.

    :return: (url, args, kwargs) tuple as expected by the registration
        backend redirect machinery.
    """
    if "next" in request.GET or "next" in request.POST:
        destination = (request.GET.get("next", None) or
                       request.POST.get("next", None) or "/")
    else:
        destination = "/registration/register/complete/"
    return (destination, (), {})
<SYSTEM_TASK:>
Returns the next batch for the batched sequence or `None`, if
<END_TASK>
<USER_TASK:>
Description:
def next(self):
    """
    Returns the next batch for the batched sequence or `None`, if
    this batch is already the last batch.
    :rtype: :class:`Batch` instance or `None`.
    """
    upcoming_start = self.start + self.size
    # Past the end of the sequence means there is no next batch.
    if upcoming_start > self.total_size:
        return None
    return Batch(upcoming_start, self.size, self.total_size)
<SYSTEM_TASK:>
Returns the previous batch for the batched sequence or `None`, if
<END_TASK>
<USER_TASK:>
Description:
def previous(self):
    """
    Returns the previous batch for the batched sequence or `None`, if
    this batch is already the first batch.
    :rtype: :class:`Batch` instance or `None`.
    """
    earlier_start = self.start - self.size
    # Before the beginning of the sequence means there is no previous batch.
    if earlier_start < 0:
        return None
    return Batch(earlier_start, self.size, self.total_size)
<SYSTEM_TASK:>
Returns the last batch for the batched sequence.
<END_TASK>
<USER_TASK:>
Description:
def last(self):
    """
    Returns the last batch for the batched sequence.
    :rtype: :class:`Batch` instance.
    """
    # Start index of the final batch (0 when the sequence is empty).
    final_start = self.size * max(self.number - 1, 0)
    return Batch(final_start, self.size, self.total_size)
<SYSTEM_TASK:>
Returns the number of batches the batched sequence contains.
<END_TASK>
<USER_TASK:>
Description:
def number(self):
    """
    Returns the number of batches the batched sequence contains.
    :rtype: integer.
    """
    # Ceiling division of total_size by size, without going through
    # floating point.
    full_batches, remainder = divmod(self.total_size, self.size)
    return int(full_batches + 1) if remainder else int(full_batches)
<SYSTEM_TASK:>
Returns the URL to a watermarked copy of the image specified.
<END_TASK>
<USER_TASK:>
Description:
def watermark(url, args=''):
    """
    Returns the URL to a watermarked copy of the image specified.

    `args` is a comma-separated list whose first item is the watermark
    name, followed by optional key=value overrides.
    """
    arg_list = args.split(',')
    # Defaults for every recognised parameter; the first positional
    # item is always the watermark name.
    params = dict(
        name=arg_list.pop(0),
        opacity=0.5,
        tile=False,
        scale=1.0,
        greyscale=False,
        rotation=0,
        position=None,
        quality=QUALITY,
        obscure=OBSCURE_ORIGINAL,
        random_position_once=RANDOM_POSITION_ONCE,
    )
    params['url'] = unquote(url)
    # How each recognised key coerces its raw string value.
    converters = {
        'position': lambda v: v,
        'opacity': utils._percent,
        'tile': lambda v: bool(int(v)),
        'scale': lambda v: v,
        'greyscale': lambda v: bool(int(v)),
        'rotation': lambda v: v,
        'quality': int,
        'obscure': lambda v: bool(int(v)),
        'random_position_once': lambda v: bool(int(v)),
    }
    for arg in arg_list:
        key, value = arg.split('=')
        key, value = key.strip(), value.strip()
        if key in converters:
            params[key] = converters[key](value)
    return Watermarker()(**params)
<SYSTEM_TASK:>
Comes up with a good filename for the watermarked image
<END_TASK>
<USER_TASK:>
Description:
def generate_filename(self, mark, **kwargs):
    """Comes up with a good filename for the watermarked image.

    The name encodes every watermark parameter plus the source file's
    mtime/size, so each distinct combination maps to a distinct cached
    file and stale caches are invalidated when the source changes.

    :param mark: the watermark image; its size feeds the scale suffix.
    :param kwargs: expected keys: `original_basename`, `watermark`,
        `opacity`, `greyscale`, `rotation`, `position`, `fstat`, `ext`,
        and optionally `scale` and `tile`.
    :return: the generated filename string.
    """
    kwargs = kwargs.copy()
    # Store opacity as a whole percentage so it formats with %i below.
    kwargs['opacity'] = int(kwargs['opacity'] * 100)
    # Source-file mtime and size make the name change with the file.
    kwargs['st_mtime'] = kwargs['fstat'].st_mtime
    kwargs['st_size'] = kwargs['fstat'].st_size
    params = [
        '%(original_basename)s',
        'wm',
        'w%(watermark)i',
        'o%(opacity)i',
        'gs%(greyscale)i',
        'r%(rotation)i',
        'fm%(st_mtime)i',
        'fz%(st_size)i',
        'p%(position)s',
    ]
    scale = kwargs.get('scale', None)
    if scale and scale != mark.size:
        # Record the scale as a percentage of the mark's natural width.
        # NOTE(review): assumes kwargs['scale'] is a (w, h) tuple here —
        # confirm against callers.
        params.append('_s%i' % (float(kwargs['scale'][0]) / mark.size[0] * 100))
    if kwargs.get('tile', None):
        params.append('_tiled')
    # make thumbnail filename
    filename = '%s%s' % ('_'.join(params), kwargs['ext'])
    return filename % kwargs
<SYSTEM_TASK:>
Determines an appropriate watermark path
<END_TASK>
<USER_TASK:>
Description:
def get_url_path(self, basedir, original_basename, ext, name, obscure=True):
    """Determines an appropriate watermark path.

    :param basedir: base directory for generated files.
    :param original_basename: basename of the source image.
    :param ext: file extension, including the leading dot.
    :param name: unique name that is SHA1-hashed into the path.
    :param obscure: when True only the hash appears in the path; when
        False the original basename is kept visible under a hash dir.
    :return: the URL path for the watermarked image.
    """
    try:
        hash = hashlib.sha1(smart_str(name)).hexdigest()
    except TypeError:
        # sha1 requires bytes on Python 3; retry with an encoded name.
        hash = hashlib.sha1(smart_str(name).encode('utf-8')).hexdigest()
    # figure out where the watermark would be saved on the filesystem
    if obscure is True:
        logger.debug('Obscuring original image name: %s => %s' % (name, hash))
        url_path = os.path.join(basedir, hash + ext)
    else:
        logger.debug('Not obscuring original image name.')
        url_path = os.path.join(basedir, hash, original_basename + ext)
    # make sure the destination directory exists
    try:
        fpath = self._get_filesystem_path(url_path)
        os.makedirs(os.path.dirname(fpath))
    except OSError as e:
        if e.errno == errno.EEXIST:
            pass  # not to worry, directory exists
        else:
            logger.error('Error creating path: %s' % traceback.format_exc())
            raise
    else:
        logger.debug('Created directory: %s' % os.path.dirname(fpath))
    return url_path
<SYSTEM_TASK:>
Create the watermarked image on the filesystem
<END_TASK>
<USER_TASK:>
Description:
def create_watermark(self, target, mark, fpath, quality=QUALITY, **kwargs):
    """Create the watermarked image on the filesystem.

    Composites `mark` onto `target`, saves the result at `fpath` with
    the given JPEG/PNG quality, and returns the composited image.
    """
    watermarked = utils.watermark(target, mark, **kwargs)
    watermarked.save(fpath, quality=quality)
    return watermarked
<SYSTEM_TASK:>
Tries to determine the appropriate value of a particular variable that is
<END_TASK>
<USER_TASK:>
Description:
def _val(var, is_percent=False):
"""
Tries to determine the appropriate value of a particular variable that is
passed in. If the value is supposed to be a percentage, a whole integer
will be sought after and then turned into a floating point number between
0 and 1. If the value is supposed to be an integer, the variable is cast
into an integer.
""" |
try:
if is_percent:
var = float(int(var.strip('%')) / 100.0)
else:
var = int(var)
except ValueError:
raise ValueError('invalid watermark parameter: ' + var)
return var |
<SYSTEM_TASK:>
Returns an image with reduced opacity.
<END_TASK>
<USER_TASK:>
Description:
def reduce_opacity(img, opacity):
    """
    Returns an image with reduced opacity.

    Works on a copy: the input image is never mutated.
    """
    assert 0 <= opacity <= 1
    # Operate on an RGBA copy so the alpha channel exists and the
    # caller's image stays untouched.
    rgba = img.convert('RGBA') if img.mode != 'RGBA' else img.copy()
    # Scale the alpha band down by the requested opacity.
    alpha = ImageEnhance.Brightness(rgba.split()[3]).enhance(opacity)
    rgba.putalpha(alpha)
    return rgba
<SYSTEM_TASK:>
Scales an image using a specified ratio, 'F' or 'R'. If `scale` is
<END_TASK>
<USER_TASK:>
Description:
def determine_scale(scale, img, mark):
    """
    Scales an image using a specified ratio, 'F' or 'R'. If `scale` is
    'F', the image is scaled to be as big as possible to fit in `img`
    without falling off the edges. If `scale` is 'R', the watermark
    resizes to a percentage of minimum size of source image. Returns
    the scaled size for `mark` as a (width, height) tuple.
    """
    if scale:
        try:
            # Numeric strings are treated as plain scale ratios.
            scale = float(scale)
        except (ValueError, TypeError):
            pass
        if isinstance(scale, six.string_types) and scale.upper() == 'F':
            # scale watermark to full, but preserve the aspect ratio
            scale = min(
                float(img.size[0]) / mark.size[0],
                float(img.size[1]) / mark.size[1]
            )
        elif isinstance(scale, six.string_types) and scale.upper() == 'R':
            # scale watermark to % of source image and preserve the aspect ratio
            scale = min(
                float(img.size[0]) / mark.size[0],
                float(img.size[1]) / mark.size[1]
            ) / 100 * settings.WATERMARK_PERCENTAGE
        elif type(scale) not in (float, int):
            raise ValueError('Invalid scale value "%s"! Valid values are "F" '
                'for ratio-preserving scaling, "R%%" for percantage aspect '
                'ratio of source image and floating-point numbers and '
                'integers greater than 0.' % scale)
        # determine the new width and height
        w = int(mark.size[0] * float(scale))
        h = int(mark.size[1] * float(scale))
        # apply the new width and height, and return the new `mark`
        return (w, h)
    else:
        # Falsy scale (None, 0, '') leaves the mark at its natural size.
        return mark.size
<SYSTEM_TASK:>
Determines the number of degrees to rotate the watermark image.
<END_TASK>
<USER_TASK:>
Description:
def determine_rotation(rotation, mark):
    """
    Determines the number of degrees to rotate the watermark image.

    'r'/'R' selects a random angle; anything else is coerced to int.
    """
    if isinstance(rotation, six.string_types) and rotation.lower() == 'r':
        return random.randint(0, 359)
    return _int(rotation)
<SYSTEM_TASK:>
Adds a watermark to an image
<END_TASK>
<USER_TASK:>
Description:
def watermark(img, mark, position=(0, 0), opacity=1, scale=1.0, tile=False,
              greyscale=False, rotation=0, return_name=False, **kwargs):
    """Adds a watermark to an image.

    :param img: source PIL image.
    :param mark: watermark PIL image.
    :param position: anchor resolved via determine_position.
    :param opacity: watermark opacity between 0 and 1.
    :param scale: spec for determine_scale, or an explicit (w, h) tuple.
    :param tile: when True the watermark is repeated across the image.
    :param greyscale: convert the watermark to greyscale + alpha.
    :param rotation: degrees, or 'r' for a random angle.
    :param return_name: accepted for API compatibility — unused in this
        body; NOTE(review): confirm whether any caller relies on it.
    :return: the composited RGBA image.
    """
    if opacity < 1:
        mark = reduce_opacity(mark, opacity)
    if not isinstance(scale, tuple):
        scale = determine_scale(scale, img, mark)
    mark = mark.resize(scale, resample=Image.ANTIALIAS)
    if greyscale and mark.mode != 'LA':
        mark = mark.convert('LA')
    rotation = determine_rotation(rotation, mark)
    if rotation != 0:
        # give some leeway for rotation overlapping
        new_w = int(mark.size[0] * 1.5)
        new_h = int(mark.size[1] * 1.5)
        new_mark = Image.new('RGBA', (new_w, new_h), (0, 0, 0, 0))
        # center the watermark in the newly resized image
        new_l = int((new_w - mark.size[0]) / 2)
        new_t = int((new_h - mark.size[1]) / 2)
        new_mark.paste(mark, (new_l, new_t))
        mark = new_mark.rotate(rotation)
    position = determine_position(position, img, mark)
    if img.mode != 'RGBA':
        img = img.convert('RGBA')
    # make sure we have a tuple for a position now
    assert isinstance(position, tuple), 'Invalid position "%s"!' % position
    # create a transparent layer the size of the image and draw the
    # watermark in that layer.
    layer = Image.new('RGBA', img.size, (0, 0, 0, 0))
    if tile:
        # Start one tile up/left of the anchor so the whole image is
        # covered even for offsets inside the first tile.
        first_y = int(position[1] % mark.size[1] - mark.size[1])
        first_x = int(position[0] % mark.size[0] - mark.size[0])
        for y in range(first_y, img.size[1], mark.size[1]):
            for x in range(first_x, img.size[0], mark.size[0]):
                layer.paste(mark, (x, y))
    else:
        layer.paste(mark, position)
    # composite the watermark with the layer
    return Image.composite(layer, img, layer)
<SYSTEM_TASK:>
Return the list of commands to run.
<END_TASK>
<USER_TASK:>
Description:
def commands(config, names):
    """Return the list of commands to run.

    Every config section except 'packages' defines a Command; the
    requested `names` are resolved against those sections.
    """
    available = {}
    for section in config.sections():
        if section == 'packages':
            continue
        options = dict((minus_to_underscore(key), value)
                       for key, value in config.items(section))
        available[section] = Command(**options)
    try:
        return tuple(available[name] for name in names)
    except KeyError as e:
        raise RuntimeError(
            'Section [commands] in the config file does not contain the '
            'key {.args[0]!r} you requested to execute.'.format(e))
<SYSTEM_TASK:>
Path to a file in the project.
<END_TASK>
<USER_TASK:>
Description:
def project_path(*names):
    """Path to a file in the project.

    Joins `names` onto the directory containing this module.
    """
    base = os.path.dirname(__file__)
    return os.path.join(base, *names)
<SYSTEM_TASK:>
Get the OSA sha referenced by an RPCO Repo.
<END_TASK>
<USER_TASK:>
Description:
def get_osa_commit(repo, ref, rpc_product=None):
    """Get the OSA sha referenced by an RPCO Repo.

    Tries three strategies, in order:
      1. the `rpc_product_releases` mapping in playbooks/vars/rpc-release.yml;
      2. the `openstack-ansible` git submodule;
      3. the OSA_RELEASE variable in scripts/functions.sh.

    :param repo: git Repo-like object with a working tree.
    :param ref: git ref to check out before inspecting the tree.
    :param rpc_product: product key looked up in rpc-release.yml.
    :return: the OSA commit SHA (or release string) as text.
    :raises SHANotFound: when no strategy yields a SHA.
    """
    osa_differ.checkout(repo, ref)
    functions_path = os.path.join(repo.working_tree_dir,
                                  'scripts/functions.sh')
    release_path = os.path.join(repo.working_tree_dir,
                                'playbooks/vars/rpc-release.yml')
    if os.path.exists(release_path):
        with open(release_path) as f:
            rpc_release_data = yaml.safe_load(f.read())
        rpc_product_releases = rpc_release_data['rpc_product_releases']
        release_data = rpc_product_releases[rpc_product]
        return release_data['osa_release']
    elif repo.submodules['openstack-ansible']:
        # NOTE(review): indexing raises rather than returning falsy when
        # the submodule is absent — presumably guaranteed present on
        # branches without rpc-release.yml; confirm.
        return repo.submodules['openstack-ansible'].hexsha
    elif os.path.exists(functions_path):
        # This branch doesn't use a submodule for OSA
        # Pull the SHA out of functions.sh
        quoted_re = re.compile('OSA_RELEASE:-?"?([^"}]+)["}]')
        with open(functions_path, "r") as funcs:
            for line in funcs.readlines():
                match = quoted_re.search(line)
                if match:
                    return match.groups()[0]
            else:
                raise SHANotFound(
                    ("Cannot find OSA SHA in submodule or "
                     "script: {}".format(functions_path)))
    else:
        raise SHANotFound('No OSA SHA was able to be derived.')
<SYSTEM_TASK:>
Publish the RST report based on the user request.
<END_TASK>
<USER_TASK:>
Description:
def publish_report(report, args, old_commit, new_commit):
    """Publish the RST report based on the user request.

    :param report: rendered RST report text.
    :param args: argparse namespace with `quiet`, `gist` and `file`.
    :param old_commit: older commit the report covers.
    :param new_commit: newer commit the report covers.
    :return: the report itself when printing to stdout, otherwise a
        summary of where the report was published.
    """
    # Print the report to stdout unless the user specified --quiet.
    output = ""
    if not args.quiet and not args.gist and not args.file:
        return report
    if args.gist:
        gist_url = post_gist(report, old_commit, new_commit)
        output += "\nReport posted to GitHub Gist: {0}".format(gist_url)
    if args.file is not None:
        with open(args.file, 'w') as f:
            # NOTE(review): encode() written to a text-mode handle only
            # works on Python 2; this would raise on Python 3 — confirm
            # the supported interpreter.
            f.write(report.encode('utf-8'))
        output += "\nReport written to file: {0}".format(args.file)
    return output
<SYSTEM_TASK:>
Removes a list from the site.
<END_TASK>
<USER_TASK:>
Description:
def remove(self, list):
    """
    Removes a list from the site.

    Issues a DeleteList SOAP call for the list's id, then drops the
    list from the local cache.

    NOTE(review): the parameter shadows the `list` builtin; the name is
    kept because it is part of the public signature.
    """
    xml = SP.DeleteList(SP.listName(list.id))
    self.opener.post_soap(LIST_WEBSERVICE, xml,
                          soapaction='http://schemas.microsoft.com/sharepoint/soap/DeleteList')
    self.all_lists.remove(list)
<SYSTEM_TASK:>
Creates a new list in the site.
<END_TASK>
<USER_TASK:>
Description:
def create(self, name, description='', template=100):
    """
    Creates a new list in the site.

    :param name: display name for the new list; must not already exist
        and must not look like a UUID.
    :param description: optional description for the list.
    :param template: numeric template ID, or a key in LIST_TEMPLATES.
    :raises ValueError: for duplicate or UUID-like names.
    """
    try:
        template = int(template)
    except ValueError:
        # Not numeric: resolve a named template to its ID.
        template = LIST_TEMPLATES[template]
    if name in self:
        # Fixed: the message previously lacked the closing quote.
        raise ValueError("List already exists: '{0}'".format(name))
    if uuid_re.match(name):
        raise ValueError("Cannot create a list with a UUID as a name")
    xml = SP.AddList(SP.listName(name),
                     SP.description(description),
                     SP.templateID(text_type(template)))
    result = self.opener.post_soap(LIST_WEBSERVICE, xml,
                                   soapaction='http://schemas.microsoft.com/sharepoint/soap/AddList')
    list_element = result.xpath('sp:AddListResult/sp:List', namespaces=namespaces)[0]
    self._all_lists.append(SharePointList(self.opener, self, list_element))
<SYSTEM_TASK:>
The class for a row in this list.
<END_TASK>
<USER_TASK:>
Description:
def Row(self):
    """
    The class for a row in this list.

    Lazily builds (and caches on `_row_class`) a SharePointListRow
    subclass whose attributes are this list's field descriptors.
    """
    if not hasattr(self, '_row_class'):
        attrs = {'fields': self.fields, 'list': self, 'opener': self.opener}
        # Field descriptors are added after the base attributes, so a
        # field named like a base key takes precedence.
        for field in self.fields.values():
            attrs[field.name] = field.descriptor
        self._row_class = type('SharePointListRow', (SharePointListRow,), attrs)
    return self._row_class
<SYSTEM_TASK:>
Appends a row to the list. Takes a dictionary, returns a row.
<END_TASK>
<USER_TASK:>
Description:
def append(self, row):
    """
    Appends a row to the list. Takes a dictionary, returns a row.

    :param row: dict of field values, or an instance of this list's
        Row class.
    :return: the appended row instance.
    :raises TypeError: for a row belonging to another SharePointList,
        or any other unsupported type.
    """
    if isinstance(row, dict):
        row = self.Row(row)
    elif isinstance(row, self.Row):
        pass
    elif isinstance(row, SharePointListRow):
        # A Row from a *different* list: its fields would not match
        # this list's schema, so refuse it with a distinct message.
        raise TypeError("row must be a dict or an instance of SharePointList.Row, not SharePointListRow")
    else:
        raise TypeError("row must be a dict or an instance of SharePointList.Row")
    self.rows  # Make sure self._rows exists.
    self._rows.append(row)
    return row
<SYSTEM_TASK:>
Removes the row from the list.
<END_TASK>
<USER_TASK:>
Description:
def remove(self, row):
    """
    Removes the row from the list.

    The deletion only reaches the server when save() is called; until
    then the row is tracked in `_deleted_rows`.

    :raises ValueError: if the row is not in the list.
    """
    self._rows.remove(row)
    self._deleted_rows.add(row)
<SYSTEM_TASK:>
Updates the list with changes.
<END_TASK>
<USER_TASK:>
Description:
def save(self):
    """
    Updates the list with changes.

    Builds one UpdateListItems SOAP batch containing a Method element
    per new/changed row and a Delete method per removed row, posts it,
    then applies SharePoint's per-row results back onto the local rows.

    :raises UpdateFailedError: if SharePoint reports an error for any
        row in the batch.
    """
    # Based on the documentation at
    # http://msdn.microsoft.com/en-us/library/lists.lists.updatelistitems%28v=office.12%29.aspx
    # Note, this ends up un-namespaced. SharePoint doesn't care about
    # namespaces on this XML node, and will bork if any of these elements
    # have a namespace prefix. Likewise Method and Field in
    # SharePointRow.get_batch_method().
    batches = E.Batch(ListVersion='1', OnError='Return')
    # Here's the root element of our SOAP request.
    xml = SP.UpdateListItems(SP.listName(self.id), SP.updates(batches))
    # rows_by_batch_id contains a mapping from new rows to their batch
    # IDs, so we can set their IDs when they are returned by SharePoint.
    rows_by_batch_id, batch_id = {}, 1
    for row in self._rows:
        batch = row.get_batch_method()
        if batch is None:
            # Unchanged row: nothing to send.
            continue
        # Add the batch ID
        batch.attrib['ID'] = text_type(batch_id)
        rows_by_batch_id[batch_id] = row
        batches.append(batch)
        batch_id += 1
    for row in self._deleted_rows:
        batch = E.Method(E.Field(text_type(row.id),
                                 Name='ID'),
                         ID=text_type(batch_id), Cmd='Delete')
        rows_by_batch_id[batch_id] = row
        batches.append(batch)
        batch_id += 1
    if len(batches) == 0:
        # Nothing changed; skip the round-trip entirely.
        return
    response = self.opener.post_soap(LIST_WEBSERVICE, xml,
                                     soapaction='http://schemas.microsoft.com/sharepoint/soap/UpdateListItems')
    # Each Result carries an ID of the form "<batch_id>,<command>".
    for result in response.xpath('.//sp:Result', namespaces=namespaces):
        batch_id, batch_result = result.attrib['ID'].split(',')
        row = rows_by_batch_id[int(batch_id)]
        error_code = result.find('sp:ErrorCode', namespaces=namespaces)
        error_text = result.find('sp:ErrorText', namespaces=namespaces)
        if error_code is not None and error_code.text != '0x00000000':
            raise UpdateFailedError(row, batch_result,
                                    error_code.text,
                                    error_text.text)
        if batch_result in ('Update', 'New'):
            # Refresh the row from the authoritative server response.
            row._update(result.xpath('z:row', namespaces=namespaces)[0],
                        clear=True)
        else:
            self._deleted_rows.remove(row)
    # Sanity check: everything we sent must have been acknowledged.
    assert not self._deleted_rows
    assert not any(row._changed for row in self.rows)
<SYSTEM_TASK:>
Extracts a value for the field from an XML-RPC response.
<END_TASK>
<USER_TASK:>
Description:
def convert_to_python(self, xmlrpc=None):
    """
    Extracts a value for the field from an XML-RPC response.

    Falls back to the field's default when the response is absent.
    """
    if xmlrpc:
        return xmlrpc.get(self.name, self.default)
    # NOTE: a falsy default (0, '', None) deliberately falls through to
    # None here, mirroring the original truthiness-based behaviour.
    return self.default if self.default else None
<SYSTEM_TASK:>
Generate a set of output values for a given input.
<END_TASK>
<USER_TASK:>
Description:
def get_outputs(self, input_value):
    """
    Generate a set of output values for a given input.

    The converted value is fanned out under every configured output
    name.
    """
    converted = self.convert_to_xmlrpc(input_value)
    return dict((name, converted) for name in self.output_names)
<SYSTEM_TASK:>
XML-RPC-friendly representation of the current object state
<END_TASK>
<USER_TASK:>
Description:
def struct(self):
    """
    XML-RPC-friendly representation of the current object state.

    Only attributes actually set on the instance contribute outputs.
    """
    data = {}
    for var, fmap in self._def.items():
        if not hasattr(self, var):
            continue
        data.update(fmap.get_outputs(getattr(self, var)))
    return data
<SYSTEM_TASK:>
Builds final set of XML-RPC method arguments based on
<END_TASK>
<USER_TASK:>
Description:
def get_args(self, client):
    """
    Builds final set of XML-RPC method arguments based on
    the method's arguments, any default arguments, and their
    defined respective ordering.
    """
    default_args = self.default_args(client)
    # With no declared arguments, the defaults are the whole call.
    if not (self.method_args or self.optional_args):
        return default_args
    optional_args = getattr(self, 'optional_args', tuple())
    extra = []
    for arg_name in self.method_args + optional_args:
        if not hasattr(self, arg_name):
            continue
        value = getattr(self, arg_name)
        # Objects exposing `struct` are serialised through it.
        extra.append(value.struct if hasattr(value, 'struct') else value)
    return list(default_args) + extra
<SYSTEM_TASK:>
Performs actions on the raw result from the XML-RPC response.
<END_TASK>
<USER_TASK:>
Description:
def process_result(self, raw_result):
    """
    Performs actions on the raw result from the XML-RPC response.
    If a `results_class` is defined, the response will be converted
    into one or more object instances of that class.
    """
    if not (self.results_class and raw_result):
        return raw_result
    # A single mapping becomes one instance; any other iterable
    # becomes a list of instances.
    if isinstance(raw_result, dict_type):
        return self.results_class(raw_result)
    # NOTE: collections.Iterable is the Python 2-era location; on
    # Python 3.10+ it lives in collections.abc.
    if isinstance(raw_result, collections.Iterable):
        return [self.results_class(item) for item in raw_result]
    return raw_result
<SYSTEM_TASK:>
Creates request to AddressParser
<END_TASK>
<USER_TASK:>
Description:
def parse(some_text, **kwargs):
    """Creates request to AddressParser
    and returns list of Address objects
    """
    address_parser = parser.AddressParser(**kwargs)
    return address_parser.parse(some_text)
<SYSTEM_TASK:>
Takes the values of an attribute value list and attempts to append
<END_TASK>
<USER_TASK:>
Description:
def setAttribute(values, value):
"""
Takes the values of an attribute value list and attempts to append
attributes of the proper type, inferred from their Python type.
""" |
if isinstance(value, int):
values.add().int32_value = value
elif isinstance(value, float):
values.add().double_value = value
elif isinstance(value, long):
values.add().int64_value = value
elif isinstance(value, str):
values.add().string_value = value
elif isinstance(value, bool):
values.add().bool_value = value
elif isinstance(value, (list, tuple, array.array)):
for v in value:
setAttribute(values, v)
elif isinstance(value, dict):
for key in value:
setAttribute(
values.add().attributes.attr[key].values, value[key])
else:
values.add().string_value = str(value) |
<SYSTEM_TASK:>
Sets a deep attribute on an object by resolving a dot-delimited
<END_TASK>
<USER_TASK:>
Description:
def deepSetAttr(obj, path, val):
    """
    Sets a deep attribute on an object by resolving a dot-delimited
    path. If path does not exist an `AttributeError` will be raised`.
    """
    parent_path, _, leaf = path.rpartition('.')
    # An empty parent means the attribute lives directly on `obj`.
    target = deepGetAttr(obj, parent_path) if parent_path else obj
    return setattr(target, leaf, val)
<SYSTEM_TASK:>
Converts the specified datetime object into its appropriate protocol
<END_TASK>
<USER_TASK:>
Description:
def convertDatetime(t):
    """
    Converts the specified datetime object into its appropriate protocol
    value. This is the number of milliseconds from the epoch.
    """
    since_epoch = t - datetime.datetime.utcfromtimestamp(0)
    return int(since_epoch.total_seconds() * 1000)
<SYSTEM_TASK:>
Extract the currently set field from a Value structure
<END_TASK>
<USER_TASK:>
Description:
def getValueFromValue(value):
    """
    Extract the currently set field from a Value structure
    """
    # Exact type check is intentional: subclasses are not accepted.
    if type(value) != common.AttributeValue:
        raise TypeError(
            "Expected an AttributeValue, but got {}".format(type(value)))
    set_field = value.WhichOneof("value")
    if set_field is None:
        raise AttributeError("Nothing set for {}".format(value))
    return getattr(value, set_field)
<SYSTEM_TASK:>
Returns all the protocol classes that are subclasses of the
<END_TASK>
<USER_TASK:>
Description:
def getProtocolClasses(superclass=message.Message):
    """
    Returns all the protocol classes that are subclasses of the
    specified superclass. Only 'leaf' classes are returned,
    corresponding directly to the classes defined in the protocol.
    """
    # We keep a manual list of the superclasses that we define here
    # so we can filter them out when we're getting the protocol
    # classes.
    superclasses = set([message.Message])
    thisModule = sys.modules[__name__]
    return [class_ for _, class_ in inspect.getmembers(thisModule)
            if inspect.isclass(class_)
            and issubclass(class_, superclass)
            and class_ not in superclasses]
<SYSTEM_TASK:>
Run a shell command given the command's parsed command line
<END_TASK>
<USER_TASK:>
Description:
def runCommandSplits(splits, silent=False, shell=False):
    """
    Run a shell command given the command's parsed command line

    :param splits: argv-style list of command + arguments.
    :param silent: when True, discard the command's stdout/stderr.
    :param shell: passed through to subprocess.
    """
    try:
        if silent:
            with open(os.devnull, 'w') as devnull:
                subprocess.check_call(
                    splits, stdout=devnull, stderr=devnull, shell=shell)
        else:
            subprocess.check_call(splits, shell=shell)
    except OSError as exception:
        if exception.errno != 2:  # errno 2: command not found
            raise
        raise Exception(
            "Can't find command while trying to run {}".format(splits))
<SYSTEM_TASK:>
Create a hierarchy of proto files in a destination directory, copied
<END_TASK>
<USER_TASK:>
Description:
def _createSchemaFiles(self, destPath, schemasPath):
    """
    Create a hierarchy of proto files in a destination directory, copied
    from the schemasPath hierarchy.

    The destination layout is ga4gh/schemas/{ga4gh, google/api},
    matching the package renames applied by _doLineReplacements.

    :param destPath: root of the directory tree to populate.
    :param schemasPath: source tree containing the original .proto files.
    """
    # Create the target directory hierarchy, if necessary
    ga4ghPath = os.path.join(destPath, 'ga4gh')
    if not os.path.exists(ga4ghPath):
        os.mkdir(ga4ghPath)
    ga4ghSchemasPath = os.path.join(ga4ghPath, 'schemas')
    if not os.path.exists(ga4ghSchemasPath):
        os.mkdir(ga4ghSchemasPath)
    ga4ghSchemasGa4ghPath = os.path.join(ga4ghSchemasPath, 'ga4gh')
    if not os.path.exists(ga4ghSchemasGa4ghPath):
        os.mkdir(ga4ghSchemasGa4ghPath)
    ga4ghSchemasGooglePath = os.path.join(ga4ghSchemasPath, 'google')
    if not os.path.exists(ga4ghSchemasGooglePath):
        os.mkdir(ga4ghSchemasGooglePath)
    ga4ghSchemasGoogleApiPath = os.path.join(
        ga4ghSchemasGooglePath, 'api')
    if not os.path.exists(ga4ghSchemasGoogleApiPath):
        os.mkdir(ga4ghSchemasGoogleApiPath)
    # rewrite the proto files to the destination, preserving the
    # relative layout under the schemas root
    for root, dirs, files in os.walk(schemasPath):
        for protoFilePath in fnmatch.filter(files, '*.proto'):
            src = os.path.join(root, protoFilePath)
            dst = os.path.join(
                ga4ghSchemasPath,
                os.path.relpath(root, schemasPath), protoFilePath)
            self._copySchemaFile(src, dst)
<SYSTEM_TASK:>
Given a line of a proto file, replace the line with one that is
<END_TASK>
<USER_TASK:>
Description:
def _doLineReplacements(self, line):
"""
Given a line of a proto file, replace the line with one that is
appropriate for the hierarchy that we want to compile
""" |
# ga4gh packages
packageString = 'package ga4gh;'
if packageString in line:
return line.replace(
packageString,
'package ga4gh.schemas.ga4gh;')
importString = 'import "ga4gh/'
if importString in line:
return line.replace(
importString,
'import "ga4gh/schemas/ga4gh/')
# google packages
googlePackageString = 'package google.api;'
if googlePackageString in line:
return line.replace(
googlePackageString,
'package ga4gh.schemas.google.api;')
googleImportString = 'import "google/api/'
if googleImportString in line:
return line.replace(
googleImportString,
'import "ga4gh/schemas/google/api/')
optionString = 'option (google.api.http)'
if optionString in line:
return line.replace(
optionString,
'option (.ga4gh.schemas.google.api.http)')
return line |
<SYSTEM_TASK:>
Copy a proto file to the temporary directory, with appropriate
<END_TASK>
<USER_TASK:>
Description:
def _copySchemaFile(self, src, dst):
    """
    Write a rewritten copy of the proto file at ``src`` to ``dst``,
    passing every line through _doLineReplacements.
    """
    with open(src) as srcFile, open(dst, 'w') as dstFile:
        for line in srcFile:
            dstFile.write(self._doLineReplacements(line))
<SYSTEM_TASK:>
Protobuf objects can't have arbitrary fields addedd and we need to later on
<END_TASK>
<USER_TASK:>
Description:
def convert_protodef_to_editable(proto):
    """
    Wrap a protobuf descriptor in a mutable "Editable" object.

    Protobuf descriptor objects can't have arbitrary fields added, and
    we need to attach comments to them later on, so each descriptor is
    mirrored into a plain object carrying the fields we care about.
    Raises Exception for descriptor types we do not know how to convert.
    """
    class Editable(object):
        def __init__(self, prot):
            self.kind = type(prot)
            self.name = prot.name
            self.comment = ""
            # Flatten the descriptor's options into a {name: value} dict.
            self.options = dict([(key.name, value) for (key, value) in prot.options.ListFields()])
            if isinstance(prot, EnumDescriptorProto):
                self.value = [convert_protodef_to_editable(x) for x in prot.value]
            elif isinstance(prot, DescriptorProto):
                self.field = [convert_protodef_to_editable(x) for x in prot.field]
                self.enum_type = [convert_protodef_to_editable(x) for x in prot.enum_type]
                self.nested_type = prot.nested_type
                self.oneof_decl = prot.oneof_decl
            elif isinstance(prot, EnumValueDescriptorProto):
                self.number = prot.number
            elif isinstance(prot, FieldDescriptorProto):
                # 11 = TYPE_MESSAGE, 14 = TYPE_ENUM: both reference another
                # named type; record it without the leading '.'.
                if prot.type in [11, 14]:
                    self.ref_type = prot.type_name[1:]
                self.type = prot.type
                self.label = prot.label
            elif isinstance(prot, ServiceDescriptorProto):
                self.method = [convert_protodef_to_editable(x) for x in prot.method]
            elif isinstance(prot, MethodDescriptorProto):
                self.input_type = prot.input_type
                self.output_type = prot.output_type
            else:
                # Bug fix: the original used the Python-2-only statement
                # form "raise Exception, type(prot)", a syntax error under
                # Python 3. The call form below is valid in both.
                raise Exception(type(prot))
    return Editable(proto)
<SYSTEM_TASK:>
Calculate the great-circle distance between two points on the Earth surface.
<END_TASK>
<USER_TASK:>
Description:
def haversine(point1, point2, unit='km'):
    """Return the great-circle distance between two points on Earth.

    :param point1: (latitude, longitude) of the first point, in decimal
        degrees.
    :param point2: (latitude, longitude) of the second point.
    :param unit: initials of the desired unit of measurement: 'km'
        (default), 'm' (meters), 'mi' (miles), 'nmi' (nautic miles),
        'ft' (feets) or 'in' (inches).
    :returns: the distance between the two points, expressed in ``unit``.

    Example: haversine((45.7597, 4.8422), (48.8567, 2.3508))
    """
    # Mean earth radius in km
    # (https://en.wikipedia.org/wiki/Earth_radius#Mean_radius), scaled
    # by unit factors taken from
    # http://www.unitconversion.org/unit_converter/length.html
    unit_factors = {'km': 1,
                    'm': 1000,
                    'mi': 0.621371192,
                    'nmi': 0.539956803,
                    'ft': 3280.839895013,
                    'in': 39370.078740158}
    radius = 6371.0088 * unit_factors[unit]
    # Convert both coordinate pairs from decimal degrees to radians.
    lat1, lng1 = (radians(coordinate) for coordinate in point1)
    lat2, lng2 = (radians(coordinate) for coordinate in point2)
    # Haversine formula.
    delta_lat = lat2 - lat1
    delta_lng = lng2 - lng1
    h = sin(delta_lat * 0.5) ** 2 + cos(lat1) * cos(lat2) * sin(delta_lng * 0.5) ** 2
    return 2 * radius * asin(sqrt(h))
<SYSTEM_TASK:>
Retrieve run folder paths from the command line
<END_TASK>
<USER_TASK:>
Description:
def main():
    """ Retrieve run folder paths from the command line
    Ensure only metrics required for summary are loaded
    Load the run metrics
    Calculate the summary metrics
    Display error by lane, read
    """
    logging.basicConfig(level=logging.INFO)
    # Empty containers that the Illumina InterOp library fills below.
    run_metrics = py_interop_run_metrics.run_metrics()
    summary = py_interop_summary.run_summary()
    # Restrict parsing to only the metric types needed for a run summary.
    valid_to_load = py_interop_run.uchar_vector(py_interop_run.MetricCount, 0)
    py_interop_run_metrics.list_summary_metrics_to_load(valid_to_load)
    for run_folder_path in sys.argv[1:]:
        run_folder = os.path.basename(run_folder_path)
        try:
            run_metrics.read(run_folder_path, valid_to_load)
        except Exception, ex:  # Python 2 syntax; unreadable/missing RunInfo.xml
            logging.warn("Skipping - cannot read RunInfo.xml: %s - %s"%(run_folder, str(ex)))
            continue
        py_interop_summary.summarize_run_metrics(run_metrics, summary)
        # error_rate[read, lane, surface] = mean error rate for that cell.
        error_rate_read_lane_surface = numpy.zeros((summary.size(), summary.lane_count(), summary.surface_count()))
        for read_index in xrange(summary.size()):
            for lane_index in xrange(summary.lane_count()):
                for surface_index in xrange(summary.surface_count()):
                    error_rate_read_lane_surface[read_index, lane_index, surface_index] = \
                        summary.at(read_index).at(lane_index).at(surface_index).error_rate().mean()
        logging.info("Run Folder: "+run_folder)
        # Report the mean error of surface index 0 (top surface) per read.
        for read_index in xrange(summary.size()):
            read_summary = summary.at(read_index)
            logging.info("Read "+str(read_summary.read().number())+" - Top Surface Mean Error: "+str(error_rate_read_lane_surface[read_index, :, 0].mean()))
<SYSTEM_TASK:>
login user passwd
<END_TASK>
<USER_TASK:>
Description:
def login(self, user, passwd):
    """Authenticate against Security Center.

    Performs the 'auth::login' call and caches the session token that
    Security Center generates, for use by future queries.
    """
    response = self.raw_query(
        'auth', 'login', data={'username': user, 'password': passwd})
    self._token = response['token']
    self._user = response
<SYSTEM_TASK:>
Show the relationships and dependencies for one or more credentials.
<END_TASK>
<USER_TASK:>
Description:
def credential_delete_simulate(self, *ids):
    """Show the relationships and dependencies for one or more credentials.

    :param ids: one or more credential ids
    """
    payload = {'credentials': [{'id': str(cred_id)} for cred_id in ids]}
    return self.raw_query('credential', 'deleteSimulate', data=payload)
<SYSTEM_TASK:>
Delete one or more credentials.
<END_TASK>
<USER_TASK:>
Description:
def credential_delete(self, *ids):
    """Delete one or more credentials.

    :param ids: one or more credential ids
    """
    payload = {'credentials': [{'id': str(cred_id)} for cred_id in ids]}
    return self.raw_query('credential', 'delete', data=payload)
<SYSTEM_TASK:>
plugins
<END_TASK>
<USER_TASK:>
Description:
def plugins(self, plugin_type='all', sort='id', direction='asc',
            size=1000, offset=0, all=True, loops=0, since=None, **filterset):
    """plugins
    Returns a list of of the plugins and their associated families. For
    simplicity purposes, the plugin family names will be injected into the
    plugin data so that only 1 list is returned back with all of the
    information.

    :param plugin_type: plugin class to list ('all', 'active', ...)
    :param sort: field to sort on
    :param direction: 'asc' or 'desc' sort direction
    :param size: page size for each API request
    :param offset: starting offset into the result set
    :param all: when True, keep paging until the result set is exhausted
    :param loops: when ``all`` is False, number of extra pages to fetch
    :param since: optional date; only return plugins updated since then
    :param filterset: a single field=value pair used as a query filter
    """
    plugins = []
    # First we need to generate the basic payload that we will be augmenting
    # to build each page request.
    payload = {
        'size': size,
        'offset': offset,
        'type': plugin_type,
        'sortField': sort,
        'sortDirection': direction.upper(),
    }
    # If there was a filter given, we will need to populate that.
    if len(filterset) > 0:
        fname = list(filterset.keys())[0]
        if fname in self._xrefs:
            # xref filters are addressed as 'xrefs:NAME' with dashes.
            fname = 'xrefs:%s' % fname.replace('_', '-')
        payload['filterField'] = fname
        payload['filterString'] = filterset[list(filterset.keys())[0]]
    # We also need to check if there was a datetime object sent to us and
    # parse that down if given.
    if since is not None and isinstance(since, date):
        payload['since'] = calendar.timegm(since.utctimetuple())
    # And now we run through the loop needed to pull all of the data. This
    # may take some time even though we are pulling large data sets. At the
    # time of development of this module, there were over 55k active plugins
    # and over 7k passive ones.
    while all or loops > 0:
        # First things first, we need to query the data.
        data = self.raw_query('plugin', 'init', data=payload)
        if not data:
            return []
        # This no longer works in 4.4 as the family name is already
        # referenced. Will re-activate this code when I can get a SC4.2
        # instance up and running to test...
        # ---
        # Next we convert the family dictionary list into a flat dictionary.
        #fams = {}
        #for famitem in data['families']:
        #    fams[famitem['id']] = famitem['name']
        # Then we parse through the data set, adding in the family name
        # into the plugin definition before adding it into the plugins list.
        for plugin in data['plugins']:
            # plugin['familyName'] = fams[plugin['familyID']]
            plugins.append(plugin)
        # ---
        # Next its time to increment the offset so that we get a new data
        # set. We will also check here to see if the length really is the
        # same as whats specified in the size variable. If it isnt, then
        # we have reached the end of the dataset and might as well stop
        # looping.
        if len(data['plugins']) < size:
            all = False
            loops = 0
        else:
            loops -= 1
        payload['offset'] += len(data['plugins'])
    return plugins
<SYSTEM_TASK:>
plugin_counts
<END_TASK>
<USER_TASK:>
Description:
def plugin_counts(self):
    """Return the plugin counts as a dictionary.

    The 'total' key is always present; per-class entries ('active',
    'passive', ...) with last-updated information are included when the
    Security Center instance provides them.
    """
    data = self.raw_query('plugin', 'init')
    counts = {'total': data['pluginCount']}
    # 'lastUpdates' is undocumented in the API but returned by SC 4.4;
    # degrade gracefully when it is absent (e.g. SC 4.2).
    if 'lastUpdates' in data:
        for item in ('active', 'passive', 'compliance', 'custom', 'event'):
            itemdata = data['lastUpdates'].get(item, {})
            itemdata['count'] = data.get(item, 0)
            counts[item] = itemdata
    return counts
<SYSTEM_TASK:>
ip_info
<END_TASK>
<USER_TASK:>
Description:
def ip_info(self, ip, repository_ids=None):
    """Return information about an IP address.

    :param ip: the IP address to look up
    :param repository_ids: optional list of repository ids to search
    """
    repos = [{'id': rid} for rid in (repository_ids or [])]
    return self.raw_query('vuln', 'getIP', data={
        'ip': ip, 'repositories': repos})
<SYSTEM_TASK:>
List scans stored in Security Center in a given time range.
<END_TASK>
<USER_TASK:>
Description:
def scan_list(self, start_time=None, end_time=None, **kwargs):
    """List scans stored in Security Center in a given time range.

    Times are UNIX timestamps, assumed to be UTC. ``datetime`` values
    are used as-is. ``end_time`` defaults to now, and ``start_time``
    defaults to 30 days before ``end_time``.

    :param start_time: start of range to filter
    :type start_time: date, datetime, int
    :param end_time: end of range to filter
    :type end_time: date, datetime, int
    :return: list of dictionaries representing scans
    """
    def to_datetime(value, fallback):
        # int-like values are treated as UNIX timestamps; datetime
        # values make int() raise TypeError and pass through unchanged,
        # while None picks up the lazily-computed fallback.
        try:
            return datetime.utcfromtimestamp(int(value))
        except TypeError:
            return fallback() if value is None else value

    end_time = to_datetime(end_time, datetime.utcnow)
    start_time = to_datetime(
        start_time, lambda: end_time - timedelta(days=30))
    data = {"startTime": calendar.timegm(start_time.utctimetuple()),
            "endTime": calendar.timegm(end_time.utctimetuple())}
    data.update(kwargs)
    return self.raw_query("scanResult", "getRange", data=data)["scanResults"]
<SYSTEM_TASK:>
report_import Report_Name, filename
<END_TASK>
<USER_TASK:>
Description:
def report_import(self, name, filename):
    """Upload a report template into the current user's reports.

    UN-DOCUMENTED CALL: This function is not considered stable.

    :param name: name to give the imported report
    :param filename: path of the report template file to upload
    """
    uploaded = self._upload(filename)
    payload = {'filename': uploaded['filename'], 'name': name}
    return self.raw_query('report', 'import', data=payload)
<SYSTEM_TASK:>
Load a geotiff raster keeping ndv values using a masked array
<END_TASK>
<USER_TASK:>
Description:
def load_tiff(file):
    """
    Load a geotiff raster as a masked array, masking the no-data value.

    Usage:
        data = load_tiff(file)
    """
    # Only the no-data value is needed from the geo metadata tuple.
    ndv = get_geo_info(file)[0]
    raw = gdalnumeric.LoadFile(file)
    return np.ma.masked_array(raw, mask=raw == ndv, fill_value=ndv)
<SYSTEM_TASK:>
Compute Gamma Index of Spatial Autocorrelation for GeoRaster
<END_TASK>
<USER_TASK:>
Description:
def pysal_Gamma(self, **kwargs):
    """
    Compute Gamma Index of Spatial Autocorrelation for GeoRaster

    Usage:
        geo.pysal_Gamma(permutations = 1000, rook=True, operation='c')

    arguments passed to raster_weights() and pysal.Gamma
    See help(gr.raster_weights), help(pysal.Gamma) for options
    """
    # Lazily build the spatial weights matrix on first use.
    if self.weights is None:
        self.raster_weights(**kwargs)
    flat = self.raster.flatten()
    valid = flat[flat.mask == False]
    self.Gamma = pysal.Gamma(valid, self.weights, **kwargs)
<SYSTEM_TASK:>
Compute join count statistics for GeoRaster
<END_TASK>
<USER_TASK:>
Description:
def pysal_Join_Counts(self, **kwargs):
    """
    Compute join count statistics for GeoRaster

    Usage:
        geo.pysal_Join_Counts(permutations = 1000, rook=True)

    arguments passed to raster_weights() and pysal.Join_Counts
    See help(gr.raster_weights), help(pysal.Join_Counts) for options
    """
    # Lazily build the spatial weights matrix on first use.
    if self.weights is None:
        self.raster_weights(**kwargs)
    flat = self.raster.flatten()
    valid = flat[flat.mask == False]
    self.Join_Counts = pysal.Join_Counts(valid, self.weights, **kwargs)
<SYSTEM_TASK:>
Compute Moran's I measure of global spatial autocorrelation for GeoRaster
<END_TASK>
<USER_TASK:>
Description:
def pysal_Moran(self, **kwargs):
    """
    Compute Moran's I measure of global spatial autocorrelation for GeoRaster

    Usage:
        geo.pysal_Moran(permutations = 1000, rook=True)

    arguments passed to raster_weights() and pysal.Moran
    See help(gr.raster_weights), help(pysal.Moran) for options
    """
    # Lazily build the spatial weights matrix on first use.
    if self.weights is None:
        self.raster_weights(**kwargs)
    flat = self.raster.flatten()
    valid = flat[flat.mask == False]
    self.Moran = pysal.Moran(valid, self.weights, **kwargs)
<SYSTEM_TASK:>
Compute Local Moran's I measure of local spatial autocorrelation for GeoRaster
<END_TASK>
<USER_TASK:>
Description:
def pysal_Moran_Local(self, **kwargs):
    """
    Compute Local Moran's I measure of local spatial autocorrelation for GeoRaster

    Usage:
        geo.pysal_Moran_Local(permutations = 1000, rook=True)

    arguments passed to raster_weights() and pysal.Moran_Local
    See help(gr.raster_weights), help(pysal.Moran_Local) for options
    """
    # Lazily build the spatial weights matrix on first use.
    if self.weights is None:
        self.raster_weights(**kwargs)
    rasterf = self.raster.flatten()
    rasterf = rasterf[rasterf.mask==False]
    self.Moran_Local = pysal.Moran_Local(rasterf, self.weights, **kwargs)
    # Map each 1-d array attribute of the result back onto the raster grid.
    # NOTE(review): the condition parses as "A or (B and C)" due to
    # operator precedence, so masked arrays are mapped regardless of
    # their rank; confirm "(A or B) and C" was not the intent.
    for i in self.Moran_Local.__dict__.keys():
        if (isinstance(getattr(self.Moran_Local, i), np.ma.masked_array) or
            (isinstance(getattr(self.Moran_Local, i), np.ndarray)) and
            len(getattr(self.Moran_Local, i).shape) == 1):
            setattr(self.Moran_Local, i, self.map_vector(getattr(self.Moran_Local, i)))
<SYSTEM_TASK:>
Setup MCP_Geometric object from skimage for optimal travel time computations
<END_TASK>
<USER_TASK:>
Description:
def mcp(self, *args, **kwargs):
    """
    Set up an MCP_Geometric object (from skimage) for optimal
    travel-time computations over this raster.

    Extra positional and keyword arguments are forwarded to
    graph.MCP_Geometric.
    """
    # The raster itself acts as the cost surface to traverse.
    cost_surface = self.raster
    self.mcp_cost = graph.MCP_Geometric(cost_surface, *args, **kwargs)
<SYSTEM_TASK:>
Send a JSON RPC notification to the client.
<END_TASK>
<USER_TASK:>
Description:
def notify(self, method, params=None):
    """Send a JSON RPC notification to the client.

    Args:
        method (str): The method name of the notification to send
        params (any): The payload of the notification
    """
    log.debug('Sending notification: %s %s', method, params)
    # Notifications carry no 'id' field, so no response is expected.
    body = {'jsonrpc': JSONRPC_VERSION, 'method': method}
    if params is not None:
        body['params'] = params
    self._consumer(body)
<SYSTEM_TASK:>
Send a JSON RPC request to the client.
<END_TASK>
<USER_TASK:>
Description:
def request(self, method, params=None):
    """Send a JSON RPC request to the client.

    Args:
        method (str): The method name of the message to send
        params (any): The payload of the message

    Returns:
        Future that will resolve once a response has been received
    """
    msg_id = self._id_generator()
    log.debug('Sending request with id %s: %s %s', msg_id, method, params)
    body = {'jsonrpc': JSONRPC_VERSION, 'id': msg_id, 'method': method}
    if params is not None:
        body['params'] = params
    # The future resolves when _handle_response sees this id; cancelling
    # it locally notifies the client via the cancel callback.
    pending = futures.Future()
    pending.add_done_callback(self._cancel_callback(msg_id))
    self._server_request_futures[msg_id] = pending
    self._consumer(body)
    return pending
<SYSTEM_TASK:>
Construct a cancellation callback for the given request ID.
<END_TASK>
<USER_TASK:>
Description:
def _cancel_callback(self, request_id):
    """Construct a cancellation callback for the given request ID.

    The returned callback is attached to a server->client request
    future; when that future is cancelled locally it tells the client
    to abandon the request via a cancel notification.
    """
    def callback(future):
        if future.cancelled():
            self.notify(CANCEL_METHOD, {'id': request_id})
            # NOTE(review): set_exception() on an already-cancelled
            # concurrent.futures.Future raises InvalidStateError on
            # Python >= 3.8 -- confirm which Future implementation is
            # in use here.
            future.set_exception(JsonRpcRequestCancelled())
    return callback
<SYSTEM_TASK:>
Consume a JSON RPC message from the client.
<END_TASK>
<USER_TASK:>
Description:
def consume(self, message):
    """Consume a JSON RPC message from the client.

    Dispatches to the notification, response or request handler based
    on which of the 'id' and 'method' fields are present.

    Args:
        message (dict): The JSON RPC message sent by the client
    """
    if 'jsonrpc' not in message or message['jsonrpc'] != JSONRPC_VERSION:
        log.warn("Unknown message type %s", message)
        return
    if 'id' not in message:
        # No id: a notification (no response expected).
        log.debug("Handling notification from client %s", message)
        self._handle_notification(message['method'], message.get('params'))
    elif 'method' not in message:
        # id but no method: a response to one of our own requests.
        log.debug("Handling response from client %s", message)
        self._handle_response(message['id'], message.get('result'), message.get('error'))
    else:
        # Both id and method: a request that must be answered.
        try:
            log.debug("Handling request from client %s", message)
            self._handle_request(message['id'], message['method'], message.get('params'))
        except JsonRpcException as e:
            log.exception("Failed to handle request %s", message['id'])
            self._consumer({
                'jsonrpc': JSONRPC_VERSION,
                'id': message['id'],
                'error': e.to_dict()
            })
        except Exception:  # pylint: disable=broad-except
            # Unexpected failures are reported as an internal error so
            # the client still receives a well-formed response.
            log.exception("Failed to handle request %s", message['id'])
            self._consumer({
                'jsonrpc': JSONRPC_VERSION,
                'id': message['id'],
                'error': JsonRpcInternalError.of(sys.exc_info()).to_dict()
            })
<SYSTEM_TASK:>
Handle a notification from the client.
<END_TASK>
<USER_TASK:>
Description:
def _handle_notification(self, method, params):
    """Handle a notification from the client.

    Cancel notifications are handled specially; all others are looked
    up in the dispatcher. A handler may return a callable, which is
    then executed asynchronously on the executor service.
    """
    if method == CANCEL_METHOD:
        self._handle_cancel_notification(params['id'])
        return
    try:
        handler = self._dispatcher[method]
    except KeyError:
        log.warn("Ignoring notification for unknown method %s", method)
        return
    try:
        handler_result = handler(params)
    except Exception:  # pylint: disable=broad-except
        # Notifications have no response channel, so failures are only logged.
        log.exception("Failed to handle notification %s: %s", method, params)
        return
    if callable(handler_result):
        # The handler deferred its work: run it on the thread pool.
        log.debug("Executing async notification handler %s", handler_result)
        notification_future = self._executor_service.submit(handler_result)
        notification_future.add_done_callback(self._notification_callback(method, params))
<SYSTEM_TASK:>
Construct a notification callback for the given request ID.
<END_TASK>
<USER_TASK:>
Description:
def _notification_callback(method, params):
    """Construct a completion callback for an async notification handler.

    The callback only logs the outcome; notification results are never
    sent back to the client.
    """
    def callback(future):
        try:
            future.result()
            log.debug("Successfully handled async notification %s %s", method, params)
        except Exception:  # pylint: disable=broad-except
            log.exception("Failed to handle async notification %s %s", method, params)
    return callback
<SYSTEM_TASK:>
Handle a cancel notification from the client.
<END_TASK>
<USER_TASK:>
Description:
def _handle_cancel_notification(self, msg_id):
    """Handle a cancel notification from the client.

    Cancellation only succeeds when the request has not yet started
    executing on the executor.
    """
    pending = self._client_request_futures.pop(msg_id, None)
    if not pending:
        log.warn("Received cancel notification for unknown message id %s", msg_id)
        return
    if pending.cancel():
        log.debug("Cancelled request with id %s", msg_id)
<SYSTEM_TASK:>
Handle a request from the client.
<END_TASK>
<USER_TASK:>
Description:
def _handle_request(self, msg_id, method, params):
    """Handle a request from the client.

    Looks the method up in the dispatcher and either answers
    immediately (synchronous handler result) or schedules the returned
    callable on the executor service, answering from its done-callback.

    Raises:
        JsonRpcMethodNotFound: when no handler is registered for method.
    """
    try:
        handler = self._dispatcher[method]
    except KeyError:
        raise JsonRpcMethodNotFound.of(method)
    handler_result = handler(params)
    if callable(handler_result):
        # Deferred work: track the future so a cancel notification can
        # abort it, and reply from _request_callback when it completes.
        log.debug("Executing async request handler %s", handler_result)
        request_future = self._executor_service.submit(handler_result)
        self._client_request_futures[msg_id] = request_future
        request_future.add_done_callback(self._request_callback(msg_id))
    else:
        # Synchronous result: reply immediately.
        log.debug("Got result from synchronous request handler: %s", handler_result)
        self._consumer({
            'jsonrpc': JSONRPC_VERSION,
            'id': msg_id,
            'result': handler_result
        })
<SYSTEM_TASK:>
Construct a request callback for the given request ID.
<END_TASK>
<USER_TASK:>
Description:
def _request_callback(self, request_id):
    """Construct a request callback for the given request ID.

    The callback sends the handler's result (or error) back to the
    client once the async request future completes.
    """
    def callback(future):
        # Remove the future from the client requests map
        self._client_request_futures.pop(request_id, None)
        if future.cancelled():
            # NOTE(review): set_exception() on an already-cancelled
            # concurrent.futures.Future raises InvalidStateError on
            # Python >= 3.8; confirm the intended cancellation path.
            future.set_exception(JsonRpcRequestCancelled())
        message = {
            'jsonrpc': JSONRPC_VERSION,
            'id': request_id,
        }
        try:
            message['result'] = future.result()
        except JsonRpcException as e:
            log.exception("Failed to handle request %s", request_id)
            message['error'] = e.to_dict()
        except Exception:  # pylint: disable=broad-except
            log.exception("Failed to handle request %s", request_id)
            message['error'] = JsonRpcInternalError.of(sys.exc_info()).to_dict()
        self._consumer(message)
    return callback
<SYSTEM_TASK:>
Handle a response from the client.
<END_TASK>
<USER_TASK:>
Description:
def _handle_response(self, msg_id, result=None, error=None):
    """Handle a response from the client.

    Resolves the pending server->client request future with either the
    reported error (as a JsonRpcException) or the result. Responses to
    unknown ids are logged and dropped.
    """
    request_future = self._server_request_futures.pop(msg_id, None)
    if not request_future:
        log.warn("Received response to unknown message id %s", msg_id)
        return
    if error is not None:
        log.debug("Received error response to message %s: %s", msg_id, error)
        request_future.set_exception(JsonRpcException.from_dict(error))
        # Bug fix: the original fell through and also called
        # set_result(), which raises InvalidStateError on a future whose
        # exception has already been set.
        return
    log.debug("Received result for message %s: %s", msg_id, result)
    request_future.set_result(result)
<SYSTEM_TASK:>
Blocking call to listen for messages on the rfile.
<END_TASK>
<USER_TASK:>
Description:
def listen(self, message_consumer):
    """Blocking call to listen for messages on the rfile.

    Args:
        message_consumer (fn): function that is passed each message as it is read off the socket.
    """
    while not self._rfile.closed:
        raw = self._read_message()
        if raw is None:
            # EOF or truncated headers: stop listening.
            break
        try:
            message_consumer(json.loads(raw.decode('utf-8')))
        except ValueError:
            log.exception("Failed to parse JSON message %s", raw)
            continue
<SYSTEM_TASK:>
Reads the contents of a message.
<END_TASK>
<USER_TASK:>
Description:
def _read_message(self):
    """Reads the contents of a message.

    Returns:
        body of message if parsable else None
    """
    line = self._rfile.readline()
    if not line:
        return None
    # The first line must carry the Content-Length header.
    content_length = self._content_length(line)
    # Blindly consume the remaining header lines up to the blank
    # separator; bail out on EOF.
    while line and line.strip():
        line = self._rfile.readline()
    if not line:
        return None
    # Grab the body.
    return self._rfile.read(content_length)
<SYSTEM_TASK:>
Extract the content length from an input line.
<END_TASK>
<USER_TASK:>
Description:
def _content_length(line):
"""Extract the content length from an input line.""" |
if line.startswith(b'Content-Length: '):
_, value = line.split(b'Content-Length: ')
value = value.strip()
try:
return int(value)
except ValueError:
raise ValueError("Invalid Content-Length header: {}".format(value))
return None |
<SYSTEM_TASK:>
Return a generator with information about each host API.
<END_TASK>
<USER_TASK:>
Description:
def hostapi_info(index=None):
    """Return a generator with information about each host API.

    If index is given, only one dictionary for the given host API is
    returned. Raises RuntimeError for an invalid index.
    """
    if index is None:
        # Yield one info dict per available host API.
        return (hostapi_info(i) for i in range(_pa.Pa_GetHostApiCount()))
    info = _pa.Pa_GetHostApiInfo(index)
    if not info:
        raise RuntimeError("Invalid host API")
    assert info.structVersion == 1
    return {'name': ffi.string(info.name).decode(errors='ignore'),
            'default_input_device': info.defaultInputDevice,
            'default_output_device': info.defaultOutputDevice}
<SYSTEM_TASK:>
Return a generator with information about each device.
<END_TASK>
<USER_TASK:>
Description:
def device_info(index=None):
    """Return a generator with information about each device.

    If index is given, only one dictionary for the given device is
    returned. Raises RuntimeError for an invalid index.
    """
    if index is None:
        # Yield one info dict per available device.
        return (device_info(i) for i in range(_pa.Pa_GetDeviceCount()))
    info = _pa.Pa_GetDeviceInfo(index)
    if not info:
        raise RuntimeError("Invalid device")
    assert info.structVersion == 2
    # DirectSound device names use the Windows ANSI code page ('mbcs');
    # all other host APIs report UTF-8.
    if 'DirectSound' in hostapi_info(info.hostApi)['name']:
        encoding = 'mbcs'
    else:
        encoding = 'utf-8'
    name = ffi.string(info.name).decode(encoding=encoding, errors='ignore')
    return {'name': name,
            'hostapi': info.hostApi,
            'max_input_channels': info.maxInputChannels,
            'max_output_channels': info.maxOutputChannels,
            'default_low_input_latency': info.defaultLowInputLatency,
            'default_low_output_latency': info.defaultLowOutputLatency,
            'default_high_input_latency': info.defaultHighInputLatency,
            'default_high_output_latency': info.defaultHighOutputLatency,
            'default_samplerate': info.defaultSampleRate}
<SYSTEM_TASK:>
Create NumPy array from a pointer to some memory.
<END_TASK>
<USER_TASK:>
Description:
def _frombuffer(ptr, frames, channels, dtype):
    """Create a NumPy array view over a cffi memory pointer.

    The result has one row per frame and one column per channel.
    """
    nbytes = frames * channels * dtype.itemsize
    flat = np.frombuffer(ffi.buffer(ptr, nbytes), dtype=dtype)
    return flat.reshape(-1, channels)
<SYSTEM_TASK:>
Commence audio processing.
<END_TASK>
<USER_TASK:>
Description:
def start(self):
    """Commence audio processing.

    If successful, the stream is considered active.
    """
    err = _pa.Pa_StartStream(self._stream)
    # Starting an already-running stream is treated as a no-op.
    if err != _pa.paStreamIsNotStopped:
        self._handle_error(err)
<SYSTEM_TASK:>
Terminate audio processing.
<END_TASK>
<USER_TASK:>
Description:
def stop(self):
    """Terminate audio processing.

    This waits until all pending audio buffers have been played
    before it returns. If successful, the stream is considered
    inactive.
    """
    err = _pa.Pa_StopStream(self._stream)
    # Stopping an already-stopped stream is treated as a no-op.
    if err != _pa.paStreamIsStopped:
        self._handle_error(err)
<SYSTEM_TASK:>
Terminate audio processing immediately.
<END_TASK>
<USER_TASK:>
Description:
def abort(self):
    """Terminate audio processing immediately.

    This does not wait for pending audio buffers. If successful,
    the stream is considered inactive.
    """
    err = _pa.Pa_AbortStream(self._stream)
    # Aborting an already-stopped stream is treated as a no-op.
    if err != _pa.paStreamIsStopped:
        self._handle_error(err)
<SYSTEM_TASK:>
Read samples from an input stream.
<END_TASK>
<USER_TASK:>
Description:
def read(self, frames, raw=False):
    """Read samples from an input stream.

    The function does not return until the required number of frames
    has been read; this may involve waiting for the operating system
    to supply the data.

    If ``raw`` is true, the raw cffi data buffer is returned.
    Otherwise a numpy array of the appropriate dtype with one column
    per channel is returned.
    """
    channels, _ = _split(self.channels)
    dtype, _ = _split(self.dtype)
    buf = ffi.new("signed char[]", channels * dtype.itemsize * frames)
    self._handle_error(_pa.Pa_ReadStream(self._stream, buf, frames))
    if raw:
        return buf
    samples = np.frombuffer(ffi.buffer(buf), dtype=dtype)
    samples.shape = frames, channels
    return samples
<SYSTEM_TASK:>
Write samples to an output stream.
<END_TASK>
<USER_TASK:>
Description:
def write(self, data):
    """Write samples to an output stream.

    As much as one blocksize of audio data will be played without
    blocking. If more than one blocksize was provided, the function
    will only return when all but one blocksize has been played.

    Data will be converted to a numpy matrix. Multichannel data should
    be provided as a (frames, channels) matrix. If the data is provided
    as a 1-dim array, it will be treated as mono data and will be
    played on all channels simultaneously. If the data is provided as a
    2-dim matrix and fewer tracks are provided than channels, silence
    will be played on the missing channels. Similarly, if more tracks
    are provided than there are channels, the extraneous channels will
    not be played.
    """
    frames = len(data)
    _, channels = _split(self.channels)
    _, dtype = _split(self.dtype)
    if (not isinstance(data, np.ndarray) or data.dtype != dtype):
        data = np.array(data, dtype=dtype)
    if len(data.shape) == 1:
        # Play mono signals on all channels.
        data = np.tile(data, (channels, 1)).T
    if data.shape[1] > channels:
        # More tracks than channels: drop the extraneous ones.
        data = data[:, :channels]
    if data.shape < (frames, channels):
        # Fewer tracks than channels: pad with silence.
        tmp = data
        data = np.zeros((frames, channels), dtype=dtype)
        data[:tmp.shape[0], :tmp.shape[1]] = tmp
    # Bug fix: ndarray.tostring() was deprecated in NumPy 1.19 and
    # removed in NumPy 2.0; tobytes() is the byte-identical replacement.
    payload = data.ravel().tobytes()
    err = _pa.Pa_WriteStream(self._stream, payload, frames)
    self._handle_error(err)
<SYSTEM_TASK:>
Command 'supervisord shell' runs the interactive command shell.
<END_TASK>
<USER_TASK:>
Description:
def _handle_shell(self,cfg_file,*args,**options):
    """Command 'supervisord shell' runs the interactive command shell."""
    # Point supervisorctl at the merged config and force interactive mode.
    ctl_args = ("-c", cfg_file, "--interactive") + args
    return supervisorctl.main(ctl_args)
<SYSTEM_TASK:>
Command 'supervisor getconfig' prints merged config to stdout.
<END_TASK>
<USER_TASK:>
Description:
def _handle_getconfig(self,cfg_file,*args,**options):
"""Command 'supervisor getconfig' prints merged config to stdout.""" |
if args:
raise CommandError("supervisor getconfig takes no arguments")
print cfg_file.read()
return 0 |
<SYSTEM_TASK:>
Command 'supervisor autoreload' watches for code changes.
<END_TASK>
<USER_TASK:>
Description:
def _handle_autoreload(self,cfg_file,*args,**options):
    """Command 'supervisor autoreload' watches for code changes.

    This command provides a simulation of the Django dev server's
    auto-reloading mechanism that will restart all supervised processes.
    It's not quite as accurate as Django's autoreloader because it runs
    in a separate process, so it doesn't know the precise set of modules
    that have been loaded. Instead, it tries to watch all python files
    that are "nearby" the files loaded at startup by Django.
    """
    if args:
        raise CommandError("supervisor autoreload takes no arguments")
    live_dirs = self._find_live_code_dirs()
    reload_progs = self._get_autoreload_programs(cfg_file)

    def autoreloader():
        """Fork a subprocess to make the restart call.

        Otherwise supervisord might kill us and cancel the restart!
        """
        if os.fork() == 0:
            sys.exit(self.handle("restart", *reload_progs, **options))

    # Call the autoreloader callback whenever a .py file changes.
    # To prevent thrashing, limit callbacks to one per second.
    handler = CallbackModifiedHandler(callback=autoreloader,
                                      repeat_delay=1,
                                      patterns=AUTORELOAD_PATTERNS,
                                      ignore_patterns=AUTORELOAD_IGNORE,
                                      ignore_directories=True)
    # Try to add watches using the platform-specific observer.
    # If this fails, print a warning and fall back to the PollingObserver.
    # This avoids errors with e.g. too many inotify watches.
    from watchdog.observers import Observer
    from watchdog.observers.polling import PollingObserver
    # Bug fix: the original started and immediately stopped the observer
    # before starting it again below, which raises RuntimeError because
    # a thread can only be started once; it also assigned `observer`
    # unconditionally inside the loop, so its "observer is None" failure
    # check could never trigger. We now only assign `observer` once a
    # candidate has been scheduled successfully.
    observer = None
    for ObserverCls in (Observer, PollingObserver):
        candidate = ObserverCls()
        try:
            for live_dir in set(live_dirs):
                candidate.schedule(handler, live_dir, True)
        except Exception:
            sys.stderr.write("COULD NOT WATCH FILESYSTEM USING\n")
            sys.stderr.write("OBSERVER CLASS: %r\n" % (ObserverCls,))
            traceback.print_exc()
        else:
            observer = candidate
            break
    # Fail out if none of the observers worked.
    if observer is None:
        sys.stderr.write("COULD NOT WATCH FILESYSTEM\n")
        return 1
    observer.start()
    try:
        # Block until interrupted; the observer thread does the work.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
    return 0
<SYSTEM_TASK:>
Get the set of programs to auto-reload when code changes.
<END_TASK>
<USER_TASK:>
Description:
def _get_autoreload_programs(self,cfg_file):
"""Get the set of programs to auto-reload when code changes.
Such programs will have autoreload=true in their config section.
This can be affected by config file sections or command-line
arguments, so we need to read it out of the merged config.
""" |
cfg = RawConfigParser()
cfg.readfp(cfg_file)
reload_progs = []
for section in cfg.sections():
if section.startswith("program:"):
try:
if cfg.getboolean(section,"autoreload"):
reload_progs.append(section.split(":",1)[1])
except NoOptionError:
pass
return reload_progs |
<SYSTEM_TASK:>
Find all directories in which we might have live python code.
<END_TASK>
<USER_TASK:>
Description:
def _find_live_code_dirs(self):
"""Find all directories in which we might have live python code.
This walks all of the currently-imported modules and adds their
containing directory to the list of live dirs. After normalization
and de-duplication, we get a pretty good approximation of the
directories on sys.path that are actively in use.
""" |
live_dirs = []
for mod in sys.modules.values():
# Get the directory containing that module.
# This is deliberately casting a wide net.
try:
dirnm = os.path.dirname(mod.__file__)
except AttributeError:
continue
# Normalize it for comparison purposes.
dirnm = os.path.realpath(os.path.abspath(dirnm))
if not dirnm.endswith(os.sep):
dirnm += os.sep
# Check that it's not an egg or some other wierdness
if not os.path.isdir(dirnm):
continue
# If it's a subdir of one we've already found, ignore it.
for dirnm2 in live_dirs:
if dirnm.startswith(dirnm2):
break
else:
# Remove any ones we've found that are subdirs of it.
live_dirs = [dirnm2 for dirnm2 in live_dirs\
if not dirnm2.startswith(dirnm)]
live_dirs.append(dirnm)
return live_dirs |
<SYSTEM_TASK:>
Render the given config data using Django's template system.
<END_TASK>
<USER_TASK:>
Description:
def render_config(data,ctx):
"""Render the given config data using Django's template system.
This function takes a config data string and a dict of context variables,
renders the data through Django's template system, and returns the result.
""" |
djsupervisor_tags.current_context = ctx
data = "{% load djsupervisor_tags %}" + data
t = template.Template(data)
c = template.Context(ctx)
return t.render(c).encode("ascii") |
<SYSTEM_TASK:>
Get config file fragment reflecting command-line options.
<END_TASK>
<USER_TASK:>
Description:
def get_config_from_options(**options):
"""Get config file fragment reflecting command-line options.""" |
data = []
# Set whether or not to daemonize.
# Unlike supervisord, our default is to stay in the foreground.
data.append("[supervisord]\n")
if options.get("daemonize",False):
data.append("nodaemon=false\n")
else:
data.append("nodaemon=true\n")
if options.get("pidfile",None):
data.append("pidfile=%s\n" % (options["pidfile"],))
if options.get("logfile",None):
data.append("logfile=%s\n" % (options["logfile"],))
# Set which programs to launch automatically on startup.
for progname in options.get("launch",None) or []:
data.append("[program:%s]\nautostart=true\n" % (progname,))
for progname in options.get("nolaunch",None) or []:
data.append("[program:%s]\nautostart=false\n" % (progname,))
# Set which programs to include/exclude from the config
for progname in options.get("include",None) or []:
data.append("[program:%s]\nexclude=false\n" % (progname,))
for progname in options.get("exclude",None) or []:
data.append("[program:%s]\nexclude=true\n" % (progname,))
# Set which programs to autoreload when code changes.
# When this option is specified, the default for all other
# programs becomes autoreload=false.
if options.get("autoreload",None):
data.append("[program:autoreload]\nexclude=false\nautostart=true\n")
data.append("[program:__defaults__]\nautoreload=false\n")
for progname in options["autoreload"]:
data.append("[program:%s]\nautoreload=true\n" % (progname,))
# Set whether to use the autoreloader at all.
if options.get("noreload",False):
data.append("[program:autoreload]\nexclude=true\n")
return "".join(data) |
<SYSTEM_TASK:>
Find the top-level Django project directory.
<END_TASK>
<USER_TASK:>
Description:
def guess_project_dir():
"""Find the top-level Django project directory.
This function guesses the top-level Django project directory based on
the current environment. It looks for module containing the currently-
active settings module, in both pre-1.4 and post-1.4 layours.
""" |
projname = settings.SETTINGS_MODULE.split(".",1)[0]
projmod = import_module(projname)
projdir = os.path.dirname(projmod.__file__)
# For Django 1.3 and earlier, the manage.py file was located
# in the same directory as the settings file.
if os.path.isfile(os.path.join(projdir,"manage.py")):
return projdir
# For Django 1.4 and later, the manage.py file is located in
# the directory *containing* the settings file.
projdir = os.path.abspath(os.path.join(projdir, os.path.pardir))
if os.path.isfile(os.path.join(projdir,"manage.py")):
return projdir
msg = "Unable to determine the Django project directory;"\
" use --project-dir to specify it"
raise RuntimeError(msg) |
<SYSTEM_TASK:>
If the given option is missing, set to the given value.
<END_TASK>
<USER_TASK:>
Description:
def set_if_missing(cfg,section,option,value):
"""If the given option is missing, set to the given value.""" |
try:
cfg.get(section,option)
except NoSectionError:
cfg.add_section(section)
cfg.set(section,option,value)
except NoOptionError:
cfg.set(section,option,value) |
<SYSTEM_TASK:>
Helper function to re-render command-line options.
<END_TASK>
<USER_TASK:>
Description:
def rerender_options(options):
"""Helper function to re-render command-line options.
This assumes that command-line options use the same name as their
key in the options dictionary.
""" |
args = []
for name,value in options.iteritems():
name = name.replace("_","-")
if value is None:
pass
elif isinstance(value,bool):
if value:
args.append("--%s" % (name,))
elif isinstance(value,list):
for item in value:
args.append("--%s=%s" % (name,item))
else:
args.append("--%s=%s" % (name,value))
return " ".join(args) |
<SYSTEM_TASK:>
Logs the user in and setups the header with the private token
<END_TASK>
<USER_TASK:>
Description:
def login(self, email=None, password=None, user=None):
"""
Logs the user in and setups the header with the private token
:param email: Gitlab user Email
:param user: Gitlab username
:param password: Gitlab user password
:return: True if login successful
:raise: HttpError
:raise: ValueError
""" |
if user is not None:
data = {'login': user, 'password': password}
elif email is not None:
data = {'email': email, 'password': password}
else:
raise ValueError('Neither username nor email provided to login')
self.headers = {'connection': 'close'}
response = self.post('/session', **data)
self.token = response['private_token']
self.headers = {'PRIVATE-TOKEN': self.token,
'connection': 'close'}
return response |
<SYSTEM_TASK:>
Get info for a user identified by id
<END_TASK>
<USER_TASK:>
Description:
def getuser(self, user_id):
"""
Get info for a user identified by id
:param user_id: id of the user
:return: False if not found, a dictionary if found
""" |
request = requests.get(
'{0}/{1}'.format(self.users_url, user_id),
headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
if request.status_code == 200:
return request.json()
else:
return False |
<SYSTEM_TASK:>
Deletes a user. Available only for administrators.
<END_TASK>
<USER_TASK:>
Description:
def deleteuser(self, user_id):
"""
Deletes a user. Available only for administrators.
This is an idempotent function, calling this function for a non-existent user id
still returns a status code 200 OK.
The JSON response differs if the user was actually deleted or not.
In the former the user is returned and in the latter not.
.. warning:: Warning this is being deprecated please use :func:`gitlab.Gitlab.delete_user`
:param user_id: The ID of the user
:return: True if it deleted, False if it couldn't
""" |
deleted = self.delete_user(user_id)
if deleted is False:
return False
else:
return True |
<SYSTEM_TASK:>
Returns the current user parameters. The current user is linked to the secret token
<END_TASK>
<USER_TASK:>
Description:
def currentuser(self):
"""
Returns the current user parameters. The current user is linked to the secret token
:return: a list with the current user properties
""" |
request = requests.get(
'{0}/api/v3/user'.format(self.host),
headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout)
return request.json() |
<SYSTEM_TASK:>
Edits an user data.
<END_TASK>
<USER_TASK:>
Description:
def edituser(self, user_id, **kwargs):
"""
Edits an user data.
:param user_id: id of the user to change
:param kwargs: Any param the the Gitlab API supports
:return: Dict of the user
""" |
data = {}
if kwargs:
data.update(kwargs)
request = requests.put(
'{0}/{1}'.format(self.users_url, user_id),
headers=self.headers, data=data, timeout=self.timeout, verify=self.verify_ssl, auth=self.auth)
if request.status_code == 200:
return request.json()
else:
return False |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.