text_prompt: string, lengths 157 – 13.1k
code_prompt: string, lengths 7 – 19.8k
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def post(self, request, *args, **kwargs): """ Returns POST response. :param request: the request instance. :rtype: django.http.HttpResponse. """
form = None link_type = int(request.POST.get('link_type', 0)) if link_type == Link.LINK_TYPE_EMAIL: form = EmailLinkForm(**self.get_form_kwargs()) elif link_type == Link.LINK_TYPE_EXTERNAL: form = ExternalLinkForm(**self.get_form_kwargs()) if form: if form.is_valid(): return self.form_valid(form) else: return self.form_invalid(form) else: raise Http404()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_form_class(self): """ Returns form class to use in the view. :rtype: django.forms.ModelForm. """
if self.object.link_type == Link.LINK_TYPE_EMAIL: return EmailLinkForm elif self.object.link_type == Link.LINK_TYPE_EXTERNAL: return ExternalLinkForm return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_password(self, service, username, password): """Write the password to the registry """
# encrypt the password password_encrypted = _win_crypto.encrypt(password.encode('utf-8')) # encode with base64 password_base64 = base64.encodestring(password_encrypted) # encode again to unicode password_saved = password_base64.decode('ascii') # store the password key_name = self._key_for_service(service) hkey = winreg.CreateKey(winreg.HKEY_CURRENT_USER, key_name) winreg.SetValueEx(hkey, username, 0, winreg.REG_SZ, password_saved)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def encrypt(self, password): """Encrypt the password. """
if not password or not self._crypter: return password or b'' return self._crypter.encrypt(password)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def decrypt(self, password_encrypted): """Decrypt the password. """
if not password_encrypted or not self._crypter: return password_encrypted or b'' return self._crypter.decrypt(password_encrypted)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _open(self, mode='r'): """Open the password file in the specified mode """
open_file = None writeable = 'w' in mode or 'a' in mode or '+' in mode try: # NOTE: currently the MemOpener does not split off any filename # which causes errors on close() # so we add a dummy name and open it separately if (self.filename.startswith('mem://') or self.filename.startswith('ram://')): open_file = fs.opener.fsopendir(self.filename).open('kr.cfg', mode) else: if not hasattr(self, '_pyfs'): # reuse the pyfilesystem and path self._pyfs, self._path = fs.opener.opener.parse( self.filename, writeable=writeable) # cache if permitted if self._cache_timeout is not None: self._pyfs = fs.remote.CacheFS( self._pyfs, cache_timeout=self._cache_timeout) open_file = self._pyfs.open(self._path, mode) except fs.errors.ResourceNotFoundError: if self._can_create: segments = fs.opener.opener.split_segments(self.filename) if segments: # this seems broken, but pyfilesystem uses it, so we must fs_name, credentials, url1, url2, path = segments.groups() assert fs_name, 'Should be a remote filesystem' host = '' # allow for domain:port if ':' in url2: split_url2 = url2.split('/', 1) if len(split_url2) > 1: url2 = split_url2[1] else: url2 = '' host = split_url2[0] pyfs = fs.opener.opener.opendir( '%s://%s' % (fs_name, host)) # cache if permitted if self._cache_timeout is not None: pyfs = fs.remote.CacheFS( pyfs, cache_timeout=self._cache_timeout) # NOTE: fs.path.split does not function in the same # way os os.path.split... at least under windows url2_path, url2_filename = os.path.split(url2) if url2_path and not pyfs.exists(url2_path): pyfs.makedir(url2_path, recursive=True) else: # assume local filesystem full_url = fs.opener._expand_syspath(self.filename) # NOTE: fs.path.split does not function in the same # way os os.path.split... at least under windows url2_path, url2 = os.path.split(full_url) pyfs = fs.osfs.OSFS(url2_path) try: # reuse the pyfilesystem and path self._pyfs = pyfs self._path = url2 return pyfs.open(url2, mode) except fs.errors.ResourceNotFoundError: if writeable: raise else: pass # NOTE: ignore read errors as the underlying caller can fail safely if writeable: raise else: pass return open_file
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def config(self): """load the passwords from the config file """
if not hasattr(self, '_config'): raw_config = configparser.RawConfigParser() f = self._open() if f: raw_config.readfp(f) f.close() self._config = raw_config return self._config
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_queryset(self): """ Returns queryset limited to categories with live Entry instances. :rtype: django.db.models.query.QuerySet. """
queryset = super(LiveEntryCategoryManager, self).get_queryset() return queryset.filter(tag__in=[ entry_tag.tag for entry_tag in EntryTag.objects.filter(entry__live=True) ])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_for_tag(self, tag): """ Returns queryset of Entry instances assigned to specified tag, which can be a PK value, a slug value, or a Tag instance. :param tag: tag PK, slug, or instance. :rtype: django.db.models.query.QuerySet. """
tag_filter = {'tag': tag} if isinstance(tag, six.integer_types): tag_filter = {'tag_id': tag} elif isinstance(tag, str): tag_filter = {'tag__slug': tag} return self.filter(id__in=[ entry_tag.entry_id for entry_tag in EntryTag.objects.filter(**tag_filter) ])
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def for_category(self, category, live_only=False): """ Returns queryset of EntryTag instances for specified category. :param category: the Category instance. :param live_only: flag to include only "live" entries. :rtype: django.db.models.query.QuerySet. """
filters = {'tag': category.tag} if live_only: filters.update({'entry__live': True}) return self.filter(**filters)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def related_to(self, entry, live_only=False): """ Returns queryset of Entry instances related to specified Entry instance. :param entry: the Entry instance. :param live_only: flag to include only "live" entries. :rtype: django.db.models.query.QuerySet. """
filters = {'tag__in': entry.tags} if live_only: filters.update({'entry__live': True}) return self.filter(**filters).exclude(entry=entry)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def chosen_view_factory(chooser_cls): """ Returns a ChosenView class that extends specified chooser class. :param chooser_cls: the class to extend. :rtype: class. """
class ChosenView(chooser_cls): #noinspection PyUnusedLocal def get(self, request, *args, **kwargs): """ Returns GET response. :param request: the request instance. :rtype: django.http.HttpResponse. """ #noinspection PyAttributeOutsideInit self.object = self.get_object() return render_modal_workflow( self.request, None, '{0}/chosen.js'.format(self.template_dir), {'obj': self.get_json(self.object)} ) def get_object(self, queryset=None): """ Returns chosen object instance. :param queryset: the queryset instance. :rtype: django.db.models.Model. """ if queryset is None: queryset = self.get_queryset() pk = self.kwargs.get('pk', None) try: return queryset.get(pk=pk) except self.model.DoesNotExist: raise Http404() def post(self, request, *args, **kwargs): """ Returns POST response. :param request: the request instance. :rtype: django.http.HttpResponse. """ return self.get(request, *args, **kwargs) return ChosenView
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get(self, request, *args, **kwargs): """ Returns GET response. :param request: the request instance. :rtype: django.http.HttpResponse. """
#noinspection PyAttributeOutsideInit self.object_list = self.get_queryset() context = self.get_context_data(force_search=True) if self.form_class: context.update({'form': self.get_form()}) if 'q' in request.GET or 'p' in request.GET: return render( request, '{0}/results.html'.format(self.template_dir), context ) else: return render_modal_workflow( request, '{0}/chooser.html'.format(self.template_dir), '{0}/chooser.js'.format(self.template_dir), context )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_entry_tag(sender, instance, created, **kwargs): """ Creates EntryTag for Entry corresponding to specified ItemBase instance. :param sender: the sending ItemBase class. :param instance: the ItemBase instance. """
from ..models import ( Entry, EntryTag ) entry = Entry.objects.get_for_model(instance.content_object)[0] tag = instance.tag if not EntryTag.objects.filter(tag=tag, entry=entry).exists(): EntryTag.objects.create(tag=tag, entry=entry)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_entry_tag(sender, instance, **kwargs): """ Deletes EntryTag for Entry corresponding to specified TaggedItemBase instance. :param sender: the sending TaggedItemBase class. :param instance: the TaggedItemBase instance. """
from ..models import ( Entry, EntryTag ) entry = Entry.objects.get_for_model(instance.content_object)[0] tag = instance.tag EntryTag.objects.filter(tag=tag, entry=entry).delete()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_entry(sender, instance, **kwargs): """ Deletes Entry instance corresponding to specified instance. :param sender: the sending class. :param instance: the instance being deleted. """
from ..models import Entry Entry.objects.get_for_model(instance)[0].delete()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def update_entry_attributes(sender, instance, **kwargs): """ Updates attributes for Entry instance corresponding to specified instance. :param sender: the sending class. :param instance: the instance being saved. """
from ..models import Entry entry = Entry.objects.get_for_model(instance)[0] default_url = instance.get_absolute_url() if hasattr(instance, 'get_absolute_url') else '' entry.title = getattr(instance, 'title', str(instance)) entry.url = getattr(instance, 'url', default_url) entry.live = bool(getattr(instance, 'live', True)) entry.save()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_revisions(page, page_num=1): """ Returns paginated queryset of PageRevision instances for specified Page instance. :param page: the page instance. :param page_num: the pagination page number. :rtype: django.db.models.query.QuerySet. """
revisions = page.revisions.order_by('-created_at') current = page.get_latest_revision() if current: revisions = revisions.exclude(id=current.id) paginator = Paginator(revisions, 5) try: revisions = paginator.page(page_num) except PageNotAnInteger: revisions = paginator.page(1) except EmptyPage: revisions = paginator.page(paginator.num_pages) return revisions
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def page_revisions(request, page_id, template_name='wagtailrollbacks/edit_handlers/revisions.html'): """ Returns GET response for specified page revisions. :param request: the request instance. :param page_id: the page ID. :param template_name: the template name. :rtype: django.http.HttpResponse. """
page = get_object_or_404(Page, pk=page_id) page_perms = page.permissions_for_user(request.user) if not page_perms.can_edit(): raise PermissionDenied page_num = request.GET.get('p', 1) revisions = get_revisions(page, page_num) return render( request, template_name, { 'page': page, 'revisions': revisions, 'p': page_num, } )
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def preview_page_version(request, revision_id): """ Returns GET response for specified page preview. :param request: the request instance. :param revision_id: the page revision ID. :rtype: django.http.HttpResponse. """
revision = get_object_or_404(PageRevision, pk=revision_id) if not revision.page.permissions_for_user(request.user).can_publish(): raise PermissionDenied page = revision.as_page_object() request.revision_id = revision_id return page.serve_preview(request, page.default_preview_mode)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _save_keyring(self, keyring_dict): """Helper to actually write the keyring to Google"""
import gdata result = self.OK file_contents = base64.urlsafe_b64encode(pickle.dumps(keyring_dict)) try: if self.docs_entry: extra_headers = {'Content-Type': 'text/plain', 'Content-Length': len(file_contents)} self.docs_entry = self.client.Put( file_contents, self.docs_entry.GetEditMediaLink().href, extra_headers=extra_headers ) else: from gdata.docs.service import DocumentQuery # check for existence of folder, create if required folder_query = DocumentQuery(categories=['folder']) folder_query['title'] = self.collection folder_query['title-exact'] = 'true' docs = self.client.QueryDocumentListFeed(folder_query.ToUri()) if docs.entry: folder_entry = docs.entry[0] else: folder_entry = self.client.CreateFolder(self.collection) file_handle = io.BytesIO(file_contents) media_source = gdata.MediaSource( file_handle=file_handle, content_type='text/plain', content_length=len(file_contents), file_name='temp') self.docs_entry = self.client.Upload( media_source, self._get_doc_title(), folder_or_uri=folder_entry ) except gdata.service.RequestError as ex: try: if ex.message['reason'].lower().find('conflict') != -1: result = self.CONFLICT else: # Google docs has a bug when updating a shared document # using PUT from any account other that the owner. # It returns an error 400 "Sorry, there was an error saving # the file. Please try again" # *despite* actually updating the document! # Workaround by re-reading to see if it actually updated msg = 'Sorry, there was an error saving the file' if ex.message['body'].find(msg) != -1: new_docs_entry, new_keyring_dict = self._read() if new_keyring_dict == keyring_dict: result = self.OK else: result = self.FAIL else: result = self.FAIL except Exception: result = self.FAIL return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _request(method, url, session=None, **kwargs): """Make HTTP request, raising an exception if it fails. """
url = BASE_URL + url if session: request_func = getattr(session, method) else: request_func = getattr(requests, method) response = request_func(url, **kwargs) # raise an exception if request is not successful if not response.status_code == requests.codes.ok: raise DweepyError('HTTP {0} response'.format(response.status_code)) response_json = response.json() if response_json['this'] == 'failed': raise DweepyError(response_json['because']) return response_json['with']
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _send_dweet(payload, url, params=None, session=None): """Send a dweet to dweet.io """
data = json.dumps(payload) headers = {'Content-type': 'application/json'} return _request('post', url, data=data, headers=headers, params=params, session=session)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def dweet_for(thing_name, payload, key=None, session=None): """Send a dweet to dweet.io for a thing with a known name """
if key is not None: params = {'key': key} else: params = None return _send_dweet(payload, '/dweet/for/{0}'.format(thing_name), params=params, session=session)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_dweets_for(thing_name, key=None, session=None): """Read all the dweets for a dweeter """
if key is not None: params = {'key': key} else: params = None return _request('get', '/get/dweets/for/{0}'.format(thing_name), params=params, session=session)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def unlock(thing_name, key, session=None): """Unlock a thing """
return _request('get', '/unlock/{0}'.format(thing_name), params={'key': key}, session=session)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def remove_alert(thing_name, key, session=None): """Remove an alert for the given thing """
return _request('get', '/remove/alert/for/{0}'.format(thing_name), params={'key': key}, session=session)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_product_sets(self): """ list all product sets for current user """
# ensure we are using api url without a specific product set id api_url = super(ProductSetAPI, self).base_url return self.client.get(api_url)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _check_stream_timeout(started, timeout): """Check if the timeout has been reached and raise a `StopIteration` if so. """
if timeout: elapsed = datetime.datetime.utcnow() - started if elapsed.seconds > timeout: raise StopIteration
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _listen_for_dweets_from_response(response): """Yields dweets as received from dweet.io's streaming API """
streambuffer = '' for byte in response.iter_content(): if byte: streambuffer += byte.decode('ascii') try: dweet = json.loads(streambuffer.splitlines()[1]) except (IndexError, ValueError): continue if isstr(dweet): yield json.loads(dweet) streambuffer = ''
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def listen_for_dweets_from(thing_name, timeout=900, key=None, session=None): """Create a real-time subscription to dweets """
url = BASE_URL + '/listen/for/dweets/from/{0}'.format(thing_name) session = session or requests.Session() if key is not None: params = {'key': key} else: params = None start = datetime.datetime.utcnow() while True: request = requests.Request("GET", url, params=params).prepare() resp = session.send(request, stream=True, timeout=timeout) try: for x in _listen_for_dweets_from_response(resp): yield x _check_stream_timeout(start, timeout) except (ChunkedEncodingError, requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout): pass _check_stream_timeout(start, timeout)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def build(self, parallel=True, debug=False, force=False, machine_readable=False): """Executes a `packer build` :param bool parallel: Run builders in parallel :param bool debug: Run in debug mode :param bool force: Force artifact output even if exists :param bool machine_readable: Make output machine-readable """
self.packer_cmd = self.packer.build self._add_opt('-parallel=true' if parallel else None) self._add_opt('-debug' if debug else None) self._add_opt('-force' if force else None) self._add_opt('-machine-readable' if machine_readable else None) self._append_base_arguments() self._add_opt(self.packerfile) return self.packer_cmd()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def fix(self, to_file=None): """Implements the `packer fix` function :param string to_file: File to output fixed template to """
self.packer_cmd = self.packer.fix self._add_opt(self.packerfile) result = self.packer_cmd() if to_file: with open(to_file, 'w') as f: f.write(result.stdout.decode()) result.fixed = json.loads(result.stdout.decode()) return result
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def push(self, create=True, token=False): """Implements the `packer push` function UNTESTED! Must be used alongside an Atlas account """
self.packer_cmd = self.packer.push self._add_opt('-create=true' if create else None) self._add_opt('-token={0}'.format(token) if token else None) self._add_opt(self.packerfile) return self.packer_cmd()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _append_base_arguments(self): """Appends base arguments to packer commands. -except, -only, -var and -var-file are appended to almost all subcommands in packer. As such this can be called to add these flags to the subcommand. """
if self.exc and self.only: raise PackerException('Cannot provide both "except" and "only"') elif self.exc: self._add_opt('-except={0}'.format(self._join_comma(self.exc))) elif self.only: self._add_opt('-only={0}'.format(self._join_comma(self.only))) for var, value in self.vars.items(): self._add_opt("-var") self._add_opt("{0}={1}".format(var, value)) if self.var_file: self._add_opt('-var-file={0}'.format(self.var_file))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _parse_inspection_output(self, output): """Parses the machine-readable output `packer inspect` provides. See the inspect method for more info. This has been tested vs. Packer v0.7.5 """
parts = {'variables': [], 'builders': [], 'provisioners': []} for line in output.splitlines(): line = line.split(',') if line[2].startswith('template'): del line[0:2] component = line[0] if component == 'template-variable': variable = {"name": line[1], "value": line[2]} parts['variables'].append(variable) elif component == 'template-builder': builder = {"name": line[1], "type": line[2]} parts['builders'].append(builder) elif component == 'template-provisioner': provisioner = {"type": line[1]} parts['provisioners'].append(provisioner) return parts
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def post(self, url, data, headers=None): """ Perform an HTTP POST request for a given url. Returns the response object. """
return self._request('POST', url, data, headers=headers)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def put(self, url, data, headers=None): """ Perform an HTTP PUT request for a given url. Returns the response object. """
return self._request('PUT', url, data, headers=headers)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def query(self, *args): """ Query a fulltext index by key and query or just a plain Lucene query, i1 = gdb.nodes.indexes.get('people',type='fulltext', provider='lucene') i1.query('name','do*') i1.query('name:do*') In this example, the last two line are equivalent. """
if not args or len(args) > 2: raise TypeError('query() takes 2 or 3 arguments (a query or a key ' 'and a query) (%d given)' % (len(args) + 1)) elif len(args) == 1: query, = args return self.get('text').query(text_type(query)) else: key, query = args index_key = self.get(key) if isinstance(query, string_types): return index_key.query(query) else: if query.fielded: raise ValueError('Queries with an included key should ' 'not include a field.') return index_key.query(text_type(query))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def do_call(self, path, method, body=None, headers=None): """ Send an HTTP request to the REST API. :param string path: A URL :param string method: The HTTP method (GET, POST, etc.) to use in the request. :param string body: A string representing any data to be sent in the body of the HTTP request. :param dictionary headers: "{header-name: header-value}" dictionary. """
url = urljoin(self.base_url, path) try: resp = requests.request(method, url, data=body, headers=headers, auth=self.auth, timeout=self.timeout) except requests.exceptions.Timeout as out: raise NetworkError("Timeout while trying to connect to RabbitMQ") except requests.exceptions.RequestException as err: # All other requests exceptions inherit from RequestException raise NetworkError("Error during request %s %s" % (type(err), err)) try: content = resp.json() except ValueError as out: content = None # 'success' HTTP status codes are 200-206 if resp.status_code < 200 or resp.status_code > 206: raise HTTPError(content, resp.status_code, resp.text, path, body) else: if content: return content else: return None
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _call(self, path, method, body=None, headers=None): """ Wrapper around http.do_call that transforms some HTTPError into our own exceptions """
try: resp = self.http.do_call(path, method, body, headers) except http.HTTPError as err: if err.status == 401: raise PermissionError('Insufficient permissions to query ' + '%s with user %s :%s' % (path, self.user, err)) raise return resp
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_whoami(self): """ A convenience function used in the event that you need to confirm that the broker thinks you are who you think you are. :returns dict whoami: Dict structure contains: * administrator: whether the user is has admin privileges * name: user name * auth_backend: backend used to determine admin rights """
path = Client.urls['whoami'] whoami = self._call(path, 'GET') return whoami
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_vhost_names(self): """ A convenience function for getting back only the vhost names instead of the larger vhost dicts. :returns list vhost_names: A list of just the vhost names. """
vhosts = self.get_all_vhosts() vhost_names = [i['name'] for i in vhosts] return vhost_names
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_vhost(self, vname): """ Returns the attributes of a single named vhost in a dict. :param string vname: Name of the vhost to get. :returns dict vhost: Attribute dict for the named vhost """
vname = quote(vname, '') path = Client.urls['vhosts_by_name'] % vname vhost = self._call(path, 'GET', headers=Client.json_headers) return vhost
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_vhost(self, vname): """ Creates a vhost on the server to house exchanges. :param string vname: The name to give to the vhost on the server :returns: boolean """
vname = quote(vname, '') path = Client.urls['vhosts_by_name'] % vname return self._call(path, 'PUT', headers=Client.json_headers)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_vhost(self, vname): """ Deletes a vhost from the server. Note that this also deletes any exchanges or queues that belong to this vhost. :param string vname: Name of the vhost to delete from the server. """
vname = quote(vname, '') path = Client.urls['vhosts_by_name'] % vname return self._call(path, 'DELETE')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def set_vhost_permissions(self, vname, username, config, rd, wr): """ Set permissions for a given username on a given vhost. Both must already exist. :param string vname: Name of the vhost to set perms on. :param string username: User to set permissions for. :param string config: Permission pattern for configuration operations for this user in this vhost. :param string rd: Permission pattern for read operations for this user in this vhost :param string wr: Permission pattern for write operations for this user in this vhost. Permission patterns are regex strings. If you're unfamiliar with this, you should definitely check out this section of the RabbitMQ docs: http://www.rabbitmq.com/admin-guide.html#access-control """
vname = quote(vname, '') body = json.dumps({"configure": config, "read": rd, "write": wr}) path = Client.urls['vhost_permissions'] % (vname, username) return self._call(path, 'PUT', body, headers=Client.json_headers)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_permission(self, vname, username): """ Delete permission for a given username on a given vhost. Both must already exist. :param string vname: Name of the vhost to set perms on. :param string username: User to set permissions for. """
vname = quote(vname, '') path = Client.urls['vhost_permissions'] % (vname, username) return self._call(path, 'DELETE')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_exchange(self, vhost, name): """ Gets a single exchange which requires a vhost and name. :param string vhost: The vhost containing the target exchange :param string name: The name of the exchange :returns: dict """
vhost = quote(vhost, '') name = quote(name, '') path = Client.urls['exchange_by_name'] % (vhost, name) exch = self._call(path, 'GET') return exch
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_exchange(self, vhost, name): """ Delete the named exchange from the named vhost. The API returns a 204 on success, in which case this method returns True, otherwise the error is raised. :param string vhost: Vhost where target exchange was created :param string name: The name of the exchange to delete. :returns bool: True on success. """
vhost = quote(vhost, '') name = quote(name, '') path = Client.urls['exchange_by_name'] % (vhost, name) self._call(path, 'DELETE') return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_queues(self, vhost=None): """ Get all queues, or all queues in a vhost if vhost is not None. Returns a list. :param string vhost: The virtual host to list queues for. If This is None (the default), all queues for the broker instance are returned. :returns: A list of dicts, each representing a queue. :rtype: list of dicts """
if vhost: vhost = quote(vhost, '') path = Client.urls['queues_by_vhost'] % vhost else: path = Client.urls['all_queues'] queues = self._call(path, 'GET') return queues or list()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_queue(self, vhost, name): """ Get a single queue, which requires both vhost and name. :param string vhost: The virtual host for the queue being requested. If the vhost is '/', note that it will be translated to '%2F' to conform to URL encoding requirements. :param string name: The name of the queue being requested. :returns: A dictionary of queue properties. :rtype: dict """
vhost = quote(vhost, '') name = quote(name, '') path = Client.urls['queues_by_name'] % (vhost, name) queue = self._call(path, 'GET') return queue
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_queue_depths(self, vhost, names=None): """ Get the number of messages currently sitting in either the queue names listed in 'names', or all queues in 'vhost' if no 'names' are given. :param str vhost: Vhost where queues in 'names' live. :param list names: OPTIONAL - Specific queues to show depths for. If None, show depths for all queues in 'vhost'. """
vhost = quote(vhost, '') if not names: # get all queues in vhost path = Client.urls['queues_by_vhost'] % vhost queues = self._call(path, 'GET') for queue in queues: depth = queue['messages'] print("\t%s: %s" % (queue['name'], depth)) else: # get the named queues only. for name in names: depth = self.get_queue_depth(vhost, name) print("\t%s: %s" % (name, depth))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def purge_queues(self, queues): """ Purge all messages from one or more queues. :param list queues: A list of ('qname', 'vhost') tuples. :returns: True on success """
for name, vhost in queues: vhost = quote(vhost, '') name = quote(name, '') path = Client.urls['purge_queue'] % (vhost, name) self._call(path, 'DELETE') return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def purge_queue(self, vhost, name): """ Purge all messages from a single queue. This is a convenience method so you aren't forced to supply a list containing a single tuple to the purge_queues method. :param string vhost: The vhost of the queue being purged. :param string name: The name of the queue being purged. :rtype: None """
vhost = quote(vhost, '') name = quote(name, '') path = Client.urls['purge_queue'] % (vhost, name) return self._call(path, 'DELETE')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_queue(self, vhost, name, **kwargs): """ Create a queue. The API documentation specifies that all of the body elements are optional, so this method only requires arguments needed to form the URI :param string vhost: The vhost to create the queue in. :param string name: The name of the queue More on these operations can be found at: http://www.rabbitmq.com/amqp-0-9-1-reference.html """
vhost = quote(vhost, '') name = quote(name, '') path = Client.urls['queues_by_name'] % (vhost, name) body = json.dumps(kwargs) return self._call(path, 'PUT', body, headers=Client.json_headers)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_queue(self, vhost, qname): """ Deletes the named queue from the named vhost. :param string vhost: Vhost housing the queue to be deleted. :param string qname: Name of the queue to delete. Note that if you just want to delete the messages from a queue, you should use purge_queue instead of deleting/recreating a queue. """
vhost = quote(vhost, '') qname = quote(qname, '') path = Client.urls['queues_by_name'] % (vhost, qname) return self._call(path, 'DELETE', headers=Client.json_headers)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_connection(self, name): """ Get a connection by name. To get the names, use get_connections. :param string name: Name of connection to get :returns dict conn: A connection attribute dictionary. """
name = quote(name, '') path = Client.urls['connections_by_name'] % name conn = self._call(path, 'GET') return conn
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_connection(self, name): """ Close the named connection. The API returns a 204 on success, in which case this method returns True, otherwise the error is raised. :param string name: The name of the connection to delete. :returns bool: True on success. """
name = quote(name, '') path = Client.urls['connections_by_name'] % name self._call(path, 'DELETE') return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_channel(self, name): """ Get a channel by name. To get the names, use get_channels. :param string name: Name of channel to get :returns dict conn: A channel attribute dictionary. """
name = quote(name, '') path = Client.urls['channels_by_name'] % name chan = self._call(path, 'GET') return chan
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def create_binding(self, vhost, exchange, queue, rt_key=None, args=None): """ Creates a binding between an exchange and a queue on a given vhost. :param string vhost: vhost housing the exchange/queue to bind :param string exchange: the target exchange of the binding :param string queue: the queue to bind to the exchange :param string rt_key: the routing key to use for the binding :param list args: extra arguments to associate w/ the binding. :returns: boolean """
vhost = quote(vhost, '') exchange = quote(exchange, '') queue = quote(queue, '') body = json.dumps({'routing_key': rt_key, 'arguments': args or []}) path = Client.urls['bindings_between_exch_queue'] % (vhost, exchange, queue) binding = self._call(path, 'POST', body=body, headers=Client.json_headers) return binding
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_binding(self, vhost, exchange, queue, rt_key): """ Deletes a binding between an exchange and a queue on a given vhost. :param string vhost: vhost housing the exchange/queue to bind :param string exchange: the target exchange of the binding :param string queue: the queue to bind to the exchange :param string rt_key: the routing key to use for the binding """
vhost = quote(vhost, '') exchange = quote(exchange, '') queue = quote(queue, '') body = '' path = Client.urls['rt_bindings_between_exch_queue'] % (vhost, exchange, queue, rt_key) return self._call(path, 'DELETE', headers=Client.json_headers)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_user(self, username): """ Deletes a user from the server. :param string username: Name of the user to delete from the server. """
path = Client.urls['users_by_name'] % username return self._call(path, 'DELETE')
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def index(request): """ Redirects to the default wiki index name. """
kwargs = {'slug': getattr(settings, 'WAKAWAKA_DEFAULT_INDEX', 'WikiIndex')} redirect_to = reverse('wakawaka_page', kwargs=kwargs) return HttpResponseRedirect(redirect_to)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def page( request, slug, rev_id=None, template_name='wakawaka/page.html', extra_context=None, ): """ Displays a wiki page. Redirects to the edit view if the page doesn't exist. """
try: queryset = WikiPage.objects.all() page = queryset.get(slug=slug) rev = page.current # Display an older revision if rev_id is given if rev_id: revision_queryset = Revision.objects.all() rev_specific = revision_queryset.get(pk=rev_id) if rev.pk != rev_specific.pk: rev_specific.is_not_current = True rev = rev_specific # The Page does not exist, redirect to the edit form or # deny, if the user has no permission to add pages except WikiPage.DoesNotExist: if request.user.is_authenticated: kwargs = {'slug': slug} redirect_to = reverse('wakawaka_edit', kwargs=kwargs) return HttpResponseRedirect(redirect_to) raise Http404 template_context = {'page': page, 'rev': rev} template_context.update(extra_context or {}) return render(request, template_name, template_context)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def edit( request, slug, rev_id=None, template_name='wakawaka/edit.html', extra_context=None, wiki_page_form=WikiPageForm, wiki_delete_form=DeleteWikiPageForm, ): """ Displays the form for editing and deleting a page. """
# Get the page for slug and get a specific revision, if given try: queryset = WikiPage.objects.all() page = queryset.get(slug=slug) rev = page.current initial = {'content': page.current.content} # Do not allow editing wiki pages if the user has no permission if not request.user.has_perms( ('wakawaka.change_wikipage', 'wakawaka.change_revision') ): return HttpResponseForbidden( ugettext('You don\'t have permission to edit pages.') ) if rev_id: # There is a specific revision, fetch this rev_specific = Revision.objects.get(pk=rev_id) if rev.pk != rev_specific.pk: rev = rev_specific rev.is_not_current = True initial = { 'content': rev.content, 'message': _('Reverted to "%s"' % rev.message), } # This page does not exist, create a dummy page # Note that it's not saved here except WikiPage.DoesNotExist: # Do not allow adding wiki pages if the user has no permission if not request.user.has_perms( ('wakawaka.add_wikipage', 'wakawaka.add_revision') ): return HttpResponseForbidden( ugettext('You don\'t have permission to add wiki pages.') ) page = WikiPage(slug=slug) page.is_initial = True rev = None initial = { 'content': _('Describe your new page %s here...' % slug), 'message': _('Initial revision'), } # Don't display the delete form if the user has nor permission delete_form = None # The user has permission, then do if request.user.has_perm( 'wakawaka.delete_wikipage' ) or request.user.has_perm('wakawaka.delete_revision'): delete_form = wiki_delete_form(request) if request.method == 'POST' and request.POST.get('delete'): delete_form = wiki_delete_form(request, request.POST) if delete_form.is_valid(): return delete_form.delete_wiki(request, page, rev) # Page add/edit form form = wiki_page_form(initial=initial) if request.method == 'POST': form = wiki_page_form(data=request.POST) if form.is_valid(): # Check if the content is changed, except there is a rev_id and the # user possibly only reverted the HEAD to it if ( not rev_id and initial['content'] == form.cleaned_data['content'] ): form.errors['content'] = (_('You have made no changes!'),) # Save the form and redirect to the page view else: try: # Check that the page already exist queryset = WikiPage.objects.all() page = queryset.get(slug=slug) except WikiPage.DoesNotExist: # Must be a new one, create that page page = WikiPage(slug=slug) page.save() form.save(request, page) kwargs = {'slug': page.slug} redirect_to = reverse('wakawaka_page', kwargs=kwargs) messages.success( request, ugettext('Your changes to %s were saved' % page.slug), ) return HttpResponseRedirect(redirect_to) template_context = { 'form': form, 'delete_form': delete_form, 'page': page, 'rev': rev, } template_context.update(extra_context or {}) return render(request, template_name, template_context)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def revisions( request, slug, template_name='wakawaka/revisions.html', extra_context=None ): """ Displays the list of all revisions for a specific WikiPage """
queryset = WikiPage.objects.all() page = get_object_or_404(queryset, slug=slug) template_context = {'page': page} template_context.update(extra_context or {}) return render(request, template_name, template_context)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def changes( request, slug, template_name='wakawaka/changes.html', extra_context=None ): """ Displays the changes between two revisions. """
rev_a_id = request.GET.get('a', None) rev_b_id = request.GET.get('b', None) # Some stinky fingers manipulated the url if not rev_a_id or not rev_b_id: return HttpResponseBadRequest('Bad Request') try: revision_queryset = Revision.objects.all() wikipage_queryset = WikiPage.objects.all() rev_a = revision_queryset.get(pk=rev_a_id) rev_b = revision_queryset.get(pk=rev_b_id) page = wikipage_queryset.get(slug=slug) except ObjectDoesNotExist: raise Http404 if rev_a.content != rev_b.content: d = difflib.unified_diff( rev_b.content.splitlines(), rev_a.content.splitlines(), 'Original', 'Current', lineterm='', ) difftext = '\n'.join(d) else: difftext = _(u'No changes were made between these two revisions.') template_context = { 'page': page, 'diff': difftext, 'rev_a': rev_a, 'rev_b': rev_b, } template_context.update(extra_context or {}) return render(request, template_name, template_context)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def revision_list( request, template_name='wakawaka/revision_list.html', extra_context=None ): """ Displays a list of all recent revisions. """
revision_list = Revision.objects.all() template_context = {'revision_list': revision_list} template_context.update(extra_context or {}) return render(request, template_name, template_context)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def page_list( request, template_name='wakawaka/page_list.html', extra_context=None ): """ Displays all Pages """
page_list = WikiPage.objects.all() page_list = page_list.order_by('slug') template_context = { 'page_list': page_list, 'index_slug': getattr(settings, 'WAKAWAKA_DEFAULT_INDEX', 'WikiIndex'), } template_context.update(extra_context or {}) return render(request, template_name, template_context)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def delete_wiki(self, request, page, rev): """ Deletes the page with all revisions or the revision, based on the users choice. Returns a HttpResponseRedirect. """
# Delete the page if ( self.cleaned_data.get('delete') == 'page' and request.user.has_perm('wakawaka.delete_revision') and request.user.has_perm('wakawaka.delete_wikipage') ): self._delete_page(page) messages.success( request, ugettext('The page %s was deleted' % page.slug) ) return HttpResponseRedirect(reverse('wakawaka_index')) # Revision handling if self.cleaned_data.get('delete') == 'rev': revision_length = len(page.revisions.all()) # Delete the revision if there are more than 1 and the user has permission if revision_length > 1 and request.user.has_perm( 'wakawaka.delete_revision' ): self._delete_revision(rev) messages.success( request, ugettext('The revision for %s was deleted' % page.slug), ) return HttpResponseRedirect( reverse('wakawaka_page', kwargs={'slug': page.slug}) ) # Do not allow deleting the revision, if it's the only one and the user # has no permisson to delete the page. if revision_length <= 1 and not request.user.has_perm( 'wakawaka.delete_wikipage' ): messages.error( request, ugettext( 'You can not delete this revison for %s because it\'s the ' 'only one and you have no permission to delete the whole page.' % page.slug ), ) return HttpResponseRedirect( reverse('wakawaka_page', kwargs={'slug': page.slug}) ) # Delete the page and the revision if the user has both permissions if ( revision_length <= 1 and request.user.has_perm('wakawaka.delete_revision') and request.user.has_perm('wakawaka.delete_wikipage') ): self._delete_page(page) messages.success( request, ugettext( 'The page for %s was deleted because you deleted the only revision' % page.slug ), ) return HttpResponseRedirect(reverse('wakawaka_index'))
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_real_field(model, field_name): ''' Get the real field from a model given its name. Handle nested models recursively (aka. ``__`` lookups) ''' parts = field_name.split('__') field = model._meta.get_field(parts[0]) if len(parts) == 1: return model._meta.get_field(field_name) elif isinstance(field, models.ForeignKey): return get_real_field(field.rel.to, '__'.join(parts[1:])) else: raise Exception('Unhandled field: %s' % field_name)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def can_regex(self, field): '''Test if a given field supports regex lookups''' from django.conf import settings if settings.DATABASES['default']['ENGINE'].endswith('sqlite3'): return not isinstance(get_real_field(self.model, field), UNSUPPORTED_REGEX_FIELDS) else: return True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_orders(self): '''Get ordering fields for ``QuerySet.order_by``''' orders = [] iSortingCols = self.dt_data['iSortingCols'] dt_orders = [(self.dt_data['iSortCol_%s' % i], self.dt_data['sSortDir_%s' % i]) for i in xrange(iSortingCols)] for field_idx, field_dir in dt_orders: direction = '-' if field_dir == DESC else '' if hasattr(self, 'sort_col_%s' % field_idx): method = getattr(self, 'sort_col_%s' % field_idx) result = method(direction) if isinstance(result, (bytes, text_type)): orders.append(result) else: orders.extend(result) else: field = self.get_field(field_idx) if RE_FORMATTED.match(field): tokens = RE_FORMATTED.findall(field) orders.extend(['%s%s' % (direction, token) for token in tokens]) else: orders.append('%s%s' % (direction, field)) return orders
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def global_search(self, queryset): '''Filter a queryset with global search''' search = self.dt_data['sSearch'] if search: if self.dt_data['bRegex']: criterions = [ Q(**{'%s__iregex' % field: search}) for field in self.get_db_fields() if self.can_regex(field) ] if len(criterions) > 0: search = reduce(or_, criterions) queryset = queryset.filter(search) else: for term in search.split(): criterions = (Q(**{'%s__icontains' % field: term}) for field in self.get_db_fields()) search = reduce(or_, criterions) queryset = queryset.filter(search) return queryset
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def column_search(self, queryset): '''Filter a queryset with column search''' for idx in xrange(self.dt_data['iColumns']): search = self.dt_data['sSearch_%s' % idx] if search: if hasattr(self, 'search_col_%s' % idx): custom_search = getattr(self, 'search_col_%s' % idx) queryset = custom_search(search, queryset) else: field = self.get_field(idx) fields = RE_FORMATTED.findall(field) if RE_FORMATTED.match(field) else [field] if self.dt_data['bRegex_%s' % idx]: criterions = [Q(**{'%s__iregex' % field: search}) for field in fields if self.can_regex(field)] if len(criterions) > 0: search = reduce(or_, criterions) queryset = queryset.filter(search) else: for term in search.split(): criterions = (Q(**{'%s__icontains' % field: term}) for field in fields) search = reduce(or_, criterions) queryset = queryset.filter(search) return queryset
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_queryset(self): '''Apply Datatables sort and search criterion to QuerySet''' qs = super(DatatablesView, self).get_queryset() # Perform global search qs = self.global_search(qs) # Perform column search qs = self.column_search(qs) # Return the ordered queryset return qs.order_by(*self.get_orders())
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def get_page(self, form): '''Get the requested page''' page_size = form.cleaned_data['iDisplayLength'] start_index = form.cleaned_data['iDisplayStart'] paginator = Paginator(self.object_list, page_size) num_page = (start_index / page_size) + 1 return paginator.page(num_page)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description:
def render_to_response(self, form, **kwargs): '''Render Datatables expected JSON format''' page = self.get_page(form) data = { 'iTotalRecords': page.paginator.count, 'iTotalDisplayRecords': page.paginator.count, 'sEcho': form.cleaned_data['sEcho'], 'aaData': self.get_rows(page.object_list), } return self.json_response(data)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def authenticated(func): """ Decorator to check if Smappee's access token has expired. If it has, use the refresh token to request a new access token """
@wraps(func) def wrapper(*args, **kwargs): self = args[0] if self.refresh_token is not None and \ self.token_expiration_time <= dt.datetime.utcnow(): self.re_authenticate() return func(*args, **kwargs) return wrapper
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def urljoin(*parts): """ Join terms together with forward slashes Parameters parts Returns ------- str """
# first strip extra forward slashes (except http:// and the likes) and # create list part_list = [] for part in parts: p = str(part) if p.endswith('//'): p = p[0:-1] else: p = p.strip('/') part_list.append(p) # join everything together url = '/'.join(part_list) return url
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def authenticate(self, username, password): """ Uses a Smappee username and password to request an access token, refresh token and expiry date. Parameters username : str password : str Returns ------- requests.Response access token is saved in self.access_token refresh token is saved in self.refresh_token expiration time is set in self.token_expiration_time as datetime.datetime """
url = URLS['token'] data = { "grant_type": "password", "client_id": self.client_id, "client_secret": self.client_secret, "username": username, "password": password } r = requests.post(url, data=data) r.raise_for_status() j = r.json() self.access_token = j['access_token'] self.refresh_token = j['refresh_token'] self._set_token_expiration_time(expires_in=j['expires_in']) return r
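A hedged usage sketch; the Smappee class name and constructor signature are assumptions based on how client_id and client_secret are used above.

client = Smappee(client_id='my-client-id', client_secret='my-client-secret')  # assumed constructor
client.authenticate(username='user@example.com', password='correct horse battery staple')
# client.access_token is now set; @authenticated methods refresh it automatically when it expires.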
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_service_location_info(self, service_location_id): """ Request service location info Parameters service_location_id : int Returns ------- dict """
url = urljoin(URLS['servicelocation'], service_location_id, "info") headers = {"Authorization": "Bearer {}".format(self.access_token)} r = requests.get(url, headers=headers) r.raise_for_status() return r.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_consumption(self, service_location_id, start, end, aggregation, raw=False): """ Request electricity consumption and solar production for a given service location. Parameters service_location_id : int start : int | dt.datetime | pd.Timestamp end : int | dt.datetime | pd.Timestamp start and end support epoch (in milliseconds), datetime and Pandas Timestamp aggregation : int 1 = 5 min values (only available for the last 14 days) 2 = hourly values 3 = daily values 4 = monthly values 5 = quarterly values raw : bool default False if True: Return the data "as is" from the server if False: convert the 'alwaysOn' value to Wh. (the server returns this value as the sum of the power, measured in 5 minute blocks. This means that it is 12 times higher than the consumption in Wh. See https://github.com/EnergieID/smappy/issues/24) Returns ------- dict """
url = urljoin(URLS['servicelocation'], service_location_id, "consumption") d = self._get_consumption(url=url, start=start, end=end, aggregation=aggregation) if not raw: for block in d['consumptions']: if 'alwaysOn' not in block.keys(): break block.update({'alwaysOn': block['alwaysOn'] / 12}) return d
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_sensor_consumption(self, service_location_id, sensor_id, start, end, aggregation): """ Request consumption for a given sensor in a given service location Parameters service_location_id : int sensor_id : int start : int | dt.datetime | pd.Timestamp end : int | dt.datetime | pd.Timestamp start and end support epoch (in milliseconds), datetime and Pandas Timestamp timezone-naive datetimes are assumed to be in UTC aggregation : int 1 = 5 min values (only available for the last 14 days) 2 = hourly values 3 = daily values 4 = monthly values 5 = quarterly values Returns ------- dict """
url = urljoin(URLS['servicelocation'], service_location_id, "sensor", sensor_id, "consumption") return self._get_consumption(url=url, start=start, end=end, aggregation=aggregation)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _get_consumption(self, url, start, end, aggregation): """ Perform the consumption request shared by the get_consumption and get_sensor_consumption methods. Parameters url : str start : dt.datetime end : dt.datetime aggregation : int Returns ------- dict """
start = self._to_milliseconds(start) end = self._to_milliseconds(end) headers = {"Authorization": "Bearer {}".format(self.access_token)} params = { "aggregation": aggregation, "from": start, "to": end } r = requests.get(url, headers=headers, params=params) r.raise_for_status() return r.json()
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def get_events(self, service_location_id, appliance_id, start, end, max_number=None): """ Request events for a given appliance Parameters service_location_id : int appliance_id : int start : int | dt.datetime | pd.Timestamp end : int | dt.datetime | pd.Timestamp start and end support epoch (in milliseconds), datetime and Pandas Timestamp timezone-naive datetimes are assumed to be in UTC max_number : int, optional The maximum number of events that should be returned by this query Default returns all events in the selected period Returns ------- dict """
start = self._to_milliseconds(start) end = self._to_milliseconds(end) url = urljoin(URLS['servicelocation'], service_location_id, "events") headers = {"Authorization": "Bearer {}".format(self.access_token)} params = { "from": start, "to": end, "applianceId": appliance_id, "maxNumber": max_number } r = requests.get(url, headers=headers, params=params) r.raise_for_status() return r.json()
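An illustrative call, with made-up identifiers:

import datetime as dt

events = client.get_events(
    service_location_id=12345,
    appliance_id=7,
    start=dt.datetime(2017, 1, 1),
    end=dt.datetime(2017, 1, 8),
    max_number=100,   # omit to receive every event in the window
)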
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def actuator_on(self, service_location_id, actuator_id, duration=None): """ Turn actuator on Parameters service_location_id : int actuator_id : int duration : int, optional 300,900,1800 or 3600 , specifying the time in seconds the actuator should be turned on. Any other value results in turning on for an undetermined period of time. Returns ------- requests.Response """
return self._actuator_on_off( on_off='on', service_location_id=service_location_id, actuator_id=actuator_id, duration=duration)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def actuator_off(self, service_location_id, actuator_id, duration=None): """ Turn actuator off Parameters service_location_id : int actuator_id : int duration : int, optional 300,900,1800 or 3600 , specifying the time in seconds the actuator should be turned off. Any other value results in turning off for an undetermined period of time. Returns ------- requests.Response """
return self._actuator_on_off( on_off='off', service_location_id=service_location_id, actuator_id=actuator_id, duration=duration)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _actuator_on_off(self, on_off, service_location_id, actuator_id, duration=None): """ Turn actuator on or off Parameters on_off : str 'on' or 'off' service_location_id : int actuator_id : int duration : int, optional 300,900,1800 or 3600 , specifying the time in seconds the actuator should be turned on or off. Any other value results in turning on or off for an undetermined period of time. Returns ------- requests.Response """
url = urljoin(URLS['servicelocation'], service_location_id, "actuator", actuator_id, on_off) headers = {"Authorization": "Bearer {}".format(self.access_token)} if duration is not None: data = {"duration": duration} else: data = {} r = requests.post(url, headers=headers, json=data) r.raise_for_status() return r
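For example, to switch a plug off for five minutes and let it come back on by itself (identifiers are made up):

client.actuator_off(service_location_id=12345, actuator_id=3, duration=300)
# Omitting duration (or passing a value other than 300/900/1800/3600) leaves it switched
# for an undetermined period, as described in the docstrings above.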
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _to_milliseconds(self, time): """ Converts a datetime-like object to epoch, in milliseconds Timezone-naive datetime objects are assumed to be in UTC Parameters time : dt.datetime | pd.Timestamp | int Returns ------- int epoch milliseconds """
if isinstance(time, dt.datetime): if time.tzinfo is None: time = time.replace(tzinfo=pytz.UTC) return int(time.timestamp() * 1e3) elif isinstance(time, numbers.Number): return time else: raise NotImplementedError("Time format not supported. Use milliseconds since epoch,\ Datetime or Pandas Datetime")
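A quick check of the conversion, assuming client is an instance of the surrounding class; 2017-01-01T00:00:00 UTC is 1483228800 seconds after the epoch.

import datetime as dt

assert client._to_milliseconds(dt.datetime(2017, 1, 1)) == 1483228800000   # naive datetime treated as UTC
assert client._to_milliseconds(1483228800000) == 1483228800000             # epoch milliseconds pass through unchanged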
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def _basic_post(self, url, data=None): """ Because basically every post request is the same Parameters url : str data : str, optional Returns ------- requests.Response """
_url = urljoin(self.base_url, url) r = self.session.post(_url, data=data, headers=self.headers, timeout=5) r.raise_for_status() return r
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def active_power(self): """ Takes the sum of all instantaneous active power values and returns it in kW Returns ------- float """
inst = self.load_instantaneous() values = [float(i['value']) for i in inst if i['key'].endswith('ActivePower')] return sum(values) / 1000
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def active_cosfi(self): """ Takes the average of all instantaneous cosfi values Returns ------- float """
inst = self.load_instantaneous() values = [float(i['value']) for i in inst if i['key'].endswith('Cosfi')] return sum(values) / len(values)
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getInterfaceInAllSpeeds(interface, endpoint_list, class_descriptor_list=()): """ Produce similar fs, hs and ss interface and endpoints descriptors. Should be useful for devices desiring to work in all 3 speeds with maximum endpoint wMaxPacketSize. Reduces data duplication from descriptor declarations. Not intended to cover fancy combinations. interface (dict): Keyword arguments for USBInterfaceDescriptor in all speeds. bNumEndpoints must not be provided. endpoint_list (list of dicts) Each dict represents an endpoint, and may contain the following items: - "endpoint": required, contains keyword arguments for USBEndpointDescriptorNoAudio or USBEndpointDescriptor. The with-audio variant is picked when its extra fields are assigned a value. wMaxPacketSize may be missing, in which case it will be set to the maximum size for given speed and endpoint type. bmAttributes must be provided. If bEndpointAddress is zero (excluding direction bit) on the first endpoint, endpoints will be assigned their rank in this list, starting at 1. Their direction bit is preserved. If bInterval is present on an INT or ISO endpoint, it must be in millisecond units (but may not be an integer), and will be converted to the nearest integer millisecond for the full-speed descriptor, and nearest possible interval for high- and super-speed descriptors. If bInterval is present on a BULK endpoint, it is set to zero on the full-speed descriptor and used as provided on high- and super-speed descriptors. - "superspeed": optional, contains keyword arguments for USBSSEPCompDescriptor. - "superspeed_iso": optional, contains keyword arguments for USBSSPIsocEndpointDescriptor. Must be provided and non-empty only when the endpoint is isochronous and the "superspeed" dict has "bmAttributes" bit 7 set. class_descriptor_list (list of descriptors of any type) Descriptors to insert in all speeds between the interface descriptor and endpoint descriptors. Returns a 3-tuple of lists: - fs descriptors - hs descriptors - ss descriptors """
interface = getDescriptor( USBInterfaceDescriptor, bNumEndpoints=len(endpoint_list), **interface ) class_descriptor_list = list(class_descriptor_list) fs_list = [interface] + class_descriptor_list hs_list = [interface] + class_descriptor_list ss_list = [interface] + class_descriptor_list need_address = ( endpoint_list[0]['endpoint'].get( 'bEndpointAddress', 0, ) & ~ch9.USB_DIR_IN == 0 ) for index, endpoint in enumerate(endpoint_list, 1): endpoint_kw = endpoint['endpoint'].copy() transfer_type = endpoint_kw[ 'bmAttributes' ] & ch9.USB_ENDPOINT_XFERTYPE_MASK fs_max, hs_max, ss_max = _MAX_PACKET_SIZE_DICT[transfer_type] if need_address: endpoint_kw['bEndpointAddress'] = index | ( endpoint_kw.get('bEndpointAddress', 0) & ch9.USB_DIR_IN ) klass = ( USBEndpointDescriptor if 'bRefresh' in endpoint_kw or 'bSynchAddress' in endpoint_kw else USBEndpointDescriptorNoAudio ) interval = endpoint_kw.pop('bInterval', _MARKER) if interval is _MARKER: fs_interval = hs_interval = 0 else: if transfer_type == ch9.USB_ENDPOINT_XFER_BULK: fs_interval = 0 hs_interval = interval else: # USB_ENDPOINT_XFER_ISOC or USB_ENDPOINT_XFER_INT fs_interval = max(1, min(255, round(interval))) # 8 is the number of microframes in a millisecond hs_interval = max( 1, min(16, int(round(1 + math.log(interval * 8, 2)))), ) packet_size = endpoint_kw.pop('wMaxPacketSize', _MARKER) if packet_size is _MARKER: fs_packet_size = fs_max hs_packet_size = hs_max ss_packet_size = ss_max else: fs_packet_size = min(fs_max, packet_size) hs_packet_size = min(hs_max, packet_size) ss_packet_size = min(ss_max, packet_size) fs_list.append(getDescriptor( klass, wMaxPacketSize=fs_packet_size, bInterval=fs_interval, **endpoint_kw )) hs_list.append(getDescriptor( klass, wMaxPacketSize=hs_packet_size, bInterval=hs_interval, **endpoint_kw )) ss_list.append(getDescriptor( klass, wMaxPacketSize=ss_packet_size, bInterval=hs_interval, **endpoint_kw )) ss_companion_kw = endpoint.get('superspeed', _EMPTY_DICT) ss_list.append(getDescriptor( USBSSEPCompDescriptor, **ss_companion_kw )) ssp_iso_kw = endpoint.get('superspeed_iso', _EMPTY_DICT) if bool(ssp_iso_kw) != ( endpoint_kw.get('bmAttributes', 0) & ch9.USB_ENDPOINT_XFERTYPE_MASK == ch9.USB_ENDPOINT_XFER_ISOC and bool(ch9.USB_SS_SSP_ISOC_COMP( ss_companion_kw.get('bmAttributes', 0), )) ): raise ValueError('Inconsistent isochronous companion') if ssp_iso_kw: ss_list.append(getDescriptor( USBSSPIsocEndpointDescriptor, **ssp_iso_kw )) return (fs_list, hs_list, ss_list)
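A hedged sketch of building descriptors for a plain bulk IN/OUT interface. The USB_DIR_* and USB_ENDPOINT_XFER_* constants are the ones used by the code above; USB_CLASS_VENDOR_SPEC is assumed to be exposed by the same ch9 module.

fs, hs, ss = getInterfaceInAllSpeeds(
    interface={'bInterfaceClass': ch9.USB_CLASS_VENDOR_SPEC},
    endpoint_list=[
        {'endpoint': {
            'bEndpointAddress': ch9.USB_DIR_IN,
            'bmAttributes': ch9.USB_ENDPOINT_XFER_BULK,
        }},
        {'endpoint': {
            'bEndpointAddress': ch9.USB_DIR_OUT,
            'bmAttributes': ch9.USB_ENDPOINT_XFER_BULK,
        }},
    ],
)
# Addresses are auto-assigned (0x81 for the IN endpoint, 0x02 for the OUT endpoint)
# because the first endpoint's address is zero apart from its direction bit, and
# wMaxPacketSize defaults to the per-speed maximum for bulk transfers.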
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getDescriptor(klass, **kw): """ Automatically fills bLength and bDescriptorType. """
# XXX: ctypes Structure.__init__ ignores arguments which do not exist # as structure fields. So check it. # This is annoying, but not doing it is a huge waste of time for the # developer. empty = klass() assert hasattr(empty, 'bLength') assert hasattr(empty, 'bDescriptorType') unknown = [x for x in kw if not hasattr(empty, x)] if unknown: raise TypeError('Unknown fields %r' % (unknown, )) # XXX: not very pythonic... return klass( bLength=ctypes.sizeof(klass), # pylint: disable=protected-access bDescriptorType=klass._bDescriptorType, # pylint: enable=protected-access **kw )
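For instance, the helper lets an endpoint descriptor be written as below (field values are illustrative); bLength and bDescriptorType are filled in automatically.

ep_desc = getDescriptor(
    USBEndpointDescriptorNoAudio,
    bEndpointAddress=1 | ch9.USB_DIR_IN,
    bmAttributes=ch9.USB_ENDPOINT_XFER_BULK,
    wMaxPacketSize=512,
    bInterval=0,
)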
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def getRealInterfaceNumber(self, interface): """ Returns the host-visible interface number, or None if there is no such interface. """
try: return self._ioctl(INTERFACE_REVMAP, interface) except IOError as exc: if exc.errno == errno.EDOM: return None raise
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def close(self): """ Close all endpoint file descriptors. """
ep_list = self._ep_list while ep_list: ep_list.pop().close() self._closed = True
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def onSetup(self, request_type, request, value, index, length): """ Called when a setup USB transaction was received. Default implementation: - handles USB_REQ_GET_STATUS on interface and endpoints - handles USB_REQ_CLEAR_FEATURE(USB_ENDPOINT_HALT) on endpoints - handles USB_REQ_SET_FEATURE(USB_ENDPOINT_HALT) on endpoints - halts on everything else If this method raises anything, endpoint 0 is halted by its caller and exception is let through. May be overridden in subclass. """
if (request_type & ch9.USB_TYPE_MASK) == ch9.USB_TYPE_STANDARD: recipient = request_type & ch9.USB_RECIP_MASK is_in = (request_type & ch9.USB_DIR_IN) == ch9.USB_DIR_IN if request == ch9.USB_REQ_GET_STATUS: if is_in and length == 2: if recipient == ch9.USB_RECIP_INTERFACE: if value == 0: status = 0 if index == 0: if self.function_remote_wakeup_capable: status |= 1 << 0 if self.function_remote_wakeup: status |= 1 << 1 self.ep0.write(struct.pack('<H', status)[:length]) return elif recipient == ch9.USB_RECIP_ENDPOINT: if value == 0: try: endpoint = self.getEndpoint(index) except IndexError: pass else: status = 0 if endpoint.isHalted(): status |= 1 << 0 self.ep0.write( struct.pack('<H', status)[:length], ) return elif request == ch9.USB_REQ_CLEAR_FEATURE: if not is_in and length == 0: if recipient == ch9.USB_RECIP_ENDPOINT: if value == ch9.USB_ENDPOINT_HALT: try: endpoint = self.getEndpoint(index) except IndexError: pass else: endpoint.clearHalt() self.ep0.read(0) return elif recipient == ch9.USB_RECIP_INTERFACE: if value == ch9.USB_INTRF_FUNC_SUSPEND: if self.function_remote_wakeup_capable: self.disableRemoteWakeup() self.ep0.read(0) return elif request == ch9.USB_REQ_SET_FEATURE: if not is_in and length == 0: if recipient == ch9.USB_RECIP_ENDPOINT: if value == ch9.USB_ENDPOINT_HALT: try: endpoint = self.getEndpoint(index) except IndexError: pass else: endpoint.halt() self.ep0.read(0) return elif recipient == ch9.USB_RECIP_INTERFACE: if value == ch9.USB_INTRF_FUNC_SUSPEND: if self.function_remote_wakeup_capable: self.enableRemoteWakeup() self.ep0.read(0) return self.ep0.halt(request_type)
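A hedged sketch of the usual override pattern, assuming the surrounding class is the one subclassed by functions (called Function here) and that USB_TYPE_VENDOR is exposed by the same ch9 module; the vendor request number is arbitrary.

class MyFunction(Function):
    def onSetup(self, request_type, request, value, index, length):
        if (request_type & ch9.USB_TYPE_MASK) == ch9.USB_TYPE_VENDOR and request == 0x01:
            if (request_type & ch9.USB_DIR_IN) == ch9.USB_DIR_IN:
                self.ep0.write(b'\x2a'[:length])   # data stage of an IN request
            else:
                self.ep0.read(length)              # absorb the OUT data stage
            return
        # Anything else gets the standard handling (and halt fallback) shown above.
        super(MyFunction, self).onSetup(request_type, request, value, index, length)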
<SYSTEM_TASK:> Solve the following problem using Python, implementing the functions described below, one line at a time <END_TASK> <USER_TASK:> Description: def onEnable(self): """ The configuration containing this function has been enabled by host. Endpoints become working files, so submit some read operations. """
trace('onEnable') self._disable() self._aio_context.submit(self._aio_recv_block_list) self._real_onCanSend() self._enabled = True