text_prompt
stringlengths
100
17.7k
code_prompt
stringlengths
7
9.86k
def off(self):
    """Turn off the alsa_sink sink.

    Detaches this sink's music-delivery callback from the session player
    (so no further audio is delivered) and closes the audio device.
    """
    player = spotifyconnect._session_instance.player
    event = spotifyconnect.PlayerEvent.MUSIC_DELIVERY
    player.off(event, self._on_music_delivery)
    # Sanity check: no other listener should remain for this event.
    assert player.num_listeners(event) == 0
    self._close()
def patched(f):
    """Patches a given API function to not send.

    Returns a wrapper around ``f`` that forces ``return_response=False``
    and ``prefetch=True`` before delegating the call.
    """
    def wrapped(*args, **kwargs):
        kwargs.update(return_response=False, prefetch=True)
        return f(*args, **kwargs)
    return wrapped
def _print_debug(self, *args):
    """Write a debug message to stderr.

    :param args: Message(s) to output
    :rtype args: string
    """
    # At debug levels above 1 the message is prefixed with a timestamp.
    prefix = (datetime.datetime.now().time(),) if self.debuglevel > 1 else ()
    print(*(prefix + args), file=sys.stderr)
<SYSTEM_TASK:> Parse provided hostname and extract port number <END_TASK> <USER_TASK:> Description: def _parse_host(cls, host='localhost', port=0): """ Parse provided hostname and extract port number :param host: Server hostname :type host: string :param port: Server port :return: Tuple of (host, port) :rtype: tuple """
if not port and (host.find(':') == host.rfind(':')): i = host.rfind(':') if i >= 0: host, port = host[:i], host[i + 1:] try: port = int(port) except ValueError: raise OSError('nonnumeric port') return host, port
def connect_proxy(self, proxy_host='localhost', proxy_port=0,
                  proxy_type=socks.HTTP, host='localhost', port=0):
    """Connect to a host on a given port via proxy server.

    A ``host`` ending in ``:<number>`` (with no explicit ``port``) has the
    suffix stripped and used as the port. Invoked automatically by
    ``__init__`` when a host and proxy server are given.

    :param proxy_host: Hostname of proxy server
    :type proxy_host: string
    :param proxy_port: Port of proxy server; defaults to the standard port
        for the chosen proxy type
    :type proxy_port: int
    :param proxy_type: Proxy type (see socks.PROXY_TYPES)
    :type proxy_type: int
    :param host: Hostname of SMTP server
    :type host: string
    :param port: Port of SMTP server (default smtplib.SMTP_PORT)
    :type port: int
    :return: Tuple of (code, msg)
    :rtype: tuple
    :raises NotSupportedProxyType: if ``proxy_type`` is unknown
    """
    if proxy_type not in socks.DEFAULT_PORTS:
        raise NotSupportedProxyType
    proxy_host, proxy_port = self._parse_host(host=proxy_host,
                                              port=proxy_port)
    proxy_port = proxy_port or socks.DEFAULT_PORTS[proxy_type]
    host, port = self._parse_host(host=host, port=port)

    if self.debuglevel > 0:
        self._print_debug('connect: via proxy', proxy_host, proxy_port)

    sock = socks.socksocket()
    sock.set_proxy(proxy_type=proxy_type, addr=proxy_host, port=proxy_port)
    sock.settimeout(self.timeout)
    if self.source_address is not None:
        sock.bind(self.source_address)
    sock.connect((host, port))
    # Send CRLF to elicit the first response from the destination server.
    # Probably only needed for HTTP proxies — TODO: investigate further.
    sock.sendall(bCRLF)
    self.sock = sock

    code, msg = self.getreply()
    if self.debuglevel > 0:
        self._print_debug('connect:', repr(msg))
    return code, msg
def enabled(self):
    """
    Read the child sub-reaper flag of the current process.

    Returns ``False`` without calling prctl when a prior attempt already
    established the feature is unsupported (``SR_UNSUPPORTED``). Otherwise
    issues ``prctl(PR_GET_CHILD_SUBREAPER, ...)``: on failure the status is
    latched to ``SR_UNSUPPORTED`` and ``False`` is returned; on success the
    status becomes ``SR_ENABLED`` or ``SR_DISABLED`` and the corresponding
    boolean is returned.
    """
    if self._status == self.SR_UNSUPPORTED:
        # Feature already known to be missing; don't retry prctl.
        return False
    flag = c_int()
    try:
        prctl(PR_GET_CHILD_SUBREAPER, addressof(flag), 0, 0, 0)
    except OSError:
        self._status = self.SR_UNSUPPORTED
    else:
        self._status = self.SR_ENABLED if flag else self.SR_DISABLED
    return self._status == self.SR_ENABLED
def get_minimum_size(self, data):
    """Returns the rotated minimum size.

    Width and height are swapped for quarter-turn rotations; for normal
    and upside-down orientation the child's size is returned as-is.
    """
    base = self.element.get_minimum_size(data)
    if self.angle not in (RotateLM.NORMAL, RotateLM.UPSIDE_DOWN):
        return datatypes.Point(base.y, base.x)
    return base
def _calculate_ms_from_base(self, size):
    """Calculates the rotated minimum size from the given base minimum
    size.

    Rotates all four corners of the centered bounding box and returns the
    extent (max - min) of the rotated corner set.
    """
    half_w = size.x * 0.5
    half_h = size.y * 0.5
    corners = [
        datatypes.Point(sx * half_w, sy * half_h).get_rotated(self.angle)
        for sy in (0.5, -0.5) for sx in (1.0, -1.0)
    ]
    # Componentwise bounding box of the rotated corners.
    lo = corners[0]
    hi = corners[0]
    for point in corners[1:]:
        lo = lo.get_minimum(point)
        hi = hi.get_maximum(point)
    return hi - lo
def get(self, name):
    """Get the value by name.

    Requires View permission on the portal; callables are invoked and
    their result returned. ``uid`` and ``path`` are synthesized so the
    portal behaves like an ordinary content type.
    """
    # Check read permission first.
    if not getSecurityManager().checkPermission(
            permissions.View, self.context):
        raise Unauthorized("Not allowed to view the Plone portal")

    attr = getattr(self.context, name, None)
    if callable(attr):
        return attr()

    # Not really nice, but we want the portal to behave like ordinary
    # content, so inject the necessary data.
    if name == "uid":
        return "0"
    if name == "path":
        return "/%s" % self.context.getId()
    return attr
def set(self, name, value, **kw):
    """Set the attribute to the given value.

    The keyword arguments represent the other attribute values to
    integrate constraints to other values. Requires ManagePortal
    permission; returns ``False`` when the attribute does not exist,
    ``True`` after a successful write.
    """
    # Check write permission.
    if not getSecurityManager().checkPermission(
            permissions.ManagePortal, self.context):
        raise Unauthorized("Not allowed to modify the Plone portal")
    # Only existing attributes may be written.
    if not hasattr(self.context, name):
        return False
    self.context[name] = value
    return True
def set(self, name, value, **kw):
    """Set the field to the given value.

    The keyword arguments represent the other field values to integrate
    constraints to other values. Returns ``False`` when no such field
    exists; otherwise delegates to the field's ``IFieldManager`` adapter.
    """
    field = api.get_field(self.context, name)
    if not field:
        return False
    manager = IFieldManager(field)
    return manager.set(self.context, value, **kw)
def can_write(self):
    """Check if the field is writeable.

    True when the current user holds ModifyPortalContent on the context.
    """
    manager = getSecurityManager()
    return bool(manager.checkPermission(
        permissions.ModifyPortalContent, self.context))
def can_read(self):
    """Check if the field is readable.

    True when the current user holds View permission on the context.
    """
    manager = getSecurityManager()
    return bool(manager.checkPermission(permissions.View, self.context))
def send_s3_xsd(self, url_xsd):  # pragma: no cover
    """Upload an XSD (and its one level of referenced XSDs) to S3.

    This method will not be re-run always, only locally and when xsd are
    regenerated; see test_008_force_s3_creation in the test folder.

    :param url_xsd: URL of the root XSD document to cache on S3.
    :return: the S3 URL of the cached root document.
    """
    # Already cached on S3? Then there is nothing to upload.
    if self.check_s3(self.domain, urlparse(url_xsd).path[1:]):
        return url_xsd
    response = urllib2.urlopen(url_xsd)
    content = response.read()
    cached = NamedTemporaryFile(delete=False)
    named = cached.name
    # Find all urls in the main xslt file.
    urls = re.findall(r'href=[\'"]?([^\'" >]+)', content)
    # Rewrite every referenced url in the main file to its S3 location.
    for original_url in urls:
        content = content.replace(
            original_url, self.s3_url(original_url))
    with cached as cache:
        cache.write(content)
    created_url = self.cache_s3(url_xsd, named)
    print('Created Url Ok!: %s' % created_url)
    # Now upload each referenced document, rewriting ITS references too.
    for original_url in urls:
        # Expecting 1 level of deepest links in xsd; if more, refactor this.
        response = urllib2.urlopen(original_url)
        content = response.read()
        # Find all urls in this referenced file.
        in_urls = re.findall(r'href=[\'"]?([^\'" >]+)', content)
        # Rewrite its references to their S3 locations.
        for orig_url in in_urls:
            content = content.replace(
                orig_url, self.s3_url(orig_url))
        cached = NamedTemporaryFile(delete=False)
        with cached as cache:
            cache.write(content)
        named = cached.name
        new_url = self.cache_s3(original_url, named)
        print('Created Url Ok!: %s' % new_url)
    return created_url
def check_s3(self, bucket, element):  # pragma: no cover
    """Helper for `cache_s3`: read the `cache_s3` docstring for details.

    :param bucket: S3 bucket name to probe.
    :param element: object key to probe inside the bucket.
    :return: ``True`` when both bucket and key exist, else ``False``.
    """
    session = boto3.Session(profile_name=self.profile_name)
    s3 = session.resource('s3')
    try:
        s3.meta.client.head_bucket(Bucket=bucket)
    except ClientError:
        # Bucket missing: silently fall back to the original url so the
        # travis environment keeps working unmodified.
        return False
    try:
        # Probe the key; a missing key is reported, not fatal, so a
        # read-only user can be created later without breaking CI.
        s3.Object(bucket, element).load()
    except ClientError:
        return False
    return True
def cache_it(self, url):
    """Take an url which delivers a plain document and convert it to a
    temporary file; the document is an xslt file expected to contain all
    xslt definitions, and the caching is recursive.

    :param url: document origin url
    :type url: str
    :return file_path: local new absolute path
    :rtype file_path: str
    """
    # TODO: Use the file object directly (with seek(0)) instead of the
    # file name.
    entry = self._cache_it(url)
    if not isfile(entry.name):
        # The /tmp file was deleted behind our back: drop the memoized
        # result and fetch the document again.
        self._cache_it.cache_clear()
        entry = self._cache_it(url)
    return entry.name
def get_original(document, xslt):
    """Get the original chain given document path and xslt local path.

    :param str document: local absolute path to document
    :param str xslt: local absolute path to xslt file
    :return: new chain generated.
    :rtype: str
    """
    # See: http://stackoverflow.com/questions/16698935
    source_tree = etree.parse(document)
    transformer = etree.XSLT(etree.parse(xslt))
    return transformer(source_tree)
def close(self):
    """
    Close the internal signalfd file descriptor if it isn't closed.

    :raises OSError:
        If the underlying ``close(2)`` fails. The error message matches
        those found in the manual page.
    """
    with self._close_lock:
        fd = self._sfd
        if fd < 0:
            return  # already closed; close() is idempotent
        # Mark closed before the syscall so state is consistent even if
        # close(2) raises.
        self._sfd = -1
        self._signals = frozenset()
        close(fd)
def fromfd(cls, fd, signals):
    """
    Create a new signalfd object from a given file descriptor.

    :param fd: A pre-made file descriptor obtained from ``signalfd_create(2)``
    :param signals: A pre-made frozenset that describes the monitored signals
    :raises ValueError: If fd is not a valid file descriptor
    :returns: A new signalfd object

    .. note::
        If the passed descriptor is incorrect then various methods will
        fail and raise OSError with an appropriate message.
    """
    if fd < 0:
        _err_closed()
    # BUG FIX: ``cls.__new__()`` raised TypeError because __new__ requires
    # the class as its first argument; it must be ``cls.__new__(cls)``.
    self = cls.__new__(cls)
    object.__init__(self)
    self._sfd = fd
    self._signals = signals
    return self
def update(self, signals):
    """
    Update the mask of signals this signalfd reacts to.

    :param signals: A replacement set of signal numbers to monitor
    :raises ValueError: If :meth:`closed()` is True
    """
    if self._sfd < 0:
        _err_closed()
    mask = sigset_t()
    sigemptyset(mask)
    for signum in (signals if signals is not None else ()):
        sigaddset(mask, signum)
    # The flags argument is ignored when the descriptor is not -1.
    _signalfd(self._sfd, mask, 0)
    self._signals = frozenset(signals)
def read(self, maxsignals=None):
    """
    Read information about currently pending signals.

    :param maxsignals:
        Maximum number of signals to read. By default this is the same as
        the number of signals registered with this signalfd.
    :returns:
        A list of signalfd_siginfo objects with information about the most
        recently read signals. The list may be empty (in non-blocking mode).
    :raises ValueError: If :meth:`closed()` is True

    Read up to maxsignals recent pending signals out of the set of signals
    being monitored by this signalfd. If there are no signals yet and
    SFD_NONBLOCK was not passed to flags in :meth:`__init__()` then this
    call blocks until such a signal is ready.
    """
    # FIX: the docstring promises ValueError on a closed descriptor, and
    # update() already guards this way — without the check, read(2) on -1
    # raised a confusing OSError instead.
    if self._sfd < 0:
        _err_closed()
    if maxsignals is None:
        maxsignals = len(self._signals)
    if maxsignals <= 0:
        raise ValueError("maxsignals must be greater than 0")
    # Allocate a C array of siginfo structs and let read(2) fill it.
    info_list = (signalfd_siginfo * maxsignals)()
    num_read = read(self._sfd, byref(info_list), sizeof(info_list))
    return info_list[:num_read // sizeof(signalfd_siginfo)]
def add_curie(self, name, href):
    """Adds a CURIE definition.

    A CURIE link with the given ``name`` and ``href`` is registered on the
    document. Returns ``self`` so calls can be chained.
    """
    self.draft.set_curie(self, name, href)
    return self
<SYSTEM_TASK:> Adds ``thing`` to links or embedded resources. <END_TASK> <USER_TASK:> Description: def _add_rel(self, key, rel, thing, wrap): """Adds ``thing`` to links or embedded resources. Calling code should not use this method directly and should use ``embed`` or ``add_link`` instead. """
self.o.setdefault(key, {}) if wrap: self.o[key].setdefault(rel, []) if rel not in self.o[key]: self.o[key][rel] = thing return existing = self.o[key].get(rel) if isinstance(existing, list): existing.append(thing) return self.o[key][rel] = [existing, thing]
def about_box():
    """A simple about dialog box using the distribution data files."""
    info = wx.adv.AboutDialogInfo()
    # Copy every metadata entry onto the dialog info, converting the
    # snake_case key to the UpperCamelCase attribute wx expects.
    for key, value in metadata.items():
        setattr(info, snake2ucamel(key), value)
    wx.adv.AboutBox(info)
def sign(self, xml_doc):
    """Sign the document with a third party signatory.

    :param str xml_doc: Document self signed in plain xml
    :returns answer: Answer is given from the signatory itself if
        connected.
    """
    try:
        self.client = Client(self.url)
    except ValueError as error:
        # NOTE(review): ``.message`` exists on Python 2 exceptions only —
        # confirm the target runtime.
        self.message = error.message
    except URLError:
        self.message = 'The url you provided: ' + \
            '%s could not be reached' % self.url
def get(key, default=None):
    """return the key from the request"""
    # Form data wins over the query string.
    source = get_form() or get_query_string()
    return source.get(key, default)
def is_true(key, default=False):
    """Check if the value is in TRUE_VALUES"""
    value = get(key, default)
    # Request values may arrive as lists; use the first entry.
    if isinstance(value, list):
        value = value[0]
    if isinstance(value, bool):
        return value
    # The key was absent: hand back the caller's default untouched.
    if value is default:
        return default
    return value.lower() in TRUE_VALUES
def get_sort_limit():
    """returns the 'sort_limit' from the request"""
    limit = _.convert(get("sort_limit"), _.to_int)
    # The catalog raises IndexError for limits below 1, so treat those
    # as "no limit".
    return None if limit < 1 else limit
def get_sort_on(allowed_indexes=None):
    """returns the 'sort_on' from the request"""
    sort_on = get("sort_on")
    # Without a whitelist, or when the index is whitelisted, pass it on.
    if not allowed_indexes or sort_on in allowed_indexes:
        return sort_on
    logger.warn("Index '{}' is not in allowed_indexes".format(sort_on))
    return None
def set_json_item(key, value):
    """manipulate json data on the fly"""
    payload = get_json()
    payload[key] = value
    # Re-serialize into the request body so downstream readers see it.
    get_request()["BODY"] = json.dumps(payload)
def rounded_rectangle_region(width, height, radius):
    """Returns a rounded rectangle wx.Region"""
    bitmap = wx.Bitmap.FromRGBA(width, height)  # Mask color is #000000
    mem_dc = wx.MemoryDC(bitmap)
    mem_dc.Brush = wx.Brush((255, 255, 255))  # Any non-black would do
    mem_dc.DrawRoundedRectangle(0, 0, width, height, radius)
    mem_dc.SelectObject(wx.NullBitmap)
    bitmap.SetMaskColour((0, 0, 0))  # Black becomes the masked-out area
    return wx.Region(bitmap)
def call_after(lag):
    """
    Parametrized decorator that debounces calls to the wrapped function:
    each call schedules the function on a ``threading.Timer`` after a time
    ``lag`` and cancels any still-pending previous call.

    NOTE(review): the original docstring said ``lag`` is in milliseconds,
    but ``threading.Timer`` interprets its interval in seconds — confirm
    the intended unit with callers.
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            wrapper.timer.cancel()  # Debounce: drop the pending call
            wrapper.timer = threading.Timer(lag, func,
                                            args=args, kwargs=kwargs)
            wrapper.timer.start()
        # Seed a dummy timer so wrapper.timer.cancel exists on first call.
        wrapper.timer = threading.Timer(0, lambda: None)
        return wrapper
    return decorator
def start(self):
    """Starts watching the path and running the test jobs."""
    assert not self.watching

    def selector(evt):
        # Decide whether a filesystem event should trigger a test run.
        if evt.is_directory:
            return False
        path = evt.path
        if path in self._last_fnames: # Detected a "killing cycle"
            return False
        # Skip anything matching one of the semicolon-separated patterns.
        for pattern in self.skip_pattern.split(";"):
            if fnmatch(path, pattern.strip()):
                return False
        return True

    def watchdog_handler(evt):
        # Marshal the watchdog-thread event onto the wx main loop.
        wx.CallAfter(self._watchdog_handler, evt)

    # Force a first event
    self._watching = True
    self._last_fnames = []
    self._evts = [None]
    self._run_subprocess()

    # Starts the watchdog observer
    from .watcher import watcher
    self._watcher = watcher(path=self.directory,
                            selector=selector, handler=watchdog_handler)
    self._watcher.__enter__()  # Registers and starts the observer thread
def on_close(self, evt):
    """
    Pop-up menu and wx.EVT_CLOSE closing event
    """
    self.stop()  # Stop the DoseWatcher first
    if evt.EventObject is not self:  # Avoid deadlocks
        self.Close()  # Close the wx.Frame
    evt.Skip()
def mutator(*cache_names):
    """Decorator for ``Document`` methods that change the document.

    After the wrapped method returns (or raises), every attribute named in
    ``cache_names`` is reset to ``None`` so cached state is recomputed.
    """
    def deco(fn):
        @wraps(fn)
        def _fn(self, *args, **kwargs):
            try:
                return fn(self, *args, **kwargs)
            finally:
                # Invalidate caches even on exceptions.
                for name in cache_names:
                    setattr(self, name, None)
        return _fn
    return deco
def canonical_key(self, key):
    """Returns the canonical key for the given ``key``.

    Absolute paths are resolved against the document's base URI; other
    keys are expanded through the CURIE collection.
    """
    if key.startswith('/'):
        return urlparse.urljoin(self.base_uri, key)
    return self.curies.expand(key)
def url(self):
    """Returns the URL for the resource based on the ``self`` link.

    Returns the ``href`` of the document's ``self`` link if it has one,
    ``None`` if the document lacks a ``self`` link, or the ``href`` of the
    first ``self`` link if it has more than one.
    """
    if 'self' not in self.links:
        return None
    target = self.links['self']
    if isinstance(target, list):
        # Multiple self links: the first one wins.
        for candidate in target:
            return candidate.url()
    return target.url()
def set_property(self, key, value):
    """Set a property on the document.

    Calling code should use this method instead of modifying
    ``properties`` directly. Creates or replaces the property named
    ``key`` with ``value``; reserved names (``_links``/``_embedded``)
    are silently ignored.
    """
    if key in self.RESERVED_ATTRIBUTE_NAMES:
        return  # silently refuse to shadow reserved structure
    self.o[key] = value
def delete_property(self, key):
    """Remove a property from the document.

    Calling code should use this method instead of modifying
    ``properties`` directly. Raises ``KeyError`` when ``key`` is missing
    or is a reserved name (which is never exposed as a property).
    """
    if key in self.RESERVED_ATTRIBUTE_NAMES:
        raise KeyError(key)
    del self.o[key]
def delete_link(self, rel=None, href=lambda _: True):
    """Deletes links from the document.

    Calling code should use this method to remove links instead of
    modifying ``links`` directly.

    With no arguments every link is deleted. If ``rel`` is given, only
    links of that relationship type are deleted; if ``href`` is given
    (a string or a predicate over hrefs), only matching links are
    deleted; with both, the intersection is deleted.
    """
    if not LINKS_KEY in self.o:
        return
    links = self.o[LINKS_KEY]
    if rel is None:
        # No rel: recurse once per relationship type (copy the keys,
        # since the recursion mutates the dict).
        for rel in list(links.keys()):
            self.delete_link(rel, href)
        return
    # Normalize href into a predicate.
    if callable(href):
        href_filter = href
    else:
        href_filter = lambda x: x == href
    links_for_rel = links.setdefault(rel, [])
    # A single link may be stored as a bare dict; normalize to a list.
    if isinstance(links_for_rel, dict):
        links_for_rel = [links_for_rel]
    # Keep only links whose href does NOT match the filter.
    new_links_for_rel = []
    for link in links_for_rel:
        if not href_filter(link['href']):
            new_links_for_rel.append(link)
    if new_links_for_rel:
        # Collapse a single survivor back to a bare dict.
        if len(new_links_for_rel) == 1:
            new_links_for_rel = new_links_for_rel[0]
        links[rel] = new_links_for_rel
    else:
        del links[rel]
    # Drop the _links container entirely when it becomes empty.
    if not self.o[LINKS_KEY]:
        del self.o[LINKS_KEY]
def from_object(cls, o, base_uri=None, parent_curies=None, draft=AUTO):
    """Returns a new ``Document`` based on a JSON object or array.

    Arguments:

    - ``o``: deserialized JSON dict for the new ``Document``, or a
      ``list`` of such dicts (a list of documents is returned).
    - ``base_uri``: optional URL used when expanding relative URLs.
    - ``parent_curies``: optional ``CurieCollection`` of the embedding
      parent document; calling code should not normally provide it.
    - ``draft``: a ``Draft`` selecting the spec version (default
      ``drafts.AUTO``).
    """
    if isinstance(o, list):
        return [cls.from_object(item, base_uri, parent_curies, draft)
                for item in o]
    return cls(o, base_uri, parent_curies, draft)
def empty(cls, base_uri=None, draft=AUTO):
    """Returns an empty ``Document``.

    Arguments:

    - ``base_uri``: optional URL used when expanding relative URLs in the
      document.
    - ``draft``: a ``Draft`` selecting the spec version (default
      ``drafts.AUTO``).
    """
    blank = {}
    return cls.from_object(blank, base_uri=base_uri, draft=draft)
def delete_embedded(self, rel=None, href=lambda _: True):
    """Removes an embedded resource from this document.

    Calling code should use this method instead of modifying
    ``embedded`` directly.

    With no arguments every embedded resource is removed. If ``rel`` is
    given, only resources of that relationship type are removed; if
    ``href`` is given (a string or a predicate), only resources whose
    ``self`` link matches it are removed; with both, the intersection.
    """
    if EMBEDDED_KEY not in self.o:
        return
    if rel is None:
        # No rel: recurse once per relationship type (copy the keys,
        # since the recursion mutates the dict).
        for rel in list(self.o[EMBEDDED_KEY].keys()):
            self.delete_embedded(rel, href)
        return
    if rel not in self.o[EMBEDDED_KEY]:
        return
    # Normalize href into a predicate over self-link URLs.
    if callable(href):
        url_filter = href
    else:
        url_filter = lambda x: x == href
    rel_embeds = self.o[EMBEDDED_KEY][rel]
    # A single embedded resource stored as a bare dict: remove the whole
    # rel entry (and the container, if now empty).
    if isinstance(rel_embeds, dict):
        del self.o[EMBEDDED_KEY][rel]
        if not self.o[EMBEDDED_KEY]:
            del self.o[EMBEDDED_KEY]
        return
    # Keep only resources whose self-link URL does NOT match the filter.
    new_rel_embeds = []
    for embedded in list(rel_embeds):
        embedded_doc = Document(embedded, self.base_uri)
        if not url_filter(embedded_doc.url()):
            new_rel_embeds.append(embedded)
    if not new_rel_embeds:
        del self.o[EMBEDDED_KEY][rel]
    elif len(new_rel_embeds) == 1:
        # Collapse a single survivor back to a bare dict.
        self.o[EMBEDDED_KEY][rel] = new_rel_embeds[0]
    else:
        self.o[EMBEDDED_KEY][rel] = new_rel_embeds
    # Drop the _embedded container entirely when it becomes empty.
    if not self.o[EMBEDDED_KEY]:
        del self.o[EMBEDDED_KEY]
def drop_curie(self, name):
    """Removes a CURIE.

    The CURIE link with the given name is removed from the document.
    """
    curies = self.o[LINKS_KEY][self.draft.curies_rel]
    # A single CURIE may be stored as a bare dict rather than a list.
    if isinstance(curies, dict) and curies['name'] == name:
        del self.o[LINKS_KEY][self.draft.curies_rel]
        return
    for i, curie in enumerate(curies):
        if curie['name'] == name:
            # Safe in-place delete: we break immediately after mutating.
            del curies[i]
            break
    # FIX: removed a no-op ``continue`` that was the last statement of
    # the loop body (dead code).
def mask_left(self, n_seq_bases, mask="S"):
    """
    Return a new cigar with cigar string where the first `n_seq_bases`
    are soft-masked unless they are already hard-masked.

    :param n_seq_bases: number of leading read bases to mask.
    :param mask: masking operation to apply (default ``"S"``).
    """
    cigs = list(self.items())
    new_cigs = []

    cum_len = 0  # FIX: removed unused local ``c = self.cigar``
    for i, (l, op) in enumerate(cigs):
        if op in Cigar.read_consuming_ops:
            cum_len += l
        if op == "H":
            cum_len += l
            new_cigs.append(cigs[i])
        elif cum_len < n_seq_bases:
            new_cigs.append(cigs[i])
        else:
            # The current cigar element is split by the masking.
            right_extra = cum_len - n_seq_bases
            # FIX: honor the ``mask`` parameter — it was accepted but the
            # mask op was hard-coded to 'S' (identical behavior at the
            # default).
            new_cigs.append((l - right_extra, mask))
            if right_extra != 0:
                new_cigs.append((right_extra, cigs[i][1]))
        if cum_len >= n_seq_bases:
            break
    # Everything before the split point is masked, except existing H/S.
    new_cigs[:i] = [(l, op if op in "HS" else mask)
                    for l, op in new_cigs[:i]]
    new_cigs.extend(cigs[i + 1:])
    return Cigar(Cigar.string_from_elements(new_cigs)).merge_like_ops()
def mask_right(self, n_seq_bases, mask="S"):
    """
    Return a new cigar with cigar string where the last `n_seq_bases` are
    soft-masked unless they are already hard-masked.

    Implemented by reversing the cigar, masking from the left, and
    reversing the result back.
    """
    reversed_cigar = Cigar(self._reverse_cigar())
    masked = reversed_cigar.mask_left(n_seq_bases, mask)
    return Cigar(masked._reverse_cigar())
def runner(test_command, work_dir=None):
    """
    Internal test job runner context manager.

    Runs ``test_command`` in a subprocess with two FlushStreamThread
    instances (one per standard stream) and yields the spawned
    ``subprocess.Popen`` instance. Leaving the context kills the process
    and joins the flushing threads; call ``process.wait`` inside the
    context to avoid the kill.
    """
    process = subprocess.Popen(
        test_command,
        bufsize=0,
        shell=True,
        cwd=work_dir,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    with flush_stream_threads(process):
        try:
            yield process
        finally:
            if process.poll() is None:  # Still running on context exit
                process.terminate()
def kill(self, sig=signal.SIGTERM):
    """
    Terminate the test job.

    Kill the subprocess if it was spawned, abort the spawning process
    otherwise (collect the outcome from self.killed / self.spawned).
    Safe to call from any thread; behaves like self.join() when the
    thread isn't alive. ``sig`` should be signal.SIGKILL (Linux/OSX) or
    signal.SIGTERM (default; maps to TerminateProcess on Windows).
    """
    while self.is_alive():
        self.killed = True
        time.sleep(POLLING_DELAY)  # "Was a process spawned?" polling
        if not self.spawned:
            # Not yet: keep polling until self.run returns or runner yields.
            continue
        if self.process.poll() is None: # It's running
            self.process.send_signal(sig)
        try: # There's no os.WNOHANG in Windows
            os.waitpid(self.process.pid, getattr(os, "WNOHANG", 1))
        except OSError: # Ignore "No child processes" error
            pass
        break # We already either killed or finished it
    self.join()
def loader(schema, validator=CerberusValidator, update=None):
    """Create a load function based on schema dict and Validator class.

    :param schema: a Cerberus schema dict.
    :param validator: validator class; must subclass
      more.cerberus.CerberusValidator (the default).
    :param update: passed through to the validator; when ``True`` the
      ``required`` rules are skipped. Defaults to ``True`` for PUT/PATCH
      requests and ``False`` otherwise.

    The returned ``load`` function validates a request JSON body against
    the schema and raises :class:`more.cerberus.ValidationError` on
    failure.
    """
    if not issubclass(validator, CerberusValidator):
        raise TypeError(
            "Validator must be a subclass of more.cerberus.CerberusValidator"
        )
    return partial(load, schema, validator, update)
<SYSTEM_TASK:> Draws the signature mark. <END_TASK> <USER_TASK:> Description: def render(self, rectangle, data): """Draws the signature mark. Note that this draws OUTSIDE the rectangle we're given. If cropping is involved, then this obviously won't work."""
# Usable vertical extent after trimming the top/bottom margins.
usable = (1.0 - 2.0 * self.margin) * rectangle.h
base = self.margin * rectangle.h
# Each of the `total` marks gets an equal vertical slice.
step = 1.0 / float(self.total)
lo = base + usable * float(self.index) * step
hi = lo + step * usable
canvas = data['output']
with canvas:
    canvas.translate(rectangle.x, rectangle.y)
    # Triangle pointing down at this signature's slot (drawn relative to
    # the rectangle's left edge, so it extends outside the rectangle).
    canvas.draw_polygon(
        0, hi,
        -self.width, lo,
        self.width, lo,
        fill=self.color
    )
<SYSTEM_TASK:> Supernodal multifrontal projected inverse. The routine computes the projected inverse <END_TASK> <USER_TASK:> Description: def projected_inverse(L): """ Supernodal multifrontal projected inverse. The routine computes the projected inverse .. math:: Y = P(L^{-T}L^{-1}) where :math:`L` is a Cholesky factor. On exit, the argument :math:`L` contains the projected inverse :math:`Y`. :param L: :py:class:`cspmatrix` (factor) """
assert isinstance(L, cspmatrix) and L.is_factor is True, "L must be a cspmatrix factor"

# Symbolic factorization data (supernode partition and relative indices).
n = L.symb.n  # NOTE(review): not used below
snpost = L.symb.snpost
snptr = L.symb.snptr
chptr = L.symb.chptr
chidx = L.symb.chidx

relptr = L.symb.relptr
relidx = L.symb.relidx
blkptr = L.symb.blkptr
blkval = L.blkval

stack = []

# Traverse supernodes in reverse postorder: parents before children,
# propagating "update matrices" down the elimination tree via `stack`.
for k in reversed(list(snpost)):

    nn = snptr[k+1]-snptr[k]       # |Nk|
    na = relptr[k+1]-relptr[k]     # |Ak|
    nj = na + nn

    # invert factor of D_{Nk,Nk}
    lapack.trtri(blkval, offsetA = blkptr[k], ldA = nj, n = nn)

    # zero-out strict upper triangular part of {Nj,Nj} block (just in case!)
    for i in range(1,nn): blas.scal(0.0, blkval, offset = blkptr[k] + nj*i, n = i)

    # compute inv(D_{Nk,Nk}) (store in 1,1 block of F)
    F = matrix(0.0, (nj,nj))
    blas.syrk(blkval, F, trans = 'T', offsetA = blkptr[k], ldA = nj, n = nn, k = nn)

    # if supernode k is not a root node:
    if na > 0:
        # copy "update matrix" to 2,2 block of F
        Vk = stack.pop()
        lapack.lacpy(Vk, F, ldB = nj, offsetB = nn*nj+nn, m = na, n = na, uplo = 'L')

        # compute S_{Ak,Nk} = -Vk*L_{Ak,Nk}; store in 2,1 block of F
        blas.symm(Vk, blkval, F, m = na, n = nn, offsetB = blkptr[k]+nn,\
                  ldB = nj, offsetC = nn, ldC = nj, alpha = -1.0)

        # compute S_nn = inv(D_{Nk,Nk}) - S_{Ak,Nk}'*L_{Ak,Nk}; store in 1,1 block of F
        blas.gemm(F, blkval, F, transA = 'T', m = nn, n = nn, k = na,\
                  offsetA = nn, alpha = -1.0, beta = 1.0,\
                  offsetB = blkptr[k]+nn, ldB = nj)

    # extract update matrices if supernode k has any children
    for ii in range(chptr[k],chptr[k+1]):
        i = chidx[ii]
        stack.append(frontal_get_update(F, relidx, relptr, i))

    # copy S_{Jk,Nk} (i.e., 1,1 and 2,1 blocks of F) to blkval
    lapack.lacpy(F, blkval, m = nj, n = nn, offsetB = blkptr[k], ldB = nj, uplo = 'L')

# L now holds the projected inverse, not a Cholesky factor.
L._is_factor = False

return
<SYSTEM_TASK:> Append the parser arguments for the 'system' commands <END_TASK> <USER_TASK:> Description: def define_system_args(subparsers): """Append the parser arguments for the 'system' commands"""
# "system" top-level command group.
group = subparsers.add_parser("system", help='Available commands: \'info\'')
group_sub = group.add_subparsers(help='System commands')

# "system info" sub-command and its arguments.
info = group_sub.add_parser('info', help='Get system status information')
info.add_argument('--src', required=True, dest='src', metavar='src',
                  help='The instance name of the target SDC (must match the name in sdc-hosts.yml)')
# Dispatch "system info" to its handler.
info.set_defaults(func=info_command)
<SYSTEM_TASK:> Symmetric permutation of a symmetric sparse matrix. <END_TASK> <USER_TASK:> Description: def perm(A, p): """ Symmetric permutation of a symmetric sparse matrix. :param A: :py:class:`spmatrix` :param p: :py:class:`matrix` or :class:`list` of length `A.size[0]` """
assert isinstance(A,spmatrix), "argument must be a sparse matrix" assert A.size[0] == A.size[1], "A must be a square matrix" assert A.size[0] == len(p), "length of p must be equal to the order of A" return A[p,p]
<SYSTEM_TASK:> Identifies the HAL draft level of a given JSON object. <END_TASK> <USER_TASK:> Description: def detect(self, obj): """Identifies the HAL draft level of a given JSON object."""
links = obj.get(LINKS_KEY, {}) for detector in [LATEST, DRAFT_3]: if detector.draft.curies_rel in links: return detector.detect(obj) return LATEST.detect(obj)
<SYSTEM_TASK:> converts an object to a list <END_TASK> <USER_TASK:> Description: def to_list(thing): """ converts an object to a list >>> to_list(1) [1] >>> to_list([1,2,3]) [1, 2, 3] >>> to_list(("a", "b", "c")) ['a', 'b', 'c'] >>> to_list(dict(a=1, b=2)) [{'a': 1, 'b': 2}] >>> to_list(None) [] >>> to_list("['a', 'b', 'c']") ['a', 'b', 'c'] >>> to_list("") [''] >>> to_list([]) [] >>> to_list("['[]']") ['[]'] >>> sorted(to_list(set(["a", "b", "c"]))) ['a', 'b', 'c'] """
# None maps to an empty list.
if thing is None:
    return []
# Sets convert directly.
if isinstance(thing, set):
    return list(thing)
# Python 2 string types (str/unicode).
if isinstance(thing, types.StringTypes):
    if thing.startswith("["):
        # handle a list inside a string coming from the batch navigation
        return ast.literal_eval(thing)
# Any other scalar (including plain strings and dicts) gets wrapped.
if not (is_list(thing) or is_tuple(thing)):
    return [thing]
# Lists/tuples: return a fresh list copy.
return list(thing)
<SYSTEM_TASK:> Extracts a list of values from a collection of dictionaries <END_TASK> <USER_TASK:> Description: def pluck(col, key, default=None): """ Extracts a list of values from a collection of dictionaries >>> stooges = [{"name": "moe", "age": 40}, ... {"name": "larry", "age": 50}, ... {"name": "curly", "age": 60}] >>> pluck(stooges, "name") ['moe', 'larry', 'curly'] It only works with collections >>> curly = stooges.pop() >>> pluck(curly, "age") Traceback (most recent call last): ... RuntimeError: First argument must be a list or tuple """
# Only collections are accepted; `fail` raises RuntimeError.
if not (is_list(col) or is_tuple(col)):
    fail("First argument must be a list or tuple")

def _block(dct):
    # Non-dict entries contribute an empty list rather than raising.
    if not is_dict(dct):
        return []
    return dct.get(key, default)

# NOTE: Python 2 map -> returns a list of extracted values.
return map(_block, col)
<SYSTEM_TASK:> Rename the keys of a dictionary with the given mapping <END_TASK> <USER_TASK:> Description: def rename(dct, mapping): """ Rename the keys of a dictionary with the given mapping >>> rename({"a": 1, "BBB": 2}, {"a": "AAA"}) {'AAA': 1, 'BBB': 2} """
def _block(memo, key):
    # Only rename keys actually present in the source dict.
    if key in dct:
        memo[mapping[key]] = dct[key]
        return memo
    else:
        return memo

# Start from dct minus the keys being renamed, then fold the renamed
# entries back in (iterating over the mapping's keys).
return reduce(_block, mapping, omit(dct, *mapping.keys()))
<SYSTEM_TASK:> Returns a collection of dictionaries with the keys renamed according to <END_TASK> <USER_TASK:> Description: def alias(col, mapping): """ Returns a collection of dictionaries with the keys renamed according to the mapping >>> libraries = [{"isbn": 1, "ed": 1}, {"isbn": 2, "ed": 2}] >>> alias(libraries, {"ed": "edition"}) [{'edition': 1, 'isbn': 1}, {'edition': 2, 'isbn': 2}] >>> alias({"a": 1}, {"a": "b"}) [{'b': 1}] """
if not is_list(col): col = [col] def _block(dct): return rename(dct, mapping) return map(_block, col)
<SYSTEM_TASK:> List STB in the network. <END_TASK> <USER_TASK:> Description: async def discover(ignore_list=[], max_wait=30, loop=None): """List STB in the network."""
stbs = [] try: async with timeout(max_wait, loop=loop): def responses_callback(notify): """Queue notify messages.""" _LOGGER.debug("Found: %s", notify.ip_address) stbs.append(notify.ip_address) mr_protocol = await install_mediaroom_protocol(responses_callback=responses_callback) await asyncio.sleep(max_wait) except asyncio.TimeoutError: mr_protocol.close() _LOGGER.debug("discover() timeout!") return list(set([stb for stb in stbs if stb not in ignore_list]))
<SYSTEM_TASK:> Send remote command to STB. <END_TASK> <USER_TASK:> Description: async def send_cmd(self, cmd, loop=None): """Send remote command to STB."""
_LOGGER.info("Send cmd = %s", cmd) if cmd not in COMMANDS and cmd not in range(0, 999): _LOGGER.error("Unknown command") raise PyMediaroomError("Unknown commands") keys = [] if cmd in range(0, 999): for character in str(cmd): keys.append(COMMANDS["Number"+str(character)]) if len(keys) < 3: keys.append(COMMANDS["OK"]) self.current_channel = cmd else: keys = [COMMANDS[cmd]] try: async with timeout(OPEN_CONTROL_TIMEOUT, loop=loop): async with self.lock: _LOGGER.debug("Connecting to %s:%s", self.stb_ip, REMOTE_CONTROL_PORT) stb_recv, stb_send = await asyncio.open_connection(self.stb_ip, REMOTE_CONTROL_PORT, loop=loop) await stb_recv.read(6) _LOGGER.info("Connected to %s:%s", self.stb_ip, REMOTE_CONTROL_PORT) for key in keys: _LOGGER.debug("%s key=%s", cmd, key) stb_send.write("key={}\n".format(key).encode('UTF-8')) _ = await stb_recv.read(3) await asyncio.sleep(0.300) except asyncio.TimeoutError as error: _LOGGER.warning(error) raise PyMediaroomError("Timeout connecting to {}".format(self.stb_ip)) except ConnectionRefusedError as error: _LOGGER.warning(error) raise PyMediaroomError("Connection refused to {}".format(self.stb_ip))
<SYSTEM_TASK:> get an index by name <END_TASK> <USER_TASK:> Description: def get_index(self, name): """get an index by name TODO: Combine indexes of relevant catalogs depending on the portal_type which is searched for. """
catalog = self.get_catalog() index = catalog._catalog.getIndex(name) logger.debug("get_index={} of catalog '{}' --> {}".format( name, catalog.__name__, index)) return index
<SYSTEM_TASK:> Convert the value for a given index <END_TASK> <USER_TASK:> Description: def to_index_value(self, value, index): """Convert the value for a given index """
# ZPublisher records can be passed to the catalog as is. if isinstance(value, HTTPRequest.record): return value if isinstance(index, basestring): index = self.get_index(index) if index.id == "portal_type": return filter(lambda x: x, _.to_list(value)) if index.meta_type == "DateIndex": return DateTime(value) if index.meta_type == "BooleanIndex": return bool(value) if index.meta_type == "KeywordIndex": return value.split(",") return value
<SYSTEM_TASK:> create a query suitable for the catalog <END_TASK> <USER_TASK:> Description: def make_query(self, **kw): """create a query suitable for the catalog """
query = kw.pop("query", {}) query.update(self.get_request_query()) query.update(self.get_custom_query()) query.update(self.get_keyword_query(**kw)) sort_on, sort_order = self.get_sort_spec() if sort_on and "sort_on" not in query: query.update({"sort_on": sort_on}) if sort_order and "sort_order" not in query: query.update({"sort_order": sort_order}) logger.info("make_query:: query={} | catalog={}".format( query, self.catalog)) return query
<SYSTEM_TASK:> Checks the request for known catalog indexes and converts the values <END_TASK> <USER_TASK:> Description: def get_request_query(self): """Checks the request for known catalog indexes and converts the values to fit the type of the catalog index. :param catalog: The catalog to build the query for :type catalog: ZCatalog :returns: Catalog query :rtype: dict """
query = {} # only known indexes get observed indexes = self.catalog.get_indexes() for index in indexes: # Check if the request contains a parameter named like the index value = req.get(index) # No value found, continue if value is None: continue # Convert the found value to format understandable by the index index_value = self.catalog.to_index_value(value, index) # Conversion returned None, continue if index_value is None: continue # Append the found value to the query query[index] = index_value return query
<SYSTEM_TASK:> Extracts custom query keys from the index. <END_TASK> <USER_TASK:> Description: def get_custom_query(self): """Extracts custom query keys from the index. Parameters which get extracted from the request: `q`: Passes the value to the `SearchableText` `path`: Creates a path query `recent_created`: Creates a date query `recent_modified`: Creates a date query :param catalog: The catalog to build the query for :type catalog: ZCatalog :returns: Catalog query :rtype: dict """
query = {} # searchable text queries q = req.get_query() if q: query["SearchableText"] = q # physical path queries path = req.get_path() if path: query["path"] = {'query': path, 'depth': req.get_depth()} # special handling for recent created/modified recent_created = req.get_recent_created() if recent_created: date = api.calculate_delta_date(recent_created) query["created"] = {'query': date, 'range': 'min'} recent_modified = req.get_recent_modified() if recent_modified: date = api.calculate_delta_date(recent_modified) query["modified"] = {'query': date, 'range': 'min'} return query
<SYSTEM_TASK:> Generates a query from the given keywords. <END_TASK> <USER_TASK:> Description: def get_keyword_query(self, **kw): """Generates a query from the given keywords. Only known indexes make it into the generated query. :returns: Catalog query :rtype: dict """
query = dict() # Only known indexes get observed indexes = self.catalog.get_indexes() # Handle additional keyword parameters for k, v in kw.iteritems(): # handle uid in keywords if k.lower() == "uid": k = "UID" # handle portal_type in keywords if k.lower() == "portal_type": if v: v = _.to_list(v) if k not in indexes: logger.warn("Skipping unknown keyword parameter '%s=%s'" % (k, v)) continue if v is None: logger.warn("Skip None value in kw parameter '%s=%s'" % (k, v)) continue logger.debug("Adding '%s=%s' to query" % (k, v)) query[k] = v return query
<SYSTEM_TASK:> Solve a triangular system of equations with multiple righthand sides. <END_TASK> <USER_TASK:> Description: def trsm(self,B,trans='N'): r""" Solves a triangular system of equations with multiple righthand sides. Computes .. math:: B &:= L^{-1} B \text{ if trans is 'N'} B &:= L^{-T} B \text{ if trans is 'T'} """
# Forward solve: dense Cholesky factor first, then product-form factors.
if trans=='N':
    cp.trsm(self._L0,B)
    pftrsm(self._V,self._L,self._B,B,trans='N')
# Transposed solve: same two solves in reverse order.
elif trans=='T':
    pftrsm(self._V,self._L,self._B,B,trans='T')
    cp.trsm(self._L0,B,trans='T')
# Any other string is a value error; non-strings are a type error.
elif type(trans) is str:
    raise ValueError("trans must be 'N' or 'T'")
else:
    raise TypeError("trans must be 'N' or 'T'")
return
<SYSTEM_TASK:> Renders the given layout manager on a page of the given canvas. <END_TASK> <USER_TASK:> Description: def render_to_reportlab_canvas(rl_canvas, papersize_tuple, layout): """Renders the given layout manager on a page of the given canvas."""
# Size the reportlab page, then hand the layout a rectangle covering it.
rl_canvas.setPageSize(papersize_tuple)
page_rect = Rectangle(0, 0, *papersize_tuple)
render_data = dict(output=ReportlabOutput(rl_canvas))
layout.render(page_rect, render_data)
<SYSTEM_TASK:> Attaches HTTP Basic Authentication to the given Request object. <END_TASK> <USER_TASK:> Description: def http_basic(r, username, password): """Attaches HTTP Basic Authentication to the given Request object. Arguments should be considered non-positional. """
username = str(username) password = str(password) auth_s = b64encode('%s:%s' % (username, password)) r.headers['Authorization'] = ('Basic %s' % auth_s) return r
<SYSTEM_TASK:> Attaches HTTP Digest Authentication to the given Request object. <END_TASK> <USER_TASK:> Description: def http_digest(r, username, password): """Attaches HTTP Digest Authentication to the given Request object. Arguments should be considered non-positional. """
def handle_401(r): """Takes the given response and tries digest-auth, if needed.""" s_auth = r.headers.get('www-authenticate', '') if 'digest' in s_auth.lower(): last_nonce = '' nonce_count = 0 chal = parse_dict_header(s_auth.replace('Digest ', '')) realm = chal['realm'] nonce = chal['nonce'] qop = chal.get('qop') algorithm = chal.get('algorithm', 'MD5') opaque = chal.get('opaque', None) algorithm = algorithm.upper() # lambdas assume digest modules are imported at the top level if algorithm == 'MD5': H = lambda x: hashlib.md5(x).hexdigest() elif algorithm == 'SHA': H = lambda x: hashlib.sha1(x).hexdigest() # XXX MD5-sess KD = lambda s, d: H("%s:%s" % (s, d)) if H is None: return None # XXX not implemented yet entdig = None p_parsed = urlparse(r.request.url) path = p_parsed.path + p_parsed.query A1 = "%s:%s:%s" % (username, realm, password) A2 = "%s:%s" % (r.request.method, path) if qop == 'auth': if nonce == last_nonce: nonce_count += 1 else: nonce_count = 1 last_nonce = nonce ncvalue = '%08x' % nonce_count cnonce = (hashlib.sha1("%s:%s:%s:%s" % ( nonce_count, nonce, time.ctime(), randombytes(8))) .hexdigest()[:16] ) noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2)) respdig = KD(H(A1), noncebit) elif qop is None: respdig = KD(H(A1), "%s:%s" % (nonce, H(A2))) else: # XXX handle auth-int. return None # XXX should the partial digests be encoded too? base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ 'response="%s"' % (username, realm, nonce, path, respdig) if opaque: base += ', opaque="%s"' % opaque if entdig: base += ', digest="%s"' % entdig base += ', algorithm="%s"' % algorithm if qop: base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce) r.request.headers['Authorization'] = 'Digest %s' % (base) r.request.send(anyway=True) _r = r.request.response _r.history.append(r) return _r return r r.hooks['response'] = handle_401 return r
<SYSTEM_TASK:> Given an auth tuple, return an expanded version. <END_TASK> <USER_TASK:> Description: def dispatch(t): """Given an auth tuple, return an expanded version."""
if not t: return t else: t = list(t) # Make sure they're passing in something. assert len(t) >= 2 # If only two items are passed in, assume HTTPBasic. if (len(t) == 2): t.insert(0, 'basic') # Allow built-in string referenced auths. if isinstance(t[0], basestring): if t[0] in ('basic', 'forced_basic'): t[0] = http_basic elif t[0] in ('digest',): t[0] = http_digest # Return a custom callable. return (t[0], tuple(t[1:]))
<SYSTEM_TASK:> A simple HTML help dialog box using the distribution data files. <END_TASK> <USER_TASK:> Description: def help_box(): """A simple HTML help dialog box using the distribution data files."""
# Build a modal, resizable dialog hosting the HTML help page.
flags = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER
dlg = wx.Dialog(None, wx.ID_ANY, HELP_TITLE, style=flags, size=(620, 450))
viewer = HtmlHelp(dlg, wx.ID_ANY)
viewer.page = build_help_html()
dlg.ShowModal()
dlg.Destroy()
<SYSTEM_TASK:> Callback function for the audio stream. Don't use directly. <END_TASK> <USER_TASK:> Description: def get_frame(self, outdata, frames, timedata, status): """ Callback function for the audio stream. Don't use directly. """
# Stop the stream once listening has been switched off.
if not self.keep_listening:
    raise sd.CallbackStop

try:
    chunk = self.queue.get_nowait()
except Empty:
    # Buffer underrun: emit silence for this callback.
    outdata.fill(0)
else:
    if chunk.shape[0] == frames:
        outdata[:] = chunk
    else:
        # Frame-count mismatch: emit silence rather than letting the
        # slice assignment raise inside the audio callback.
        outdata.fill(0)
<SYSTEM_TASK:> Returns the component product of this vector and the given <END_TASK> <USER_TASK:> Description: def get_component_product(self, other): """Returns the component product of this vector and the given other vector."""
return Point(self.x * other.x, self.y * other.y)
<SYSTEM_TASK:> Returns a vector of unit length, unless it is the zero <END_TASK> <USER_TASK:> Description: def get_normalized(self): """Returns a vector of unit length, unless it is the zero vector, in which case it is left as is."""
magnitude = self.get_magnitude() if magnitude > 0: magnitude = 1.0 / magnitude return Point(self.x * magnitude, self.y * magnitude) else: return self
<SYSTEM_TASK:> Rotates this vector through the given anti-clockwise angle <END_TASK> <USER_TASK:> Description: def get_rotated(self, angle): """Rotates this vector through the given anti-clockwise angle in radians."""
ca = math.cos(angle) sa = math.sin(angle) return Point(self.x*ca-self.y*sa, self.x*sa+self.y*ca)
<SYSTEM_TASK:> Returns the magnitude of this vector. <END_TASK> <USER_TASK:> Description: def get_magnitude(self): """Returns the magnitude of this vector."""
return math.sqrt(self.x*self.x + self.y*self.y)
<SYSTEM_TASK:> Returns the square of the magnitude of this vector. <END_TASK> <USER_TASK:> Description: def get_magnitude_squared(self): """Returns the square of the magnitude of this vector."""
return self.x*self.x + self.y*self.y
<SYSTEM_TASK:> Returns the scalar product of this vector with the given <END_TASK> <USER_TASK:> Description: def get_scalar_product(self, other): """Returns the scalar product of this vector with the given other vector."""
return self.x*other.x+self.y*other.y
<SYSTEM_TASK:> Returns the smallest angle between this vector and the <END_TASK> <USER_TASK:> Description: def get_angle_between(self, other): """Returns the smallest angle between this vector and the given other vector."""
# The scalar product is the sum of the squares of the # magnitude times the cosine of the angle - so normalizing the # vectors first means the scalar product is just the cosine of # the angle. normself = self.get_normalized() normother = other.get_normalized() sp = normself.get_scalar_product(normother) return math.acos(sp)
<SYSTEM_TASK:> Updates this vector so its components are the lower of its <END_TASK> <USER_TASK:> Description: def get_minimum(self, other): """Updates this vector so its components are the lower of its current components and those of the given other value."""
return Point(min(self.x, other.x), min(self.y, other.y))
<SYSTEM_TASK:> Updates this vector so its components are the higher of its <END_TASK> <USER_TASK:> Description: def get_maximum(self, other): """Updates this vector so its components are the higher of its current components and those of the given other value."""
return Point(max(self.x, other.x), max(self.y, other.y))
<SYSTEM_TASK:> Returns a random vector in the given range. <END_TASK> <USER_TASK:> Description: def get_random(min_pt, max_pt): """Returns a random vector in the given range."""
result = Point(random.random(), random.random()) return result.get_component_product(max_pt - min_pt) + min_pt
<SYSTEM_TASK:> Generate a list of candidate temporary directories which <END_TASK> <USER_TASK:> Description: def _candidate_tempdir_list(): """Generate a list of candidate temporary directories which _get_default_tempdir will try."""
dirlist = [] # First, try the environment. for envname in 'TMPDIR', 'TEMP', 'TMP': dirname = _os.getenv(envname) if dirname: dirlist.append(dirname) # Failing that, try OS-specific locations. if _os.name == 'nt': dirlist.extend([ r'c:\temp', r'c:\tmp', r'\temp', r'\tmp' ]) else: dirlist.extend([ '/tmp', '/var/tmp', '/usr/tmp' ]) # As a last resort, the current directory. try: dirlist.append(_os.getcwd()) except (AttributeError, OSError): dirlist.append(_os.curdir) return dirlist
<SYSTEM_TASK:> Calculate the default directory to use for temporary files. <END_TASK> <USER_TASK:> Description: def _get_default_tempdir(): """Calculate the default directory to use for temporary files. This routine should be called exactly once. We determine whether or not a candidate temp dir is usable by trying to create and write to a file in that directory. If this is successful, the test file is deleted. To prevent denial of service, the name of the test file must be randomized."""
namer = _RandomNameSequence()
dirlist = _candidate_tempdir_list()

for dir in dirlist:
    if dir != _os.curdir:
        dir = _os.path.abspath(dir)
    # Try only a few names per directory.
    for seq in range(100):
        name = next(namer)
        filename = _os.path.join(dir, name)
        try:
            # Create, write, and remove a probe file to prove the
            # directory is actually writable.
            fd = _os.open(filename, _bin_openflags, 0o600)
            try:
                try:
                    with _io.open(fd, 'wb', closefd=False) as fp:
                        fp.write(b'blat')
                finally:
                    _os.close(fd)
            finally:
                _os.unlink(filename)
            return dir
        except FileExistsError:
            # Name collision: try another random name.
            pass
        except OSError:
            break   # no point trying more names in this directory
raise FileNotFoundError(_errno.ENOENT,
                        "No usable temporary directory found in %s" %
                        dirlist)
<SYSTEM_TASK:> Common setup sequence for all user-callable interfaces. <END_TASK> <USER_TASK:> Description: def _get_candidate_names(): """Common setup sequence for all user-callable interfaces."""
global _name_sequence
# Double-checked locking: cheap unlocked test, then re-check under the
# lock so only one thread creates the shared name sequence.
if _name_sequence is None:
    _once_lock.acquire()
    try:
        if _name_sequence is None:
            _name_sequence = _RandomNameSequence()
    finally:
        _once_lock.release()
return _name_sequence
<SYSTEM_TASK:> User-callable function to create and return a unique temporary <END_TASK> <USER_TASK:> Description: def mkdtemp(suffix="", prefix=template, dir=None): """User-callable function to create and return a unique temporary directory. The return value is the pathname of the directory. Arguments are as for mkstemp, except that the 'text' argument is not accepted. The directory is readable, writable, and searchable only by the creating user. Caller is responsible for deleting the directory when done with it. """
if dir is None:
    dir = gettempdir()

names = _get_candidate_names()

# Keep trying random names until mkdir succeeds or TMP_MAX is exhausted.
for seq in range(TMP_MAX):
    name = next(names)
    file = _os.path.join(dir, prefix + name + suffix)
    try:
        # 0o700: readable/writable/searchable by the creating user only.
        _os.mkdir(file, 0o700)
        return file
    except FileExistsError:
        continue    # try again

raise FileExistsError(_errno.EEXIST,
                      "No usable temporary directory name found")
<SYSTEM_TASK:> Conditionally wrap function to ensure 5 input arguments <END_TASK> <USER_TASK:> Description: def _ensure_5args(func): """ Conditionally wrap function to ensure 5 input arguments Parameters ---------- func: callable with four or five positional arguments Returns ------- callable which possibly ignores 0 or 1 positional arguments """
if func is None: return None self_arg = 1 if inspect.ismethod(func) else 0 if len(inspect.getargspec(func)[0]) == 5 + self_arg: return func if len(inspect.getargspec(func)[0]) == 4 + self_arg: return lambda t, y, J, dfdt, fy=None: func(t, y, J, dfdt) else: raise ValueError("Incorrect numer of arguments")
<SYSTEM_TASK:> Shadow a module with an instance of LazyModule <END_TASK> <USER_TASK:> Description: def shadow_normal_module(cls, mod_name=None): """ Shadow a module with an instance of LazyModule :param mod_name: Name of the module to shadow. By default this is the module that is making the call into this method. This is not hard-coded as that module might be called '__main__' if it is executed via 'python -m' :returns: A fresh instance of :class:`LazyModule`. """
if mod_name is None:
    # Infer the caller's module name from its frame.
    frame = inspect.currentframe()
    try:
        mod_name = frame.f_back.f_locals['__name__']
    finally:
        # Break the frame reference cycle promptly.
        del frame
orig_mod = sys.modules[mod_name]
lazy_mod = cls(orig_mod.__name__, orig_mod.__doc__, orig_mod)
# Copy every existing attribute over to the lazy replacement.
for attr in dir(orig_mod):
    setattr(lazy_mod, attr, getattr(orig_mod, attr))
# Swap the lazy module into sys.modules in place of the original.
sys.modules[mod_name] = lazy_mod
return lazy_mod
<SYSTEM_TASK:> Load something lazily <END_TASK> <USER_TASK:> Description: def lazily(self, name, callable, args): """ Load something lazily """
# Record the factory and its arguments; the attribute is materialised
# on first access, and the name is exported via __all__.
self._lazy[name] = (callable, args)
self._all.add(name)
<SYSTEM_TASK:> Load something immediately <END_TASK> <USER_TASK:> Description: def immediate(self, name, value): """ Load something immediately """
# Bind the value right away and export the name via __all__.
self._all.add(name)
setattr(self, name, value)
<SYSTEM_TASK:> Parses the given text into one dimension and returns its <END_TASK> <USER_TASK:> Description: def parse_dimension(text): """ Parses the given text into one dimension and returns its equivalent size in points. The numbers provided can be integer or decimal, but exponents are not supported. The units may be inches (", in, ins, inch, inchs, inches), millimeters (mm), points (pt, pts, point, points, or nothing), or centimeters (cm). Because this module is intended for use with paper-size dimensions, no larger or smaller units are currently supported. """
size, unit = _split_dimension(text) factor = _unit_lookup[unit] return size*factor
<SYSTEM_TASK:> Parses a set of dimensions into tuple of values representing the <END_TASK> <USER_TASK:> Description: def parse_dimensions(text): """ Parses a set of dimensions into tuple of values representing the sizes in points. The dimensions that this method supports are exactly as for parse_dimension. It supports multiple dimensions, separated by whitespace, comma, semicolon, hyphen, or the letter x. The units may follow each dimension (in which case they can be different in each case) or only after the last one. Because no-units also means points, if you have some dimensions in the middle of the set with units and some without, then those without will be interpreted as points: i.e. the 2 in "1in, 2, 3mm" will be treated as 2 points, where as the 1 and 2 in "1, 2, 3mm" will be treated as millimeters. """
components = _dimension_separator.split(text)
if len(components) == 0:
    raise DimensionError("No dimensions found in string.")

# Split each component into size and units
pairs = []
units = 0
for component in components:
    value, unit = _split_dimension(component)
    pairs.append((value, unit))
    if unit is not None:
        units += 1

# Work out what to do with empty units: if exactly one unit was given
# and it was on the LAST component, it applies to all unitless values.
if units == 1 and pairs[-1][1]:
    # We need to infer the units
    empty_unit = _unit_lookup[pairs[-1][1]]
else:
    # Otherwise unitless values are taken as points (factor 1).
    empty_unit = 1

# Compile and return the result
result = []
for value, unit in pairs:
    if unit:
        result.append(value * _unit_lookup[unit])
    else:
        result.append(value * empty_unit)
return tuple(result)
<SYSTEM_TASK:> Returns the number and unit from the given piece of text as a pair. <END_TASK> <USER_TASK:> Description: def _split_dimension(text): """ Returns the number and unit from the given piece of text as a pair. >>> _split_dimension('1pt') (1, 'pt') >>> _split_dimension('1 pt') (1, 'pt') >>> _split_dimension('1 \tpt') (1, 'pt') >>> _split_dimension('1 \tpt ') (1, 'pt') >>> _split_dimension(' 1 \tpt ') (1, 'pt') >>> _split_dimension('3') (3, None) >>> _split_dimension('-12.43mm') (-12.43, 'mm') >>> _split_dimension('-12.43"') (-12.43, '"') """
match = _dimension_finder.match(text) if not match: raise DimensionError("Can't parse dimension '%s'." % text) number = match.group(1) unit = match.group(4) if '.' in number: return (float(number), unit) else: return (int(number), unit)
<SYSTEM_TASK:> Sets the audioqueue. <END_TASK> <USER_TASK:> Description: def queue(self, value): """ Sets the audioqueue. Parameters ---------- value : queue.Queue The buffer from which audioframes are received. """
# Only a real queue.Queue may serve as the audio frame buffer.
if isinstance(value, Queue):
    self._queue = value
else:
    raise TypeError("queue is not a Queue object")
<SYSTEM_TASK:> Returns the minimum size of the managed element, as long as <END_TASK> <USER_TASK:> Description: def get_minimum_size(self, data): """Returns the minimum size of the managed element, as long as it is larger than any manually set minima."""
size = self.element.get_minimum_size(data) return datatypes.Point( max(size.x, self.min_width), max(size.y, self.min_height) )
<SYSTEM_TASK:> Draws the managed element in the correct alignment. <END_TASK> <USER_TASK:> Description: def render(self, rect, data): """Draws the managed element in the correct alignment."""
# We can't use our get minimum size, because that enforces # the size limits. size = self.element.get_minimum_size(data) # Assume we're bottom left at our natural size. x = rect.x y = rect.y w = size.x h = size.y extra_width = rect.w - w extra_height = rect.h - h if self.horizontal_align == AlignLM.ALIGN_CENTER: x += extra_width * 0.5 elif self.horizontal_align == AlignLM.ALIGN_RIGHT: x += extra_width elif self.horizontal_align == AlignLM.GROW_X: w = rect.w if self.vertical_align == AlignLM.ALIGN_MIDDLE: y += extra_height * 0.5 elif self.vertical_align == AlignLM.ALIGN_TOP: y += extra_height elif self.vertical_align == AlignLM.GROW_Y: h = rect.h self.element.render(datatypes.Rectangle(x, y, w, h), data)
<SYSTEM_TASK:> Return a copy of the graph G with all the data removed <END_TASK> <USER_TASK:> Description: def create_copy_without_data(G): """Return a copy of the graph G with all the data removed"""
H = nx.Graph() for i in H.nodes_iter(): H.node[i] = {} return H
<SYSTEM_TASK:> Store an object to file by pickling <END_TASK> <USER_TASK:> Description: def dump(stuff, filename, verbose=True, protocol=3): """Store an object to file by pickling Parameters ---------- stuff : object to be pickled filename : path verbose : bool protocol : 1,2,3 (default = 3) Protocol used by Pickler, higher number means more recent version of Python is needed to read the pickle produced. Default is protocol=3, which only works with Python 3+ Return ------ path Path where pickled object was saved. """
filename = os.path.normcase(filename) dir_path = os.path.dirname(filename) if not os.path.exists(dir_path): os.makedirs(dir_path) with open(filename, 'wb') as f: p = pickle.Pickler(f, protocol=protocol) p.dump(stuff) if verbose: print('Written {} items to pickled binary file: {}'.format(len(stuff), filename)) return filename
<SYSTEM_TASK:> Sign the given soap request with the given key <END_TASK> <USER_TASK:> Description: def sign_envelope(envelope, key_file): """Sign the given soap request with the given key"""
doc = etree.fromstring(envelope)

# Mark the SOAP body for signing.
body = get_body(doc)

queue = SignQueue()
queue.push_and_mark(body)

# Build the WS-Security header: token + signature skeleton.
security_node = ensure_security_header(doc, queue)
security_token_node = create_binary_security_token(key_file)
signature_node = Signature(
    xmlsec.TransformExclC14N, xmlsec.TransformRsaSha1)

security_node.append(security_token_node)
security_node.append(signature_node)
queue.insert_references(signature_node)

# KeyInfo must point back at the binary security token.
key_info = create_key_info_node(security_token_node)
signature_node.append(key_info)

# Sign the generated xml
xmlsec.addIDs(doc, ['Id'])
dsigCtx = xmlsec.DSigCtx()
dsigCtx.signKey = xmlsec.Key.load(key_file, xmlsec.KeyDataFormatPem, None)
dsigCtx.sign(signature_node)
return etree.tostring(doc)