<SYSTEM_TASK:> Require packages for this EntryPoint, then resolve it. <END_TASK>
<USER_TASK:> Description:
def load(self, require=True, *args, **kwargs):
    """
    Require packages for this EntryPoint, then resolve it.
    """
    if not require or args or kwargs:
        warnings.warn(
            "Parameters to load are deprecated. Call .resolve and "
            ".require separately.",
            PkgResourcesDeprecationWarning,
            stacklevel=2,
        )
    if require:
        self.require(*args, **kwargs)
    return self.resolve()
<SYSTEM_TASK:> Resolve the entry point from its module and attrs. <END_TASK>
<USER_TASK:> Description:
def resolve(self):
    """
    Resolve the entry point from its module and attrs.
    """
    module = __import__(self.module_name, fromlist=['__name__'], level=0)
    try:
        return functools.reduce(getattr, self.attrs, module)
    except AttributeError as exc:
        raise ImportError(str(exc))
<SYSTEM_TASK:> Parse a single entry point from string `src` <END_TASK>
<USER_TASK:> Description:
def parse(cls, src, dist=None):
    """Parse a single entry point from string `src`

    Entry point syntax follows the form::

        name = some.module:some.attr [extra1, extra2]

    The entry name and module name are required, but the ``:attrs`` and
    ``[extras]`` parts are optional
    """
    m = cls.pattern.match(src)
    if not m:
        msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
        raise ValueError(msg, src)
    res = m.groupdict()
    extras = cls._parse_extras(res['extras'])
    attrs = res['attr'].split('.') if res['attr'] else ()
    return cls(res['name'], res['module'], attrs, extras, dist)
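A quick sketch of what parse() accepts; the target "mypkg.cli:main" is an invented name, not from the source:

# Hypothetical usage of EntryPoint.parse:
ep = EntryPoint.parse("console = mypkg.cli:main [dev]")
print(ep.name)         # console
print(ep.module_name)  # mypkg.cli
print(ep.attrs)        # ('main',)
print(ep.extras)       # ('dev',)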
<SYSTEM_TASK:> Parse a map of entry point groups <END_TASK>
<USER_TASK:> Description:
def parse_map(cls, data, dist=None):
    """Parse a map of entry point groups"""
    if isinstance(data, dict):
        data = data.items()
    else:
        data = split_sections(data)
    maps = {}
    for group, lines in data:
        if group is None:
            if not lines:
                continue
            raise ValueError("Entry points must be listed in groups")
        group = group.strip()
        if group in maps:
            raise ValueError("Duplicate group name", group)
        maps[group] = cls.parse_group(group, lines, dist)
    return maps
<SYSTEM_TASK:> Given a mapping of extras to dependencies, strip off <END_TASK>
<USER_TASK:> Description:
def _filter_extras(dm):
    """
    Given a mapping of extras to dependencies, strip off
    environment markers and filter out any dependencies
    not matching the markers.
    """
    for extra in list(filter(None, dm)):
        new_extra = extra
        reqs = dm.pop(extra)
        new_extra, _, marker = extra.partition(':')
        fails_marker = marker and (
            invalid_marker(marker)
            or not evaluate_marker(marker)
        )
        if fails_marker:
            reqs = []
        new_extra = safe_extra(new_extra) or None
        dm.setdefault(new_extra, []).extend(reqs)
    return dm
<SYSTEM_TASK:> List of Requirements needed for this distro if `extras` are used <END_TASK>
<USER_TASK:> Description:
def requires(self, extras=()):
    """List of Requirements needed for this distro if `extras` are used"""
    dm = self._dep_map
    deps = []
    deps.extend(dm.get(None, ()))
    for ext in extras:
        try:
            deps.extend(dm[safe_extra(ext)])
        except KeyError:
            raise UnknownExtra(
                "%s has no such extra feature %r" % (self, ext)
            )
    return deps
<SYSTEM_TASK:> Return what this distribution's standard .egg filename should be <END_TASK>
<USER_TASK:> Description:
def egg_name(self):
    """Return what this distribution's standard .egg filename should be"""
    filename = "%s-%s-py%s" % (
        to_filename(self.project_name), to_filename(self.version),
        self.py_version or PY_MAJOR
    )
    if self.platform:
        filename += '-' + self.platform
    return filename
<SYSTEM_TASK:> Return a ``Requirement`` that matches this distribution exactly <END_TASK>
<USER_TASK:> Description:
def as_requirement(self):
    """Return a ``Requirement`` that matches this distribution exactly"""
    if isinstance(self.parsed_version, packaging.version.Version):
        spec = "%s==%s" % (self.project_name, self.parsed_version)
    else:
        spec = "%s===%s" % (self.project_name, self.parsed_version)
    return Requirement.parse(spec)
<SYSTEM_TASK:> Return the `name` entry point of `group` or raise ImportError <END_TASK>
<USER_TASK:> Description:
def load_entry_point(self, group, name):
    """Return the `name` entry point of `group` or raise ImportError"""
    ep = self.get_entry_info(group, name)
    if ep is None:
        raise ImportError("Entry point %r not found" % ((group, name),))
    return ep.load()
<SYSTEM_TASK:> Return the entry point map for `group`, or the full entry map <END_TASK>
<USER_TASK:> Description:
def get_entry_map(self, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    try:
        ep_map = self._ep_map
    except AttributeError:
        ep_map = self._ep_map = EntryPoint.parse_map(
            self._get_metadata('entry_points.txt'), self
        )
    if group is not None:
        return ep_map.get(group, {})
    return ep_map
<SYSTEM_TASK:> Copy this distribution, substituting in any changed keyword args <END_TASK>
<USER_TASK:> Description:
def clone(self, **kw):
    """Copy this distribution, substituting in any changed keyword args"""
    names = 'project_name version py_version platform location precedence'
    for attr in names.split():
        kw.setdefault(attr, getattr(self, attr, None))
    kw.setdefault('metadata', self._provider)
    return self.__class__(**kw)
<SYSTEM_TASK:> Recompute this distribution's dependencies. <END_TASK>
<USER_TASK:> Description:
def _compute_dependencies(self):
    """Recompute this distribution's dependencies."""
    dm = self.__dep_map = {None: []}

    reqs = []
    # Including any condition expressions
    for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
        reqs.extend(parse_requirements(req))

    def reqs_for_extra(extra):
        for req in reqs:
            if not req.marker or req.marker.evaluate({'extra': extra}):
                yield req

    common = frozenset(reqs_for_extra(None))
    dm[None].extend(common)

    for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
        s_extra = safe_extra(extra.strip())
        dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)

    return dm
<SYSTEM_TASK:> Expand agglutinated rules in a definition-schema. <END_TASK>
<USER_TASK:> Description:
def _expand_logical_shortcuts(cls, schema):
    """ Expand agglutinated rules in a definition-schema.

    :param schema: The schema-definition to expand.
    :return: The expanded schema-definition.
    """
    def is_of_rule(x):
        return isinstance(x, _str_type) and \
            x.startswith(('allof_', 'anyof_', 'noneof_', 'oneof_'))

    for field in schema:
        for of_rule in (x for x in schema[field] if is_of_rule(x)):
            operator, rule = of_rule.split('_')
            schema[field].update({operator: []})
            for value in schema[field][of_rule]:
                schema[field][operator].append({rule: value})
            del schema[field][of_rule]
    return schema
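To illustrate the expansion this performs (a sketch, not from the source):

# An agglutinated 'anyof_type' rule becomes an 'anyof' list of
# single-rule schemas:
schema = {'prop': {'anyof_type': ['string', 'integer']}}
# After _expand_logical_shortcuts(schema):
# {'prop': {'anyof': [{'type': 'string'}, {'type': 'integer'}]}}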
<SYSTEM_TASK:> Validates a schema that defines rules against supported rules. <END_TASK>
<USER_TASK:> Description:
def _validate(self, schema):
    """ Validates a schema that defines rules against supported rules.

    :param schema: The schema to be validated as a legal cerberus schema
                   according to the rules of this Validator object.
    """
    if isinstance(schema, _str_type):
        schema = self.validator.schema_registry.get(schema, schema)

    if schema is None:
        raise SchemaError(errors.SCHEMA_ERROR_MISSING)

    schema = copy(schema)
    for field in schema:
        if isinstance(schema[field], _str_type):
            schema[field] = rules_set_registry.get(schema[field],
                                                   schema[field])

    if not self.schema_validator(schema, normalize=False):
        raise SchemaError(self.schema_validator.errors)
<SYSTEM_TASK:> Register a definition to the registry. Existing definitions are <END_TASK>
<USER_TASK:> Description:
def add(self, name, definition):
    """ Register a definition to the registry. Existing definitions are
    replaced silently.

    :param name: The name which can be used as reference in a validation
                 schema.
    :type name: :class:`str`
    :param definition: The definition.
    :type definition: any :term:`mapping`
    """
    self._storage[name] = self._expand_definition(definition)
<SYSTEM_TASK:> Add several definitions at once. Existing definitions are <END_TASK>
<USER_TASK:> Description:
def extend(self, definitions):
    """ Add several definitions at once. Existing definitions are
    replaced silently.

    :param definitions: The names and definitions.
    :type definitions: a :term:`mapping` or an :term:`iterable` with
                       two-value :class:`tuple` s
    """
    for name, definition in dict(definitions).items():
        self.add(name, definition)
<SYSTEM_TASK:> Context manager to temporarily change working directories <END_TASK>
<USER_TASK:> Description:
def cd(path):
    """Context manager to temporarily change working directories

    :param str path: The directory to move into

    >>> print(os.path.abspath(os.curdir))
    '/home/user/code/myrepo'
    >>> with cd("/home/user/code/otherdir/subdir"):
    ...     print("Changed directory: %s" % os.path.abspath(os.curdir))
    Changed directory: /home/user/code/otherdir/subdir
    >>> print(os.path.abspath(os.curdir))
    '/home/user/code/myrepo'
    """
    if not path:
        return
    prev_cwd = Path.cwd().as_posix()
    if isinstance(path, Path):
        path = path.as_posix()
    os.chdir(str(path))
    try:
        yield
    finally:
        os.chdir(prev_cwd)
<SYSTEM_TASK:> Get a spinner object or a dummy spinner to wrap a context. <END_TASK>
<USER_TASK:> Description:
def spinner(
    spinner_name=None,
    start_text=None,
    handler_map=None,
    nospin=False,
    write_to_stdout=True,
):
    """Get a spinner object or a dummy spinner to wrap a context.

    :param str spinner_name: A spinner type e.g. "dots" or "bouncingBar"
        (default: {"bouncingBar"})
    :param str start_text: Text to start off the spinner with (default: {None})
    :param dict handler_map: Handler map for signals to be handled gracefully
        (default: {None})
    :param bool nospin: If true, use the dummy spinner (default: {False})
    :param bool write_to_stdout: Writes to stdout if true, otherwise writes to
        stderr (default: True)
    :return: A spinner object which can be manipulated while alive
    :rtype: :class:`~vistir.spin.VistirSpinner`

    Raises:
        RuntimeError -- Raised if the spinner extra is not installed
    """
    from .spin import create_spinner

    has_yaspin = None
    try:
        import yaspin
    except ImportError:
        has_yaspin = False
        if not nospin:
            raise RuntimeError(
                "Failed to import spinner! Reinstall vistir with command:"
                " pip install --upgrade vistir[spinner]"
            )
        else:
            spinner_name = ""
    else:
        has_yaspin = True
        spinner_name = ""

    use_yaspin = (has_yaspin is False) or (nospin is True)
    if has_yaspin is None or (has_yaspin is True and not nospin):
        use_yaspin = True
    if start_text is None and use_yaspin is True:
        start_text = "Running..."
    with create_spinner(
        spinner_name=spinner_name,
        text=start_text,
        handler_map=handler_map,
        nospin=nospin,
        use_yaspin=use_yaspin,
        write_to_stdout=write_to_stdout,
    ) as _spinner:
        yield _spinner
<SYSTEM_TASK:> Atomically open `target` for writing. <END_TASK>
<USER_TASK:> Description:
def atomic_open_for_write(target, binary=False, newline=None, encoding=None):
    """Atomically open `target` for writing.

    This is based on Lektor's `atomic_open()` utility, but simplified a lot
    to handle only writing, and skip many multi-process/thread edge cases
    handled by Werkzeug.

    :param str target: Target filename to write
    :param bool binary: Whether to open in binary mode, default False
    :param str newline: The newline character to use when writing, determined
        from system if not supplied
    :param str encoding: The encoding to use when writing, defaults to system
        encoding

    How this works:

    * Create a temp file (in the same directory of the actual target), and
      yield for surrounding code to write to it.
    * If some thing goes wrong, try to remove the temp file. The actual target
      is not touched whatsoever.
    * If everything goes well, close the temp file, and replace the actual
      target with this new file.

    .. code:: python

        >>> fn = "test_file.txt"
        >>> def read_test_file(filename=fn):
        ...     with open(filename, 'r') as fh:
        ...         print(fh.read().strip())
        >>> with open(fn, "w") as fh:
        ...     fh.write("this is some test text")
        >>> read_test_file()
        this is some test text

        >>> def raise_exception_while_writing(filename):
        ...     with open(filename, "w") as fh:
        ...         fh.write("writing some new text")
        ...         raise RuntimeError("Uh oh, hope your file didn't get overwritten")
        >>> raise_exception_while_writing(fn)
        Traceback (most recent call last):
            ...
        RuntimeError: Uh oh, hope your file didn't get overwritten
        >>> read_test_file()
        writing some new text

        # Now try with vistir
        >>> def raise_exception_while_writing(filename):
        ...     with vistir.contextmanagers.atomic_open_for_write(filename) as fh:
        ...         fh.write("Overwriting all the text from before with even newer text")
        ...         raise RuntimeError("But did it get overwritten now?")
        >>> raise_exception_while_writing(fn)
        Traceback (most recent call last):
            ...
        RuntimeError: But did it get overwritten now?
        >>> read_test_file()
        writing some new text
    """
    mode = "w+b" if binary else "w"
    f = NamedTemporaryFile(
        dir=os.path.dirname(target),
        prefix=".__atomic-write",
        mode=mode,
        encoding=encoding,
        newline=newline,
        delete=False,
    )
    # set permissions to 0644
    os.chmod(f.name, stat.S_IWUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)
    try:
        yield f
    except BaseException:
        f.close()
        try:
            os.remove(f.name)
        except OSError:
            pass
        raise
    else:
        f.close()
        try:
            os.remove(target)  # This is needed on Windows.
        except OSError:
            pass
        os.rename(f.name, target)
<SYSTEM_TASK:> Open local or remote file for reading. <END_TASK>
<USER_TASK:> Description:
def open_file(link, session=None, stream=True):
    """
    Open local or remote file for reading.

    :type link: pip._internal.index.Link or str
    :type session: requests.Session
    :param bool stream: Try to stream if remote, default True
    :raises ValueError: If link points to a local directory.
    :return: a context manager to the opened file-like object
    """
    if not isinstance(link, six.string_types):
        try:
            link = link.url_without_fragment
        except AttributeError:
            raise ValueError("Cannot parse url from unknown type: {0!r}".format(link))
    if not is_valid_url(link) and os.path.exists(link):
        link = path_to_url(link)
    if is_file_url(link):
        # Local URL
        local_path = url_to_path(link)
        if os.path.isdir(local_path):
            raise ValueError("Cannot open directory for read: {}".format(link))
        else:
            with io.open(local_path, "rb") as local_file:
                yield local_file
    else:
        # Remote URL
        headers = {"Accept-Encoding": "identity"}
        if not session:
            from requests import Session
            session = Session()
        with session.get(link, headers=headers, stream=stream) as resp:
            try:
                raw = getattr(resp, "raw", None)
                result = raw if raw else resp
                yield result
            finally:
                if raw:
                    conn = getattr(raw, "_connection")
                    if conn is not None:
                        conn.close()
                result.close()
<SYSTEM_TASK:> This reads data from the file descriptor. <END_TASK>
<USER_TASK:> Description:
def read_nonblocking(self, size=1, timeout=None):
    """This reads data from the file descriptor.

    This is a simple implementation suitable for a regular file. Subclasses
    using ptys or pipes should override it.

    The timeout parameter is ignored.
    """
    try:
        s = os.read(self.child_fd, size)
    except OSError as err:
        if err.args[0] == errno.EIO:
            # Linux-style EOF
            self.flag_eof = True
            raise EOF('End Of File (EOF). Exception style platform.')
        raise
    if s == b'':
        # BSD-style EOF
        self.flag_eof = True
        raise EOF('End Of File (EOF). Empty string style platform.')

    s = self._decoder.decode(s, final=False)
    self._log(s, 'read')
    return s
<SYSTEM_TASK:> Page through text by invoking a program on a temporary file. <END_TASK>
<USER_TASK:> Description:
def _tempfilepager(generator, cmd, color):
    """Page through text by invoking a program on a temporary file."""
    import tempfile
    filename = tempfile.mktemp()
    # TODO: This never terminates if the passed generator never terminates.
    text = "".join(generator)
    if not color:
        text = strip_ansi(text)
    encoding = get_best_encoding(sys.stdout)
    with open_stream(filename, 'wb')[0] as f:
        f.write(text.encode(encoding))
    try:
        os.system(cmd + ' "' + filename + '"')
    finally:
        os.unlink(filename)
<SYSTEM_TASK:> Simply print unformatted text. This is the ultimate fallback. <END_TASK>
<USER_TASK:> Description:
def _nullpager(stream, generator, color):
    """Simply print unformatted text. This is the ultimate fallback."""
    for text in generator:
        if not color:
            text = strip_ansi(text)
        stream.write(text)
<SYSTEM_TASK:> Progress iterator. Prints a dot for each item being iterated <END_TASK>
<USER_TASK:> Description:
def dots(it, label="", hide=None, every=1):
    """Progress iterator. Prints a dot for each item being iterated"""
    count = 0
    if not hide:
        STREAM.write(label)
    for i, item in enumerate(it):
        if not hide:
            if i % every == 0:  # True every "every" updates
                STREAM.write(DOTS_CHAR)
                sys.stderr.flush()
        count += 1
        yield item
    STREAM.write("\n")
    STREAM.flush()
<SYSTEM_TASK:> Parse version to major, minor, patch, pre-release, build parts. <END_TASK>
<USER_TASK:> Description:
def parse(version):
    """Parse version to major, minor, patch, pre-release, build parts.

    :param version: version string
    :return: dictionary with the keys 'build', 'major', 'minor', 'patch',
             and 'prerelease'. The prerelease or build keys can be None
             if not provided
    :rtype: dict

    >>> import semver
    >>> ver = semver.parse('3.4.5-pre.2+build.4')
    >>> ver['major']
    3
    >>> ver['minor']
    4
    >>> ver['patch']
    5
    >>> ver['prerelease']
    'pre.2'
    >>> ver['build']
    'build.4'
    """
    match = _REGEX.match(version)
    if match is None:
        raise ValueError('%s is not valid SemVer string' % version)

    version_parts = match.groupdict()

    version_parts['major'] = int(version_parts['major'])
    version_parts['minor'] = int(version_parts['minor'])
    version_parts['patch'] = int(version_parts['patch'])

    return version_parts
<SYSTEM_TASK:> Parse version string to a VersionInfo instance. <END_TASK>
<USER_TASK:> Description:
def parse_version_info(version):
    """Parse version string to a VersionInfo instance.

    :param version: version string
    :return: a :class:`VersionInfo` instance
    :rtype: :class:`VersionInfo`

    >>> import semver
    >>> version_info = semver.parse_version_info("3.4.5-pre.2+build.4")
    >>> version_info.major
    3
    >>> version_info.minor
    4
    >>> version_info.patch
    5
    >>> version_info.prerelease
    'pre.2'
    >>> version_info.build
    'build.4'
    """
    parts = parse(version)
    version_info = VersionInfo(
        parts['major'], parts['minor'], parts['patch'],
        parts['prerelease'], parts['build'])

    return version_info
<SYSTEM_TASK:> Compare two versions <END_TASK>
<USER_TASK:> Description:
def compare(ver1, ver2):
    """Compare two versions

    :param ver1: version string 1
    :param ver2: version string 2
    :return: The return value is negative if ver1 < ver2,
             zero if ver1 == ver2 and strictly positive if ver1 > ver2
    :rtype: int

    >>> import semver
    >>> semver.compare("1.0.0", "2.0.0")
    -1
    >>> semver.compare("2.0.0", "1.0.0")
    1
    >>> semver.compare("2.0.0", "2.0.0")
    0
    """
    v1, v2 = parse(ver1), parse(ver2)

    return _compare_by_keys(v1, v2)
<SYSTEM_TASK:> Compare two versions through a comparison <END_TASK>
<USER_TASK:> Description:
def match(version, match_expr):
    """Compare two versions through a comparison

    :param str version: a version string
    :param str match_expr: operator and version; valid operators are
          <   smaller than
          >   greater than
          >=  greater or equal than
          <=  smaller or equal than
          ==  equal
          !=  not equal
    :return: True if the expression matches the version, otherwise False
    :rtype: bool

    >>> import semver
    >>> semver.match("2.0.0", ">=1.0.0")
    True
    >>> semver.match("1.0.0", ">1.0.0")
    False
    """
    prefix = match_expr[:2]
    if prefix in ('>=', '<=', '==', '!='):
        match_version = match_expr[2:]
    elif prefix and prefix[0] in ('>', '<'):
        prefix = prefix[0]
        match_version = match_expr[1:]
    else:
        raise ValueError("match_expr parameter should be in format <op><ver>, "
                         "where <op> is one of "
                         "['<', '>', '==', '<=', '>=', '!=']. "
                         "You provided: %r" % match_expr)

    possibilities_dict = {
        '>': (1,),
        '<': (-1,),
        '==': (0,),
        '!=': (-1, 1),
        '>=': (0, 1),
        '<=': (-1, 0)
    }

    possibilities = possibilities_dict[prefix]
    cmp_res = compare(version, match_version)

    return cmp_res in possibilities
<SYSTEM_TASK:> Returns the greater version of two versions <END_TASK>
<USER_TASK:> Description:
def max_ver(ver1, ver2):
    """Returns the greater version of two versions

    :param ver1: version string 1
    :param ver2: version string 2
    :return: the greater version of the two
    :rtype: :class:`VersionInfo`

    >>> import semver
    >>> semver.max_ver("1.0.0", "2.0.0")
    '2.0.0'
    """
    cmp_res = compare(ver1, ver2)
    if cmp_res == 0 or cmp_res == 1:
        return ver1
    else:
        return ver2
<SYSTEM_TASK:> Returns the smaller version of two versions <END_TASK>
<USER_TASK:> Description:
def min_ver(ver1, ver2):
    """Returns the smaller version of two versions

    :param ver1: version string 1
    :param ver2: version string 2
    :return: the smaller version of the two
    :rtype: :class:`VersionInfo`

    >>> import semver
    >>> semver.min_ver("1.0.0", "2.0.0")
    '1.0.0'
    """
    cmp_res = compare(ver1, ver2)
    if cmp_res == 0 or cmp_res == -1:
        return ver1
    else:
        return ver2
<SYSTEM_TASK:> Format a version according to the Semantic Versioning specification <END_TASK>
<USER_TASK:> Description:
def format_version(major, minor, patch, prerelease=None, build=None):
    """Format a version according to the Semantic Versioning specification

    :param str major: the required major part of a version
    :param str minor: the required minor part of a version
    :param str patch: the required patch part of a version
    :param str prerelease: the optional prerelease part of a version
    :param str build: the optional build part of a version
    :return: the formatted string
    :rtype: str

    >>> import semver
    >>> semver.format_version(3, 4, 5, 'pre.2', 'build.4')
    '3.4.5-pre.2+build.4'
    """
    version = "%d.%d.%d" % (major, minor, patch)
    if prerelease is not None:
        version = version + "-%s" % prerelease

    if build is not None:
        version = version + "+%s" % build

    return version
<SYSTEM_TASK:> Set constants _EOF and _INTR. <END_TASK>
<USER_TASK:> Description:
def _make_eof_intr():
    """Set constants _EOF and _INTR.

    This avoids doing potentially costly operations on module load.
    """
    global _EOF, _INTR
    if (_EOF is not None) and (_INTR is not None):
        return

    # inherit EOF and INTR definitions from controlling process.
    try:
        from termios import VEOF, VINTR
        fd = None
        for name in 'stdin', 'stdout':
            stream = getattr(sys, '__%s__' % name, None)
            if stream is None or not hasattr(stream, 'fileno'):
                continue
            try:
                fd = stream.fileno()
            except ValueError:
                continue
        if fd is None:
            # no fd, raise ValueError to fallback on CEOF, CINTR
            raise ValueError("No stream has a fileno")
        intr = ord(termios.tcgetattr(fd)[6][VINTR])
        eof = ord(termios.tcgetattr(fd)[6][VEOF])
    except (ImportError, OSError, IOError, ValueError, termios.error):
        # unless the controlling process is also not a terminal,
        # such as cron(1), or when stdin and stdout are both closed.
        # Fall back to using CEOF and CINTR.
        try:
            from termios import CEOF, CINTR
            (intr, eof) = (CINTR, CEOF)
        except ImportError:
            # ^C, ^D
            (intr, eof) = (3, 4)

    _INTR = _byte(intr)
    _EOF = _byte(eof)
<SYSTEM_TASK:> Read and return at most ``size`` bytes from the pty. <END_TASK>
<USER_TASK:> Description:
def read(self, size=1024):
    """Read and return at most ``size`` bytes from the pty.

    Can block if there is nothing to read. Raises :exc:`EOFError` if the
    terminal was closed.

    Unlike Pexpect's ``read_nonblocking`` method, this doesn't try to deal
    with the vagaries of EOF on platforms that do strange things, like IRIX
    or older Solaris systems. It handles the errno=EIO pattern used on
    Linux, and the empty-string return used on BSD platforms and (seemingly)
    on recent Solaris.
    """
    try:
        s = self.fileobj.read1(size)
    except (OSError, IOError) as err:
        if err.args[0] == errno.EIO:
            # Linux-style EOF
            self.flag_eof = True
            raise EOFError('End Of File (EOF). Exception style platform.')
        raise
    if s == b'':
        # BSD-style EOF (also appears to work on recent Solaris (OpenIndiana))
        self.flag_eof = True
        raise EOFError('End Of File (EOF). Empty string style platform.')

    return s
<SYSTEM_TASK:> Write bytes to the pseudoterminal. <END_TASK>
<USER_TASK:> Description:
def write(self, s, flush=True):
    """Write bytes to the pseudoterminal.

    Returns the number of bytes written.
    """
    return self._writeb(s, flush=flush)
<SYSTEM_TASK:> Send the given signal to the child application. <END_TASK>
<USER_TASK:> Description:
def kill(self, sig):
    """Send the given signal to the child application.

    In keeping with UNIX tradition it has a misleading name. It does not
    necessarily kill the child unless you send the right signal. See the
    :mod:`signal` module for constants representing signal numbers.
    """
    # Same as os.kill, but the pid is given for you.
    if self.isalive():
        os.kill(self.pid, sig)
<SYSTEM_TASK:> Read at most ``size`` bytes from the pty, return them as unicode. <END_TASK>
<USER_TASK:> Description:
def read(self, size=1024):
    """Read at most ``size`` bytes from the pty, return them as unicode.

    Can block if there is nothing to read. Raises :exc:`EOFError` if the
    terminal was closed.

    The size argument still refers to bytes, not unicode code points.
    """
    b = super(PtyProcessUnicode, self).read(size)
    return self.decoder.decode(b, final=False)
<SYSTEM_TASK:> Write the unicode string ``s`` to the pseudoterminal. <END_TASK>
<USER_TASK:> Description:
def write(self, s):
    """Write the unicode string ``s`` to the pseudoterminal.

    Returns the number of bytes written.
    """
    b = s.encode(self.encoding)
    return super(PtyProcessUnicode, self).write(b)
<SYSTEM_TASK:> Build an instance from a requirement. <END_TASK>
<USER_TASK:> Description:
def from_requirement(cls, provider, requirement, parent):
    """Build an instance from a requirement."""
    candidates = provider.find_matches(requirement)
    if not candidates:
        raise NoVersionsAvailable(requirement, parent)
    return cls(
        candidates=candidates,
        information=[RequirementInformation(requirement, parent)],
    )
<SYSTEM_TASK:> Build a new instance from this and a new requirement. <END_TASK>
<USER_TASK:> Description:
def merged_with(self, provider, requirement, parent):
    """Build a new instance from this and a new requirement."""
    infos = list(self.information)
    infos.append(RequirementInformation(requirement, parent))
    candidates = [
        c for c in self.candidates
        if provider.is_satisfied_by(requirement, c)
    ]
    if not candidates:
        raise RequirementsConflicted(self)
    return type(self)(candidates, infos)
<SYSTEM_TASK:> Push a new state into history. <END_TASK>
<USER_TASK:> Description:
def _push_new_state(self):
    """Push a new state into history.

    This new state will be used to hold resolution results of the next
    coming round.
    """
    try:
        base = self._states[-1]
    except IndexError:
        graph = DirectedGraph()
        graph.add(None)  # Sentinel as root dependencies' parent.
        state = State(mapping={}, graph=graph)
    else:
        state = State(
            mapping=base.mapping.copy(),
            graph=base.graph.copy(),
        )
    self._states.append(state)
<SYSTEM_TASK:> Take a collection of constraints, spit out the resolution result. <END_TASK>
<USER_TASK:> Description:
def resolve(self, requirements, max_rounds=20):
    """Take a collection of constraints, spit out the resolution result.

    The return value is a representation to the final resolution result. It
    is a tuple subclass with two public members:

    * `mapping`: A dict of resolved candidates. Each key is an identifier
      of a requirement (as returned by the provider's `identify` method),
      and the value is the resolved candidate.
    * `graph`: A `DirectedGraph` instance representing the dependency tree.
      The vertices are keys of `mapping`, and each edge represents *why*
      a particular package is included. A special vertex `None` is
      included to represent parents of user-supplied requirements.

    The following exceptions may be raised if a resolution cannot be found:

    * `NoVersionsAvailable`: A requirement has no available candidates.
    * `ResolutionImpossible`: A resolution cannot be found for the given
      combination of requirements.
    * `ResolutionTooDeep`: The dependency tree is too deeply nested and
      the resolver gave up. This is usually caused by a circular
      dependency, but you can try to resolve this by increasing the
      `max_rounds` argument.
    """
    resolution = Resolution(self.provider, self.reporter)
    resolution.resolve(requirements, max_rounds=max_rounds)
    return resolution.state
<SYSTEM_TASK:> Remove the "extra == ..." operands from the list. <END_TASK> <USER_TASK:> Description: def _strip_extra(elements): """Remove the "extra == ..." operands from the list. This is not a comprehensive implementation, but relies on an important characteristic of metadata generation: The "extra == ..." operand is always associated with an "and" operator. This means that we can simply remove the operand and the "and" operator associated with it. """
extra_indexes = [] for i, element in enumerate(elements): if isinstance(element, list): cancelled = _strip_extra(element) if cancelled: extra_indexes.append(i) elif isinstance(element, tuple) and element[0].value == "extra": extra_indexes.append(i) for i in reversed(extra_indexes): del elements[i] if i > 0 and elements[i - 1] == "and": # Remove the "and" before it. del elements[i - 1] elif elements: # This shouldn't ever happen, but is included for completeness. # If there is not an "and" before this element, try to remove the # operator after it. del elements[0] return (not elements)
<SYSTEM_TASK:> Build a new marker without the `extra == ...` part. <END_TASK>
<USER_TASK:> Description:
def get_without_extra(marker):
    """Build a new marker without the `extra == ...` part.

    The implementation relies very deep into packaging's internals, but I
    don't have a better way now (except implementing the whole thing myself).

    This could return `None` if the `extra == ...` part is the only one in the
    input marker.
    """
    # TODO: Why is this very deep in the internals? Why is a better solution
    # implementing it yourself when someone is already maintaining a codebase
    # for this? It's literally a grammar implementation that is required to
    # meet the demands of a pep... -d
    if not marker:
        return None
    marker = Marker(str(marker))
    elements = marker._markers
    _strip_extra(elements)
    if elements:
        return marker
    return None
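A hedged usage sketch, assuming `packaging` is importable; the marker strings are invented examples:

from packaging.markers import Marker

marker = Marker('os_name == "nt" and extra == "sock"')
print(get_without_extra(marker))                     # os_name == "nt"
print(get_without_extra(Marker('extra == "sock"')))  # None (extra was the only operand)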
<SYSTEM_TASK:> Return any errors which have occurred. <END_TASK>
<USER_TASK:> Description:
def get_errors(self):
    """
    Return any errors which have occurred.
    """
    result = []
    while not self.errors.empty():  # pragma: no cover
        try:
            e = self.errors.get(False)
            result.append(e)
        except queue.Empty:
            # Empty lives in the queue module, not on Queue instances;
            # this assumes a module-level `import queue` (or equivalent).
            continue
        self.errors.task_done()
    return result
<SYSTEM_TASK:> For a given project, get a dictionary mapping available versions to Distribution <END_TASK>
<USER_TASK:> Description:
def get_project(self, name):
    """
    For a given project, get a dictionary mapping available versions to
    Distribution instances.

    This calls _get_project to do all the work, and just implements a
    caching layer on top.
    """
    if self._cache is None:  # pragma: no cover
        result = self._get_project(name)
    elif name in self._cache:
        result = self._cache[name]
    else:
        self.clear_errors()
        result = self._get_project(name)
        self._cache[name] = result
    return result
<SYSTEM_TASK:> Get a digest from a dictionary by looking at keys of the form <END_TASK>
<USER_TASK:> Description:
def _get_digest(self, info):
    """
    Get a digest from a dictionary by looking at keys of the form
    'algo_digest'.

    Returns a 2-tuple (algo, digest) if found, else None. Currently
    looks only for SHA256, then MD5.
    """
    result = None
    for algo in ('sha256', 'md5'):
        key = '%s_digest' % algo
        if key in info:
            result = (algo, info[key])
            break
    return result
<SYSTEM_TASK:> Return the URLs of all the links on a page together with information <END_TASK>
<USER_TASK:> Description:
def links(self):
    """
    Return the URLs of all the links on a page together with information
    about their "rel" attribute, for determining which ones to treat as
    downloads and which ones to queue for further scraping.
    """
    def clean(url):
        "Tidy up an URL."
        scheme, netloc, path, params, query, frag = urlparse(url)
        return urlunparse((scheme, netloc, quote(path), params, query, frag))

    result = set()
    for match in self._href.finditer(self.data):
        d = match.groupdict('')
        rel = (d['rel1'] or d['rel2'] or d['rel3'] or
               d['rel4'] or d['rel5'] or d['rel6'])
        url = d['url1'] or d['url2'] or d['url3']
        url = urljoin(self.base_url, url)
        url = unescape(url)
        url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url)
        result.add((url, rel))
    # We sort the result, hoping to bring the most recent versions
    # to the front
    result = sorted(result, key=lambda t: t[0], reverse=True)
    return result
<SYSTEM_TASK:> See if an URL is a suitable download for a project. <END_TASK>
<USER_TASK:> Description:
def _process_download(self, url):
    """
    See if an URL is a suitable download for a project.

    If it is, register information in the result dictionary (for
    _get_project) about the specific version it's for.

    Note that the return value isn't actually used other than as a boolean
    value.
    """
    if self.platform_check and self._is_platform_dependent(url):
        info = None
    else:
        info = self.convert_url_to_download_info(url, self.project_name)
    logger.debug('process_download: %s -> %s', url, info)
    if info:
        with self._lock:    # needed because self.result is shared
            self._update_version_data(self.result, info)
    return info
<SYSTEM_TASK:> Determine whether a link URL from a referring page and with a <END_TASK>
<USER_TASK:> Description:
def _should_queue(self, link, referrer, rel):
    """
    Determine whether a link URL from a referring page and with a
    particular "rel" attribute should be queued for scraping.
    """
    scheme, netloc, path, _, _, _ = urlparse(link)
    if path.endswith(self.source_extensions + self.binary_extensions +
                     self.excluded_extensions):
        result = False
    elif self.skip_externals and not link.startswith(self.base_url):
        result = False
    elif not referrer.startswith(self.base_url):
        result = False
    elif rel not in ('homepage', 'download'):
        result = False
    elif scheme not in ('http', 'https', 'ftp'):
        result = False
    elif self._is_platform_dependent(link):
        result = False
    else:
        host = netloc.split(':', 1)[0]
        if host.lower() == 'localhost':
            result = False
        else:
            result = True
    logger.debug('should_queue: %s (%s) from %s -> %s', link, rel,
                 referrer, result)
    return result
<SYSTEM_TASK:> Find the distributions which can fulfill a requirement. <END_TASK>
<USER_TASK:> Description:
def find_providers(self, reqt):
    """
    Find the distributions which can fulfill a requirement.

    :param reqt: The requirement.
    :type reqt: str
    :return: A set of distributions which can fulfill the requirement.
    """
    matcher = self.get_matcher(reqt)
    name = matcher.key  # case-insensitive
    result = set()
    provided = self.provided
    if name in provided:
        for version, provider in provided[name]:
            try:
                match = matcher.match(version)
            except UnsupportedVersionError:
                match = False

            if match:
                result.add(provider)
                break
    return result
<SYSTEM_TASK:> Run in a thread to move output from a pipe to a queue. <END_TASK>
<USER_TASK:> Description:
def _read_incoming(self):
    """Run in a thread to move output from a pipe to a queue."""
    fileno = self.proc.stdout.fileno()
    while 1:
        buf = b''
        try:
            buf = os.read(fileno, 1024)
        except OSError as e:
            self._log(e, 'read')

        if not buf:
            # This indicates we have reached EOF
            self._read_queue.put(None)
            return

        self._read_queue.put(buf)
<SYSTEM_TASK:> Like `mkdir`, but does not raise an exception if the <END_TASK>
<USER_TASK:> Description:
def mkdir_p(*args, **kwargs):
    """Like `mkdir`, but does not raise an exception if the
    directory already exists.
    """
    try:
        return os.mkdir(*args, **kwargs)
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise
<SYSTEM_TASK:> Attach a regular expression pattern matcher to a custom type converter <END_TASK>
<USER_TASK:> Description:
def with_pattern(pattern, regex_group_count=None):
    """Attach a regular expression pattern matcher to a custom type converter
    function.

    This annotates the type converter with the :attr:`pattern` attribute.

    EXAMPLE:
        >>> import parse
        >>> @parse.with_pattern(r"\d+")
        ... def parse_number(text):
        ...     return int(text)

    is equivalent to:

        >>> def parse_number(text):
        ...     return int(text)
        >>> parse_number.pattern = r"\d+"

    :param pattern: regular expression pattern (as text)
    :param regex_group_count: Indicates how many regex-groups are in pattern.
    :return: wrapped function
    """
    def decorator(func):
        func.pattern = pattern
        func.regex_group_count = regex_group_count
        return func
    return decorator
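To show where the annotated converter ends up being used, here is a sketch with the parse library's extra_types hook (the "Number" type name mirrors the docstring example):

import parse

@parse.with_pattern(r"\d+")
def parse_number(text):
    return int(text)

# The custom type is wired in via extra_types and referenced in the format:
result = parse.parse("Answer: {count:Number}", "Answer: 42",
                     extra_types=dict(Number=parse_number))
print(result["count"])  # 42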
<SYSTEM_TASK:> Un-installs a provided package and removes it from Pipfile. <END_TASK>
<USER_TASK:> Description:
def uninstall(ctx, state, all_dev=False, all=False, **kwargs):
    """Un-installs a provided package and removes it from Pipfile."""
    from ..core import do_uninstall
    retcode = do_uninstall(
        packages=state.installstate.packages,
        editable_packages=state.installstate.editables,
        three=state.three,
        python=state.python,
        system=state.system,
        lock=not state.installstate.skip_lock,
        all_dev=all_dev,
        all=all,
        keep_outdated=state.installstate.keep_outdated,
        pypi_mirror=state.pypi_mirror,
        ctx=ctx
    )
    if retcode:
        sys.exit(retcode)
<SYSTEM_TASK:> Spawns a shell within the virtualenv. <END_TASK>
<USER_TASK:> Description:
def shell(state, fancy=False, shell_args=None, anyway=False):
    """Spawns a shell within the virtualenv."""
    from ..core import load_dot_env, do_shell

    # Prevent user from activating nested environments.
    if "PIPENV_ACTIVE" in os.environ:
        # If PIPENV_ACTIVE is set, VIRTUAL_ENV should always be set too.
        venv_name = os.environ.get("VIRTUAL_ENV", "UNKNOWN_VIRTUAL_ENVIRONMENT")
        if not anyway:
            echo(
                "{0} {1} {2}\nNo action taken to avoid nested environments.".format(
                    crayons.normal("Shell for"),
                    crayons.green(venv_name, bold=True),
                    crayons.normal("already activated.", bold=True),
                ),
                err=True,
            )
            sys.exit(1)
    # Load .env file.
    load_dot_env()
    # Use fancy mode for Windows.
    if os.name == "nt":
        fancy = True
    do_shell(
        three=state.three,
        python=state.python,
        fancy=fancy,
        shell_args=shell_args,
        pypi_mirror=state.pypi_mirror,
    )
<SYSTEM_TASK:> Spawns a command installed into the virtualenv. <END_TASK>
<USER_TASK:> Description:
def run(state, command, args):
    """Spawns a command installed into the virtualenv."""
    from ..core import do_run
    do_run(
        command=command, args=args, three=state.three, python=state.python,
        pypi_mirror=state.pypi_mirror
    )
<SYSTEM_TASK:> Checks for security vulnerabilities and against PEP 508 markers provided in Pipfile. <END_TASK>
<USER_TASK:> Description:
def check(state, unused=False, style=False, ignore=None, args=None, **kwargs):
    """Checks for security vulnerabilities and against PEP 508 markers
    provided in Pipfile."""
    from ..core import do_check
    do_check(
        three=state.three,
        python=state.python,
        system=state.system,
        unused=unused,
        ignore=ignore,
        args=args,
        pypi_mirror=state.pypi_mirror,
    )
<SYSTEM_TASK:> Displays currently-installed dependency graph information. <END_TASK>
<USER_TASK:> Description:
def graph(bare=False, json=False, json_tree=False, reverse=False):
    """Displays currently-installed dependency graph information."""
    from ..core import do_graph
    do_graph(bare=bare, json=json, json_tree=json_tree, reverse=reverse)
<SYSTEM_TASK:> View a given module in your editor. <END_TASK>
<USER_TASK:> Description:
def run_open(state, module, *args, **kwargs):
    """View a given module in your editor.

    This uses the EDITOR environment variable. You can temporarily override
    it, for example:

        EDITOR=atom pipenv open requests
    """
    from ..core import which, ensure_project, inline_activate_virtual_environment

    # Ensure that virtualenv is available.
    ensure_project(
        three=state.three, python=state.python,
        validate=False, pypi_mirror=state.pypi_mirror,
    )
    c = delegator.run(
        '{0} -c "import {1}; print({1}.__file__);"'.format(which("python"), module)
    )
    try:
        assert c.return_code == 0
    except AssertionError:
        echo(crayons.red("Module not found!"))
        sys.exit(1)
    if "__init__.py" in c.out:
        p = os.path.dirname(c.out.strip().rstrip("cdo"))
    else:
        p = c.out.strip().rstrip("cdo")
    echo(crayons.normal("Opening {0!r} in your EDITOR.".format(p), bold=True))
    inline_activate_virtual_environment()
    edit(filename=p)
    return 0
<SYSTEM_TASK:> Installs all packages specified in Pipfile.lock. <END_TASK>
<USER_TASK:> Description:
def sync(ctx, state, bare=False, user=False, unused=False, **kwargs):
    """Installs all packages specified in Pipfile.lock."""
    from ..core import do_sync
    retcode = do_sync(
        ctx=ctx,
        dev=state.installstate.dev,
        three=state.three,
        python=state.python,
        bare=bare,
        dont_upgrade=(not state.installstate.keep_outdated),
        user=user,
        clear=state.clear,
        unused=unused,
        sequential=state.installstate.sequential,
        pypi_mirror=state.pypi_mirror,
    )
    if retcode:
        ctx.abort()
<SYSTEM_TASK:> Uninstalls all packages not specified in Pipfile.lock. <END_TASK>
<USER_TASK:> Description:
def clean(ctx, state, dry_run=False, bare=False, user=False):
    """Uninstalls all packages not specified in Pipfile.lock."""
    from ..core import do_clean
    do_clean(ctx=ctx, three=state.three, python=state.python,
             dry_run=dry_run, system=state.system)
<SYSTEM_TASK:> This method is a generic handler for emitting the tags. It also sets <END_TASK>
<USER_TASK:> Description:
def emitCurrentToken(self):
    """This method is a generic handler for emitting the tags. It also sets
    the state to "data" because that's what's needed after a token has been
    emitted.
    """
    token = self.currentToken
    # Add token to the queue to be yielded
    if (token["type"] in tagTokenTypes):
        token["name"] = token["name"].translate(asciiUpper2Lower)
        if token["type"] == tokenTypes["EndTag"]:
            if token["data"]:
                self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                        "data": "attributes-in-end-tag"})
            if token["selfClosing"]:
                self.tokenQueue.append({"type": tokenTypes["ParseError"],
                                        "data": "self-closing-flag-on-end-tag"})
    self.tokenQueue.append(token)
    self.state = self.dataState
<SYSTEM_TASK:> Converts a list of distributions into a PackageSet. <END_TASK>
<USER_TASK:> Description:
def create_package_set_from_installed(**kwargs):
    # type: (**Any) -> Tuple[PackageSet, bool]
    """Converts a list of distributions into a PackageSet."""
    # Default to using all packages installed on the system
    if kwargs == {}:
        kwargs = {"local_only": False, "skip": ()}

    package_set = {}
    problems = False
    for dist in get_installed_distributions(**kwargs):
        name = canonicalize_name(dist.project_name)
        try:
            package_set[name] = PackageDetails(dist.version, dist.requires())
        except RequirementParseError as e:
            # Don't crash on broken metadata
            logging.warning("Error parsing requirements for %s: %s", name, e)
            problems = True
    return package_set, problems
<SYSTEM_TASK:> Check if a package set is consistent <END_TASK>
<USER_TASK:> Description:
def check_package_set(package_set, should_ignore=None):
    # type: (PackageSet, Optional[Callable[[str], bool]]) -> CheckResult
    """Check if a package set is consistent

    If should_ignore is passed, it should be a callable that takes a
    package name and returns a boolean.
    """
    if should_ignore is None:
        def should_ignore(name):
            return False

    missing = dict()
    conflicting = dict()

    for package_name in package_set:
        # Info about dependencies of package_name
        missing_deps = set()  # type: Set[Missing]
        conflicting_deps = set()  # type: Set[Conflicting]

        if should_ignore(package_name):
            continue

        for req in package_set[package_name].requires:
            name = canonicalize_name(req.project_name)  # type: str

            # Check if it's missing
            if name not in package_set:
                missed = True
                if req.marker is not None:
                    missed = req.marker.evaluate()
                if missed:
                    missing_deps.add((name, req))
                continue

            # Check if there's a conflict
            version = package_set[name].version  # type: str
            if not req.specifier.contains(version, prereleases=True):
                conflicting_deps.add((name, version, req))

        if missing_deps:
            missing[package_name] = sorted(missing_deps, key=str)
        if conflicting_deps:
            conflicting[package_name] = sorted(conflicting_deps, key=str)

    return missing, conflicting
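A hedged sketch of how this composes with create_package_set_from_installed above (report formatting is invented, not pip's):

package_set, parse_problems = create_package_set_from_installed()
missing, conflicting = check_package_set(package_set)
for project, deps in missing.items():
    # Each entry is a (name, requirement) pair
    print("%s requires %s, which is not installed"
          % (project, ", ".join(name for name, _ in deps)))
for project, deps in conflicting.items():
    # Each entry is a (name, installed_version, requirement) triple
    print("%s has conflicting dependencies: %s" % (project, deps))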
<SYSTEM_TASK:> For checking if the dependency graph would be consistent after <END_TASK>
<USER_TASK:> Description:
def check_install_conflicts(to_install):
    # type: (List[InstallRequirement]) -> Tuple[PackageSet, CheckResult]
    """For checking if the dependency graph would be consistent after
    installing given requirements
    """
    # Start from the current state
    package_set, _ = create_package_set_from_installed()
    # Install packages
    would_be_installed = _simulate_installation_of(to_install, package_set)

    # Only warn about directly-dependent packages; create a whitelist of them
    whitelist = _create_whitelist(would_be_installed, package_set)

    return (
        package_set,
        check_package_set(
            package_set, should_ignore=lambda name: name not in whitelist
        )
    )
<SYSTEM_TASK:> Computes the version of packages after installing to_install. <END_TASK>
<USER_TASK:> Description:
def _simulate_installation_of(to_install, package_set):
    # type: (List[InstallRequirement], PackageSet) -> Set[str]
    """Computes the version of packages after installing to_install."""
    # Keep track of packages that were installed
    installed = set()

    # Modify it as installing requirement_set would (assuming no errors)
    for inst_req in to_install:
        dist = make_abstract_dist(inst_req).dist()
        name = canonicalize_name(dist.key)
        package_set[name] = PackageDetails(dist.version, dist.requires())

        installed.add(name)

    return installed
<SYSTEM_TASK:> A list of the final component's suffixes, if any. <END_TASK>
<USER_TASK:> Description:
def suffixes(self):
    """A list of the final component's suffixes, if any."""
    name = self.name
    if name.endswith('.'):
        return []
    name = name.lstrip('.')
    return ['.' + suffix for suffix in name.split('.')[1:]]
<SYSTEM_TASK:> The final path component, minus its last suffix. <END_TASK>
<USER_TASK:> Description:
def stem(self):
    """The final path component, minus its last suffix."""
    name = self.name
    i = name.rfind('.')
    if 0 < i < len(name) - 1:
        return name[:i]
    else:
        return name
<SYSTEM_TASK:> An object providing sequence-like access to the <END_TASK>
<USER_TASK:> Description:
def parts(self):
    """An object providing sequence-like access to the
    components in the filesystem path."""
    # We cache the tuple to avoid building a new one each time .parts
    # is accessed. XXX is this necessary?
    try:
        return self._pparts
    except AttributeError:
        self._pparts = tuple(self._parts)
        return self._pparts
<SYSTEM_TASK:> Return True if this path matches the given pattern. <END_TASK>
<USER_TASK:> Description:
def match(self, path_pattern):
    """
    Return True if this path matches the given pattern.
    """
    cf = self._flavour.casefold
    path_pattern = cf(path_pattern)
    drv, root, pat_parts = self._flavour.parse_parts((path_pattern,))
    if not pat_parts:
        raise ValueError("empty pattern")
    if drv and drv != cf(self._drv):
        return False
    if root and root != cf(self._root):
        return False
    parts = self._cparts
    if drv or root:
        if len(pat_parts) != len(parts):
            return False
        pat_parts = pat_parts[1:]
    elif len(pat_parts) > len(parts):
        return False
    for part, pat in zip(reversed(parts), reversed(pat_parts)):
        if not fnmatch.fnmatchcase(part, pat):
            return False
    return True
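The matching semantics, illustrated with stock pathlib (this backport mirrors the standard library's behaviour):

from pathlib import PurePosixPath

PurePosixPath("a/b.py").match("*.py")       # True: matched right-to-left
PurePosixPath("/a/b/c.py").match("b/*.py")  # True: relative patterns may skip leading parts
PurePosixPath("/a/b/c.py").match("/*.py")   # False: absolute patterns must match the whole path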
<SYSTEM_TASK:> Create this file with the given access mode, if it doesn't exist. <END_TASK>
<USER_TASK:> Description:
def touch(self, mode=0o666, exist_ok=True):
    """
    Create this file with the given access mode, if it doesn't exist.
    """
    if self._closed:
        self._raise_closed()
    if exist_ok:
        # First try to bump modification time
        # Implementation note: GNU touch uses the UTIME_NOW option of
        # the utimensat() / futimens() functions.
        try:
            self._accessor.utime(self, None)
        except OSError:
            # Avoid exception chaining
            pass
        else:
            return
    flags = os.O_CREAT | os.O_WRONLY
    if not exist_ok:
        flags |= os.O_EXCL
    fd = self._raw_open(flags, mode)
    os.close(fd)
<SYSTEM_TASK:> Create a new directory at this given path. <END_TASK>
<USER_TASK:> Description:
def mkdir(self, mode=0o777, parents=False, exist_ok=False):
    """
    Create a new directory at this given path.
    """
    if self._closed:
        self._raise_closed()

    def _try_func():
        self._accessor.mkdir(self, mode)

    def _exc_func(exc):
        if not parents or self.parent == self:
            raise exc
        self.parent.mkdir(parents=True, exist_ok=True)
        self.mkdir(mode, parents=False, exist_ok=exist_ok)

    try:
        _try_except_filenotfounderror(_try_func, _exc_func)
    except OSError:
        if not exist_ok or not self.is_dir():
            raise
<SYSTEM_TASK:> Remove this directory. The directory must be empty. <END_TASK>
<USER_TASK:> Description:
def rmdir(self):
    """
    Remove this directory. The directory must be empty.
    """
    if self._closed:
        self._raise_closed()
    self._accessor.rmdir(self)
<SYSTEM_TASK:> Rename this path to the given path. <END_TASK>
<USER_TASK:> Description:
def rename(self, target):
    """
    Rename this path to the given path.
    """
    if self._closed:
        self._raise_closed()
    self._accessor.rename(self, target)
<SYSTEM_TASK:> Whether this path exists. <END_TASK>
<USER_TASK:> Description:
def exists(self):
    """
    Whether this path exists.
    """
    try:
        self.stat()
    except OSError as e:
        if e.errno not in (ENOENT, ENOTDIR):
            raise
        return False
    return True
<SYSTEM_TASK:> Whether this path is a directory. <END_TASK>
<USER_TASK:> Description:
def is_dir(self):
    """
    Whether this path is a directory.
    """
    try:
        return S_ISDIR(self.stat().st_mode)
    except OSError as e:
        if e.errno not in (ENOENT, ENOTDIR):
            raise
        # Path doesn't exist or is a broken symlink
        # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
        return False
<SYSTEM_TASK:> Whether this path is a FIFO. <END_TASK>
<USER_TASK:> Description:
def is_fifo(self):
    """
    Whether this path is a FIFO.
    """
    try:
        return S_ISFIFO(self.stat().st_mode)
    except OSError as e:
        if e.errno not in (ENOENT, ENOTDIR):
            raise
        # Path doesn't exist or is a broken symlink
        # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
        return False
<SYSTEM_TASK:> Whether this path is a socket. <END_TASK>
<USER_TASK:> Description:
def is_socket(self):
    """
    Whether this path is a socket.
    """
    try:
        return S_ISSOCK(self.stat().st_mode)
    except OSError as e:
        if e.errno not in (ENOENT, ENOTDIR):
            raise
        # Path doesn't exist or is a broken symlink
        # (see https://bitbucket.org/pitrou/pathlib/issue/12/)
        return False
<SYSTEM_TASK:> Return the python codec name corresponding to an encoding or None if the <END_TASK>
<USER_TASK:> Description:
def lookupEncoding(encoding):
    """Return the python codec name corresponding to an encoding or None if
    the string doesn't correspond to a valid encoding."""
    if isinstance(encoding, binary_type):
        try:
            encoding = encoding.decode("ascii")
        except UnicodeDecodeError:
            return None

    if encoding is not None:
        try:
            return webencodings.lookup(encoding)
        except AttributeError:
            return None
    else:
        return None
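A behaviour sketch, assuming the webencodings package is available (the label-to-codec mappings follow the WHATWG Encoding spec):

print(lookupEncoding(b"UTF-8").name)   # utf-8
print(lookupEncoding("latin1").name)   # windows-1252 (WHATWG maps the latin1 label there)
print(lookupEncoding("no-such-enc"))   # None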
<SYSTEM_TASK:> Read one character from the stream or queue if available. Return <END_TASK>
<USER_TASK:> Description:
def char(self):
    """ Read one character from the stream or queue if available. Return
        EOF when EOF is reached.
    """
    # Read a new chunk from the input stream if necessary
    if self.chunkOffset >= self.chunkSize:
        if not self.readChunk():
            return EOF

    chunkOffset = self.chunkOffset
    char = self.chunk[chunkOffset]
    self.chunkOffset = chunkOffset + 1

    return char
<SYSTEM_TASK:> Returns a string of characters from the stream up to but not <END_TASK>
<USER_TASK:> Description:
def charsUntil(self, characters, opposite=False):
    """ Returns a string of characters from the stream up to but not
    including any character in 'characters' or EOF. 'characters' must be
    a container that supports the 'in' method and iteration over its
    characters.
    """
    # Use a cache of regexps to find the required characters
    try:
        chars = charsUntilRegEx[(characters, opposite)]
    except KeyError:
        if __debug__:
            for c in characters:
                assert(ord(c) < 128)
        regex = "".join(["\\x%02x" % ord(c) for c in characters])
        if not opposite:
            regex = "^%s" % regex
        chars = charsUntilRegEx[(characters, opposite)] = re.compile("[%s]+" % regex)

    rv = []

    while True:
        # Find the longest matching prefix
        m = chars.match(self.chunk, self.chunkOffset)
        if m is None:
            # If nothing matched, and it wasn't because we ran out of chunk,
            # then stop
            if self.chunkOffset != self.chunkSize:
                break
        else:
            end = m.end()
            # If not the whole chunk matched, return everything
            # up to the part that didn't match
            if end != self.chunkSize:
                rv.append(self.chunk[self.chunkOffset:end])
                self.chunkOffset = end
                break
        # If the whole remainder of the chunk matched,
        # use it all and read the next chunk
        rv.append(self.chunk[self.chunkOffset:])
        if not self.readChunk():
            # Reached EOF
            break

    r = "".join(rv)
    return r
<SYSTEM_TASK:> Report the encoding declared by the meta element <END_TASK>
<USER_TASK:> Description:
def detectEncodingMeta(self):
    """Report the encoding declared by the meta element"""
    buffer = self.rawStream.read(self.numBytesMeta)
    assert isinstance(buffer, bytes)
    parser = EncodingParser(buffer)
    self.rawStream.seek(0)
    encoding = parser.getEncoding()

    if encoding is not None and encoding.name in ("utf-16be", "utf-16le"):
        encoding = lookupEncoding("utf-8")

    return encoding
<SYSTEM_TASK:> Skip past a list of characters <END_TASK>
<USER_TASK:> Description:
def skip(self, chars=spaceCharactersBytes):
    """Skip past a list of characters"""
    p = self.position  # use property for the error-checking
    while p < len(self):
        c = self[p:p + 1]
        if c not in chars:
            self._position = p
            return c
        p += 1

    self._position = p
    return None
<SYSTEM_TASK:> Look for a sequence of bytes at the start of a string. If the bytes <END_TASK>
<USER_TASK:> Description:
def matchBytes(self, bytes):
    """Look for a sequence of bytes at the start of a string. If the bytes
    are found return True and advance the position to the byte after the
    match. Otherwise return False and leave the position alone"""
    p = self.position
    data = self[p:p + len(bytes)]
    rv = data.startswith(bytes)
    if rv:
        self.position += len(bytes)
    return rv
<SYSTEM_TASK:> Look for the next sequence of bytes matching a given sequence. If <END_TASK>
<USER_TASK:> Description:
def jumpTo(self, bytes):
    """Look for the next sequence of bytes matching a given sequence. If
    a match is found advance the position to the last byte of the match"""
    newPosition = self[self.position:].find(bytes)
    if newPosition > -1:
        # XXX: This is ugly, but I can't see a nicer way to fix this.
        if self._position == -1:
            self._position = 0
        self._position += (newPosition + len(bytes) - 1)
        return True
    else:
        raise StopIteration
<SYSTEM_TASK:> Return a name,value pair for the next attribute in the stream, <END_TASK>
<USER_TASK:> Description:
def getAttribute(self):
    """Return a name,value pair for the next attribute in the stream,
    if one is found, or None"""
    data = self.data
    # Step 1 (skip chars)
    c = data.skip(spaceCharactersBytes | frozenset([b"/"]))
    assert c is None or len(c) == 1
    # Step 2
    if c in (b">", None):
        return None
    # Step 3
    attrName = []
    attrValue = []
    # Step 4 attribute name
    while True:
        if c == b"=" and attrName:
            break
        elif c in spaceCharactersBytes:
            # Step 6!
            c = data.skip()
            break
        elif c in (b"/", b">"):
            return b"".join(attrName), b""
        elif c in asciiUppercaseBytes:
            attrName.append(c.lower())
        elif c is None:
            return None
        else:
            attrName.append(c)
        # Step 5
        c = next(data)
    # Step 7
    if c != b"=":
        data.previous()
        return b"".join(attrName), b""
    # Step 8
    next(data)
    # Step 9
    c = data.skip()
    # Step 10
    if c in (b"'", b'"'):
        # 10.1
        quoteChar = c
        while True:
            # 10.2
            c = next(data)
            # 10.3
            if c == quoteChar:
                next(data)
                return b"".join(attrName), b"".join(attrValue)
            # 10.4
            elif c in asciiUppercaseBytes:
                attrValue.append(c.lower())
            # 10.5
            else:
                attrValue.append(c)
    elif c == b">":
        return b"".join(attrName), b""
    elif c in asciiUppercaseBytes:
        attrValue.append(c.lower())
    elif c is None:
        return None
    else:
        attrValue.append(c)
    # Step 11
    while True:
        c = next(data)
        if c in spacesAngleBrackets:
            return b"".join(attrName), b"".join(attrValue)
        elif c in asciiUppercaseBytes:
            attrValue.append(c.lower())
        elif c is None:
            return None
        else:
            attrValue.append(c)
<SYSTEM_TASK:> Find and load the build backend <END_TASK>
<USER_TASK:> Description:
def _build_backend():
    """Find and load the build backend"""
    ep = os.environ['PEP517_BUILD_BACKEND']
    mod_path, _, obj_path = ep.partition(':')
    try:
        obj = import_module(mod_path)
    except ImportError:
        raise BackendUnavailable
    if obj_path:
        for path_part in obj_path.split('.'):
            obj = getattr(obj, path_part)
    return obj
<SYSTEM_TASK:> Invoke optional prepare_metadata_for_build_wheel <END_TASK>
<USER_TASK:> Description:
def prepare_metadata_for_build_wheel(metadata_directory, config_settings):
    """Invoke optional prepare_metadata_for_build_wheel

    Implements a fallback by building a wheel if the hook isn't defined.
    """
    backend = _build_backend()
    try:
        hook = backend.prepare_metadata_for_build_wheel
    except AttributeError:
        return _get_wheel_metadata_from_wheel(backend, metadata_directory,
                                              config_settings)
    else:
        return hook(metadata_directory, config_settings)
<SYSTEM_TASK:> Identify the .dist-info folder inside a wheel ZipFile. <END_TASK>
<USER_TASK:> Description:
def _dist_info_files(whl_zip):
    """Identify the .dist-info folder inside a wheel ZipFile."""
    res = []
    for path in whl_zip.namelist():
        m = re.match(r'[^/\\]+-[^/\\]+\.dist-info/', path)
        if m:
            res.append(path)
    if res:
        return res
    raise Exception("No .dist-info folder found in wheel")
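A quick sketch against an in-memory wheel-like archive (the file names are invented):

import io
from zipfile import ZipFile

buf = io.BytesIO()
with ZipFile(buf, "w") as zf:
    zf.writestr("demo-1.0.dist-info/METADATA", "Metadata-Version: 2.1")
    zf.writestr("demo/__init__.py", "")
with ZipFile(buf) as zf:
    print(_dist_info_files(zf))  # ['demo-1.0.dist-info/METADATA']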
<SYSTEM_TASK:> Build a wheel and extract the metadata from it. <END_TASK>
<USER_TASK:> Description:
def _get_wheel_metadata_from_wheel(
        backend, metadata_directory, config_settings):
    """Build a wheel and extract the metadata from it.

    Fallback for when the build backend does not define the
    'get_wheel_metadata' hook.
    """
    from zipfile import ZipFile
    whl_basename = backend.build_wheel(metadata_directory, config_settings)
    with open(os.path.join(metadata_directory, WHEEL_BUILT_MARKER), 'wb'):
        pass  # Touch marker file

    whl_file = os.path.join(metadata_directory, whl_basename)
    with ZipFile(whl_file) as zipf:
        dist_info = _dist_info_files(zipf)
        zipf.extractall(path=metadata_directory, members=dist_info)
    return dist_info[0].split('/')[0]
<SYSTEM_TASK:> Check for a wheel already built during the get_wheel_metadata hook. <END_TASK>
<USER_TASK:> Description:
def _find_already_built_wheel(metadata_directory):
    """Check for a wheel already built during the get_wheel_metadata hook."""
    if not metadata_directory:
        return None
    metadata_parent = os.path.dirname(metadata_directory)
    if not os.path.isfile(pjoin(metadata_parent, WHEEL_BUILT_MARKER)):
        return None

    whl_files = glob(os.path.join(metadata_parent, '*.whl'))
    if not whl_files:
        print('Found wheel built marker, but no .whl files')
        return None
    if len(whl_files) > 1:
        print('Found multiple .whl files; unspecified behaviour. '
              'Will call build_wheel.')
        return None

    # Exactly one .whl file
    return whl_files[0]
<SYSTEM_TASK:> Invoke the mandatory build_wheel hook. <END_TASK>
<USER_TASK:> Description:
def build_wheel(wheel_directory, config_settings, metadata_directory=None):
    """Invoke the mandatory build_wheel hook.

    If a wheel was already built in the prepare_metadata_for_build_wheel
    fallback, this will copy it rather than rebuilding the wheel.
    """
    prebuilt_whl = _find_already_built_wheel(metadata_directory)
    if prebuilt_whl:
        shutil.copy2(prebuilt_whl, wheel_directory)
        return os.path.basename(prebuilt_whl)

    return _build_backend().build_wheel(wheel_directory, config_settings,
                                        metadata_directory)
<SYSTEM_TASK:> All assignments to names go through this function. <END_TASK>
<USER_TASK:> Description:
def visit_Name(self, node, store_as_param=False, **kwargs):
    """All assignments to names go through this function."""
    if store_as_param or node.ctx == 'param':
        self.symbols.declare_parameter(node.name)
    elif node.ctx == 'store':
        self.symbols.store(node.name)
    elif node.ctx == 'load':
        self.symbols.load(node.name)
<SYSTEM_TASK:> Visit assignments in the correct order. <END_TASK>
<USER_TASK:> Description:
def visit_Assign(self, node, **kwargs):
    """Visit assignments in the correct order."""
    self.visit(node.node, **kwargs)
    self.visit(node.target, **kwargs)
<SYSTEM_TASK:> Moved into a function for testability. <END_TASK>
<USER_TASK:> Description:
def make_set_closure_cell():
    """
    Moved into a function for testability.
    """
    if PYPY:  # pragma: no cover
        def set_closure_cell(cell, value):
            cell.__setstate__((value,))
    else:
        try:
            ctypes = import_ctypes()

            set_closure_cell = ctypes.pythonapi.PyCell_Set
            set_closure_cell.argtypes = (ctypes.py_object, ctypes.py_object)
            set_closure_cell.restype = ctypes.c_int
        except Exception:
            # We try best effort to set the cell, but sometimes it's not
            # possible. For example on Jython or on GAE.
            set_closure_cell = just_warn
    return set_closure_cell
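A hedged, CPython-only sketch of what the returned callable does (make_cell is an invented helper for illustration):

def make_cell():
    value = None
    def reader():
        return value  # closes over `value`, creating a cell
    return reader.__closure__[0]

set_closure_cell = make_set_closure_cell()
cell = make_cell()
set_closure_cell(cell, 42)     # rewrites the cell's contents in place
print(cell.cell_contents)      # 42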
<SYSTEM_TASK:> Write control characters to the appropriate log files <END_TASK>
<USER_TASK:> Description:
def _log_control(self, s):
    """Write control characters to the appropriate log files"""
    if self.encoding is not None:
        s = s.decode(self.encoding, 'replace')
    self._log(s, 'send')
<SYSTEM_TASK:> Turn a list of extras into a string <END_TASK>
<USER_TASK:> Description:
def extras_to_string(extras):
    # type: (Iterable[S]) -> S
    """Turn a list of extras into a string"""
    if isinstance(extras, six.string_types):
        if extras.startswith("["):
            return extras
        else:
            extras = [extras]
    if not extras:
        return ""
    return "[{0}]".format(",".join(sorted(set(extras))))
<SYSTEM_TASK:> Turn a string of extras into a parsed extras list <END_TASK>
<USER_TASK:> Description:
def parse_extras(extras_str):
    # type: (AnyStr) -> List[AnyStr]
    """Turn a string of extras into a parsed extras list"""
    from pkg_resources import Requirement

    extras = Requirement.parse(
        "fakepkg{0}".format(extras_to_string(extras_str))
    ).extras
    return sorted(dedup([extra.lower() for extra in extras]))
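The two helpers round-trip each other; a sketch of the expected behaviour:

print(extras_to_string(["socks", "security"]))  # [security,socks]  (sorted, deduped)
print(parse_extras("[security,socks]"))         # ['security', 'socks']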
<SYSTEM_TASK:> Turn a list of specifier tuples into a string <END_TASK>
<USER_TASK:> Description:
def specs_to_string(specs):
    # type: (List[Union[STRING_TYPE, Specifier]]) -> AnyStr
    """Turn a list of specifier tuples into a string"""
    if specs:
        if isinstance(specs, six.string_types):
            return specs
        try:
            extras = ",".join(["".join(spec) for spec in specs])
        except TypeError:
            extras = ",".join(["".join(spec._spec) for spec in specs])  # type: ignore
        return extras
    return ""
<SYSTEM_TASK:> Given a base path, look for the corresponding ``pyproject.toml`` file and return its <END_TASK>
<USER_TASK:> Description:
def get_pyproject(path):
    # type: (Union[STRING_TYPE, Path]) -> Optional[Tuple[List[STRING_TYPE], STRING_TYPE]]
    """
    Given a base path, look for the corresponding ``pyproject.toml`` file and
    return its build_requires and build_backend.

    :param AnyStr path: The root path of the project, should be a directory
        (will be truncated)
    :return: A 2 tuple of build requirements and the build backend
    :rtype: Optional[Tuple[List[AnyStr], AnyStr]]
    """
    if not path:
        return
    from vistir.compat import Path
    if not isinstance(path, Path):
        path = Path(path)
    if not path.is_dir():
        path = path.parent
    pp_toml = path.joinpath("pyproject.toml")
    setup_py = path.joinpath("setup.py")
    if not pp_toml.exists():
        if not setup_py.exists():
            return None
        requires = ["setuptools>=40.8", "wheel"]
        backend = get_default_pyproject_backend()
    else:
        pyproject_data = {}
        with io.open(pp_toml.as_posix(), encoding="utf-8") as fh:
            pyproject_data = tomlkit.loads(fh.read())
        build_system = pyproject_data.get("build-system", None)
        if build_system is None:
            if setup_py.exists():
                requires = ["setuptools>=40.8", "wheel"]
                backend = get_default_pyproject_backend()
            else:
                requires = ["setuptools>=40.8", "wheel"]
                backend = get_default_pyproject_backend()
            build_system = {"requires": requires, "build-backend": backend}
            pyproject_data["build_system"] = build_system
        else:
            requires = build_system.get("requires", ["setuptools>=40.8", "wheel"])
            backend = build_system.get("build-backend", get_default_pyproject_backend())
    return requires, backend