Dataset columns: text_prompt (string, lengths 100 to 17.7k) and code_prompt (string, lengths 7 to 9.86k).
<SYSTEM_TASK:> Encodes a string into the proper filesystem encoding <END_TASK> <USER_TASK:> Description: def fs_str(string): """Encodes a string into the proper filesystem encoding Borrowed from pip-tools """
if isinstance(string, str):
    return string
assert not isinstance(string, bytes)
return string.encode(_fs_encoding)
<SYSTEM_TASK:> Fetch the string value from a path-like object <END_TASK> <USER_TASK:> Description: def _get_path(path): """ Fetch the string value from a path-like object Returns **None** if there is no string value. """
if isinstance(path, (six.string_types, bytes)):
    return path
path_type = type(path)
try:
    path_repr = path_type.__fspath__(path)
except AttributeError:
    return
if isinstance(path_repr, (six.string_types, bytes)):
    return path_repr
return
<SYSTEM_TASK:> Build a collection of "traces" for each package. <END_TASK> <USER_TASK:> Description: def trace_graph(graph): """Build a collection of "traces" for each package. A trace is a list of names that eventually leads to the package. For example, if A and B are root dependencies, A depends on C and D, B depends on C, and C depends on D, the return value would be like:: { None: [], "A": [None], "B": [None], "C": [[None, "A"], [None, "B"]], "D": [[None, "B", "C"], [None, "A"]], } """
result = {None: []}
for vertex in graph:
    result[vertex] = []
    for root in graph.iter_children(None):
        paths = []
        _trace_visit_vertex(graph, root, vertex, {None}, [None], paths)
        result[vertex].extend(paths)
return result
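A minimal usage sketch, assuming trace_graph and its helper are importable from this module; TinyGraph is a hypothetical stand-in for the real graph object, which only needs iteration over vertices and an iter_children(parent) method:

import itertools  # not required by trace_graph itself; only for clarity here

class TinyGraph(object):
    # Toy DAG: A and B are roots, A -> C, B -> C, C -> D.
    _children = {None: ["A", "B"], "A": ["C"], "B": ["C"], "C": ["D"], "D": []}

    def __iter__(self):
        return iter(k for k in self._children if k is not None)

    def iter_children(self, vertex):
        return iter(self._children[vertex])

traces = trace_graph(TinyGraph())
# traces["D"] should contain the chains [None, "A", "C"] and [None, "B", "C"]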
<SYSTEM_TASK:> Check that a timeout attribute is valid. <END_TASK> <USER_TASK:> Description: def _validate_timeout(cls, value, name): """ Check that a timeout attribute is valid. :param value: The timeout value to validate :param name: The name of the timeout attribute to validate. This is used to specify in error messages. :return: The validated and casted version of the given value. :raises ValueError: If it is a numeric value less than or equal to zero, or the type is not an integer, float, or None. """
if value is _Default:
    return cls.DEFAULT_TIMEOUT

if value is None or value is cls.DEFAULT_TIMEOUT:
    return value

if isinstance(value, bool):
    raise ValueError("Timeout cannot be a boolean value. It must "
                     "be an int, float or None.")
try:
    float(value)
except (TypeError, ValueError):
    raise ValueError("Timeout value %s was %s, but it must be an "
                     "int, float or None." % (name, value))

try:
    if value <= 0:
        raise ValueError("Attempted to set %s timeout to %s, but the "
                         "timeout cannot be set to a value less "
                         "than or equal to 0." % (name, value))
except TypeError:
    # Python 3
    raise ValueError("Timeout value %s was %s, but it must be an "
                     "int, float or None." % (name, value))

return value
<SYSTEM_TASK:> Create a copy of the timeout object <END_TASK> <USER_TASK:> Description: def clone(self): """ Create a copy of the timeout object Timeout properties are stored per-pool but each request needs a fresh Timeout object to ensure each one has its own start/stop configured. :return: a copy of the timeout object :rtype: :class:`Timeout` """
# We can't use copy.deepcopy because that will also create a new object
# for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
# detect the user default.
return Timeout(connect=self._connect, read=self._read,
               total=self.total)
<SYSTEM_TASK:> Get the value to use when setting a connection timeout. <END_TASK> <USER_TASK:> Description: def connect_timeout(self): """ Get the value to use when setting a connection timeout. This will be a positive float or integer, the value None (never timeout), or the default system timeout. :return: Connect timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None """
if self.total is None:
    return self._connect

if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
    return self.total

return min(self._connect, self.total)
<SYSTEM_TASK:> Get the value for the read timeout. <END_TASK> <USER_TASK:> Description: def read_timeout(self): """ Get the value for the read timeout. This assumes some time has elapsed in the connection timeout and computes the read timeout appropriately. If self.total is set, the read timeout is dependent on the amount of time taken by the connect timeout. If the connection time has not been established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be raised. :return: Value to use for the read timeout. :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` has not yet been called on this object. """
if (self.total is not None and
        self.total is not self.DEFAULT_TIMEOUT and
        self._read is not None and
        self._read is not self.DEFAULT_TIMEOUT):
    # In case the connect timeout has not yet been established.
    if self._start_connect is None:
        return self._read
    return max(0, min(self.total - self.get_connect_duration(),
                      self._read))
elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
    return max(0, self.total - self.get_connect_duration())
else:
    return self._read
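For context, here is how these properties interact on urllib3's public Timeout class (the values are illustrative):

from urllib3.util.timeout import Timeout

t = Timeout(connect=5.0, read=10.0, total=12.0)
print(t.connect_timeout)    # 5.0 -- the smaller of connect and total
fresh = t.clone()           # each request should get its own copy
fresh.start_connect()       # read_timeout is only meaningful after this
print(fresh.read_timeout)   # bounded by read and whatever of total remains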
<SYSTEM_TASK:> Establish a new connection via the SOCKS proxy. <END_TASK> <USER_TASK:> Description: def _new_conn(self): """ Establish a new connection via the SOCKS proxy. """
extra_kw = {}
if self.source_address:
    extra_kw['source_address'] = self.source_address

if self.socket_options:
    extra_kw['socket_options'] = self.socket_options

try:
    conn = socks.create_connection(
        (self.host, self.port),
        proxy_type=self._socks_options['socks_version'],
        proxy_addr=self._socks_options['proxy_host'],
        proxy_port=self._socks_options['proxy_port'],
        proxy_username=self._socks_options['username'],
        proxy_password=self._socks_options['password'],
        proxy_rdns=self._socks_options['rdns'],
        timeout=self.timeout,
        **extra_kw
    )

except SocketTimeout as e:
    raise ConnectTimeoutError(
        self, "Connection to %s timed out. (connect timeout=%s)" %
        (self.host, self.timeout))

except socks.ProxyError as e:
    # This is fragile as hell, but it seems to be the only way to raise
    # useful errors here.
    if e.socket_err:
        error = e.socket_err
        if isinstance(error, SocketTimeout):
            raise ConnectTimeoutError(
                self,
                "Connection to %s timed out. (connect timeout=%s)" %
                (self.host, self.timeout)
            )
        else:
            raise NewConnectionError(
                self,
                "Failed to establish a new connection: %s" % error
            )
    else:
        raise NewConnectionError(
            self, "Failed to establish a new connection: %s" % e
        )

except SocketError as e:  # Defensive: PySocks should catch all these.
    raise NewConnectionError(
        self, "Failed to establish a new connection: %s" % e)

return conn
<SYSTEM_TASK:> Exit without garbage collection, this speeds up exit by about 10ms for <END_TASK> <USER_TASK:> Description: def fast_exit(code): """Exit without garbage collection, this speeds up exit by about 10ms for things like bash completion. """
sys.stdout.flush()
sys.stderr.flush()
os._exit(code)
<SYSTEM_TASK:> Context manager that attaches extra information to exceptions that <END_TASK> <USER_TASK:> Description: def augment_usage_errors(ctx, param=None): """Context manager that attaches extra information to exceptions that fly. """
try:
    yield
except BadParameter as e:
    if e.ctx is None:
        e.ctx = ctx
    if param is not None and e.param is None:
        e.param = param
    raise
except UsageError as e:
    if e.ctx is None:
        e.ctx = ctx
    raise
<SYSTEM_TASK:> Given a sequence of parameters in the order as should be considered <END_TASK> <USER_TASK:> Description: def iter_params_for_processing(invocation_order, declaration_order): """Given a sequence of parameters in the order as should be considered for processing and an iterable of parameters that exist, this returns a list in the correct order as they should be processed. """
def sort_key(item):
    try:
        idx = invocation_order.index(item)
    except ValueError:
        idx = float('inf')
    return (not item.is_eager, idx)

return sorted(declaration_order, key=sort_key)
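A self-contained illustration of the resulting order, assuming iter_params_for_processing is importable from click's core module; Param here is a minimal stand-in for click's Parameter, needing only an is_eager attribute:

from collections import namedtuple

Param = namedtuple("Param", ["name", "is_eager"])
version = Param("version", True)
name = Param("name", False)
verbose = Param("verbose", False)

ordered = iter_params_for_processing(
    invocation_order=[name, verbose],
    declaration_order=[verbose, name, version],
)
# -> [version, name, verbose]: eager parameters first, then invocation order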
<SYSTEM_TASK:> The computed command path. This is used for the ``usage`` <END_TASK> <USER_TASK:> Description: def command_path(self): """The computed command path. This is used for the ``usage`` information on the help page. It's automatically created by combining the info names of the chain of contexts to the root. """
rv = ''
if self.info_name is not None:
    rv = self.info_name
if self.parent is not None:
    rv = self.parent.command_path + ' ' + rv
return rv.lstrip()
<SYSTEM_TASK:> Finds the outermost context. <END_TASK> <USER_TASK:> Description: def find_root(self): """Finds the outermost context."""
node = self
while node.parent is not None:
    node = node.parent
return node
<SYSTEM_TASK:> Finds the closest object of a given type. <END_TASK> <USER_TASK:> Description: def find_object(self, object_type): """Finds the closest object of a given type."""
node = self
while node is not None:
    if isinstance(node.obj, object_type):
        return node.obj
    node = node.parent
<SYSTEM_TASK:> Writes the usage line into the formatter. <END_TASK> <USER_TASK:> Description: def format_usage(self, ctx, formatter): """Writes the usage line into the formatter."""
pieces = self.collect_usage_pieces(ctx)
formatter.write_usage(ctx.command_path, ' '.join(pieces))
<SYSTEM_TASK:> Returns all the pieces that go into the usage line and returns <END_TASK> <USER_TASK:> Description: def collect_usage_pieces(self, ctx): """Returns all the pieces that go into the usage line and returns it as a list of strings. """
rv = [self.options_metavar]
for param in self.get_params(ctx):
    rv.extend(param.get_usage_pieces(ctx))
return rv
<SYSTEM_TASK:> Returns the names for the help option. <END_TASK> <USER_TASK:> Description: def get_help_option_names(self, ctx): """Returns the names for the help option."""
all_names = set(ctx.help_option_names)
for param in self.params:
    all_names.difference_update(param.opts)
    all_names.difference_update(param.secondary_opts)
return all_names
<SYSTEM_TASK:> Gets short help for the command or makes it by shortening the long help string. <END_TASK> <USER_TASK:> Description: def get_short_help_str(self, limit=45): """Gets short help for the command or makes it by shortening the long help string."""
return self.short_help or self.help and make_default_short_help(self.help, limit) or ''
<SYSTEM_TASK:> Writes the help into the formatter if it exists. <END_TASK> <USER_TASK:> Description: def format_help(self, ctx, formatter): """Writes the help into the formatter if it exists. This calls into the following methods: - :meth:`format_usage` - :meth:`format_help_text` - :meth:`format_options` - :meth:`format_epilog` """
self.format_usage(ctx, formatter)
self.format_help_text(ctx, formatter)
self.format_options(ctx, formatter)
self.format_epilog(ctx, formatter)
<SYSTEM_TASK:> Writes all the options into the formatter if they exist. <END_TASK> <USER_TASK:> Description: def format_options(self, ctx, formatter): """Writes all the options into the formatter if they exist."""
opts = []
for param in self.get_params(ctx):
    rv = param.get_help_record(ctx)
    if rv is not None:
        opts.append(rv)

if opts:
    with formatter.section('Options'):
        formatter.write_dl(opts)
<SYSTEM_TASK:> Writes the epilog into the formatter if it exists. <END_TASK> <USER_TASK:> Description: def format_epilog(self, ctx, formatter): """Writes the epilog into the formatter if it exists."""
if self.epilog:
    formatter.write_paragraph()
    with formatter.indentation():
        formatter.write_text(self.epilog)
<SYSTEM_TASK:> Given a context variable this calculates the default value. <END_TASK> <USER_TASK:> Description: def get_default(self, ctx): """Given a context variable this calculates the default value."""
# Otherwise go with the regular default.
if callable(self.default):
    rv = self.default()
else:
    rv = self.default
return self.type_cast_value(ctx, rv)
<SYSTEM_TASK:> Given a value this runs it properly through the type system. <END_TASK> <USER_TASK:> Description: def type_cast_value(self, ctx, value): """Given a value this runs it properly through the type system. This automatically handles things like `nargs` and `multiple` as well as composite types. """
if self.type.is_composite:
    if self.nargs <= 1:
        raise TypeError('Attempted to invoke composite type '
                        'but nargs has been set to %s. This is '
                        'not supported; nargs needs to be set to '
                        'a fixed value > 1.' % self.nargs)
    if self.multiple:
        return tuple(self.type(x or (), self, ctx) for x in value or ())
    return self.type(value or (), self, ctx)

def _convert(value, level):
    if level == 0:
        return self.type(value, self, ctx)
    return tuple(_convert(x, level - 1) for x in value or ())

return _convert(value, (self.nargs != 1) + bool(self.multiple))
<SYSTEM_TASK:> Get a stringified version of the param for use in error messages to <END_TASK> <USER_TASK:> Description: def get_error_hint(self, ctx): """Get a stringified version of the param for use in error messages to indicate which param caused the error. """
hint_list = self.opts or [self.human_readable_name]
return ' / '.join('"%s"' % x for x in hint_list)
<SYSTEM_TASK:> Find all matching dependencies using the supplied finder and the <END_TASK> <USER_TASK:> Description: def find_all_matches(finder, ireq, pre=False): # type: (PackageFinder, InstallRequirement, bool) -> List[InstallationCandidate] """Find all matching dependencies using the supplied finder and the given ireq. :param finder: A package finder for discovering matching candidates. :type finder: :class:`~pip._internal.index.PackageFinder` :param ireq: An install requirement. :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement` :return: A list of matching candidates. :rtype: list[:class:`~pip._internal.index.InstallationCandidate`] """
candidates = clean_requires_python(finder.find_all_candidates(ireq.name))
versions = {candidate.version for candidate in candidates}
allowed_versions = _get_filtered_versions(ireq, versions, pre)
if not pre and not allowed_versions:
    allowed_versions = _get_filtered_versions(ireq, versions, True)
candidates = {c for c in candidates if c.version in allowed_versions}
return candidates
<SYSTEM_TASK:> Get all abstract dependencies for a given list of requirements. <END_TASK> <USER_TASK:> Description: def get_abstract_dependencies(reqs, sources=None, parent=None): """Get all abstract dependencies for a given list of requirements. Given a set of requirements, convert each requirement to an Abstract Dependency. :param reqs: A list of Requirements :type reqs: list[:class:`~requirementslib.models.requirements.Requirement`] :param sources: Pipfile-formatted sources, defaults to None :param sources: list[dict], optional :param parent: The parent of this list of dependencies, defaults to None :param parent: :class:`~requirementslib.models.requirements.Requirement`, optional :return: A list of Abstract Dependencies :rtype: list[:class:`~requirementslib.models.dependency.AbstractDependency`] """
deps = []
from .requirements import Requirement

for req in reqs:
    if isinstance(req, pip_shims.shims.InstallRequirement):
        requirement = Requirement.from_line(
            "{0}{1}".format(req.name, req.specifier)
        )
        if req.link:
            requirement.req.link = req.link
            requirement.markers = req.markers
            requirement.req.markers = req.markers
            requirement.extras = req.extras
            requirement.req.extras = req.extras
    elif isinstance(req, Requirement):
        requirement = copy.deepcopy(req)
    else:
        requirement = Requirement.from_line(req)
    dep = AbstractDependency.from_requirement(requirement, parent=parent)
    deps.append(dep)
return deps
<SYSTEM_TASK:> Retrieves dependencies for the given install requirement from the wheel cache. <END_TASK> <USER_TASK:> Description: def get_dependencies_from_wheel_cache(ireq): """Retrieves dependencies for the given install requirement from the wheel cache. :param ireq: A single InstallRequirement :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement` :return: A set of dependency lines for generating new InstallRequirements. :rtype: set(str) or None """
if ireq.editable or not is_pinned_requirement(ireq):
    return

matches = WHEEL_CACHE.get(ireq.link, name_from_req(ireq.req))
if matches:
    matches = set(matches)
    if not DEPENDENCY_CACHE.get(ireq):
        DEPENDENCY_CACHE[ireq] = [format_requirement(m) for m in matches]
    return matches
return
<SYSTEM_TASK:> Retrieves dependencies for the given install requirement from the json api. <END_TASK> <USER_TASK:> Description: def get_dependencies_from_json(ireq): """Retrieves dependencies for the given install requirement from the json api. :param ireq: A single InstallRequirement :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement` :return: A set of dependency lines for generating new InstallRequirements. :rtype: set(str) or None """
if ireq.editable or not is_pinned_requirement(ireq):
    return

# It is technically possible to parse extras out of the JSON API's
# requirement format, but it is such a chore let's just use the simple API.
if ireq.extras:
    return

session = requests.session()
atexit.register(session.close)
version = str(ireq.req.specifier).lstrip("=")

def gen(ireq):
    info = None
    try:
        info = session.get(
            "https://pypi.org/pypi/{0}/{1}/json".format(ireq.req.name, version)
        ).json()["info"]
    finally:
        session.close()

    requires_dist = info.get("requires_dist", info.get("requires"))
    if not requires_dist:   # The API can return None for this.
        return
    for requires in requires_dist:
        i = pip_shims.shims.InstallRequirement.from_line(requires)
        # See above, we don't handle requirements with extras.
        if not _marker_contains_extra(i):
            yield format_requirement(i)

if ireq not in DEPENDENCY_CACHE:
    try:
        reqs = DEPENDENCY_CACHE[ireq] = list(gen(ireq))
    except JSONDecodeError:
        return
    req_iter = iter(reqs)
else:
    req_iter = gen(ireq)
return set(req_iter)
<SYSTEM_TASK:> Retrieves dependencies for the given install requirement from the dependency cache. <END_TASK> <USER_TASK:> Description: def get_dependencies_from_cache(ireq): """Retrieves dependencies for the given install requirement from the dependency cache. :param ireq: A single InstallRequirement :type ireq: :class:`~pip._internal.req.req_install.InstallRequirement` :return: A set of dependency lines for generating new InstallRequirements. :rtype: set(str) or None """
if ireq.editable or not is_pinned_requirement(ireq):
    return
if ireq not in DEPENDENCY_CACHE:
    return

cached = set(DEPENDENCY_CACHE[ireq])

# Preserving sanity: Run through the cache and make sure every entry is
# valid. If this fails, something is wrong with the cache. Drop it.
try:
    broken = False
    for line in cached:
        dep_ireq = pip_shims.shims.InstallRequirement.from_line(line)
        name = canonicalize_name(dep_ireq.name)
        if _marker_contains_extra(dep_ireq):
            broken = True  # The "extra =" marker breaks everything.
        elif name == canonicalize_name(ireq.name):
            broken = True  # A package cannot depend on itself.
        if broken:
            break
except Exception:
    broken = True

if broken:
    del DEPENDENCY_CACHE[ireq]
    return

return cached
<SYSTEM_TASK:> Retrieves dependencies for the given install requirement from the pip resolver. <END_TASK> <USER_TASK:> Description: def get_dependencies_from_index(dep, sources=None, pip_options=None, wheel_cache=None): """Retrieves dependencies for the given install requirement from the pip resolver. :param dep: A single InstallRequirement :type dep: :class:`~pip._internal.req.req_install.InstallRequirement` :param sources: Pipfile-formatted sources, defaults to None :type sources: list[dict], optional :return: A set of dependency lines for generating new InstallRequirements. :rtype: set(str) or None """
finder = get_finder(sources=sources, pip_options=pip_options)
if not wheel_cache:
    wheel_cache = WHEEL_CACHE
dep.is_direct = True
reqset = pip_shims.shims.RequirementSet()
reqset.add_requirement(dep)
requirements = None
setup_requires = {}
with temp_environ(), start_resolver(finder=finder, wheel_cache=wheel_cache) as resolver:
    os.environ['PIP_EXISTS_ACTION'] = 'i'
    dist = None
    if dep.editable and not dep.prepared and not dep.req:
        with cd(dep.setup_py_dir):
            from setuptools.dist import distutils
            try:
                dist = distutils.core.run_setup(dep.setup_py)
            except (ImportError, TypeError, AttributeError):
                dist = None
            else:
                setup_requires[dist.get_name()] = dist.setup_requires
    if not dist:
        try:
            dist = dep.get_dist()
        except (TypeError, ValueError, AttributeError):
            pass
        else:
            setup_requires[dist.get_name()] = dist.setup_requires
    resolver.require_hashes = False
    try:
        results = resolver._resolve_one(reqset, dep)
    except Exception:
        # FIXME: Needs to bubble the exception somehow to the user.
        results = []
    finally:
        try:
            wheel_cache.cleanup()
        except AttributeError:
            pass
    resolver_requires_python = getattr(resolver, "requires_python", None)
    requires_python = getattr(reqset, "requires_python", resolver_requires_python)
    if requires_python:
        add_marker = fix_requires_python_marker(requires_python)
        reqset.remove(dep)
        if dep.req.marker:
            # Join the python_requires marker onto the existing marker.
            dep.req.marker._markers.extend(['and'] + add_marker._markers)
        else:
            dep.req.marker = add_marker
        reqset.add(dep)
    requirements = set()
    for r in results:
        if requires_python:
            if r.req.marker:
                r.req.marker._markers.extend(['and'] + add_marker._markers)
            else:
                r.req.marker = add_marker
        requirements.add(format_requirement(r))
    for section in setup_requires:
        python_version = section
        not_python = not is_python(section)
        # This is for cleaning up :extras: formatted markers
        # by adding them to the results of the resolver
        # since any such extra would have been returned as a result anyway
        for value in setup_requires[section]:
            # This is a marker.
            if is_python(section):
                python_version = value[1:-1]
            else:
                not_python = True
            if ':' not in value and not_python:
                try:
                    requirement_str = "{0}{1}".format(value, python_version).replace(":", ";")
                    requirements.add(
                        format_requirement(make_install_requirement(requirement_str).ireq)
                    )
                # Anything could go wrong here -- can't be too careful.
                except Exception:
                    pass
if not dep.editable and is_pinned_requirement(dep) and requirements is not None:
    DEPENDENCY_CACHE[dep] = list(requirements)
return requirements
<SYSTEM_TASK:> Build a pip command from a list of sources <END_TASK> <USER_TASK:> Description: def get_pip_options(args=[], sources=None, pip_command=None): """Build a pip command from a list of sources :param args: positional arguments passed through to the pip parser :param sources: A list of pipfile-formatted sources, defaults to None :param sources: list[dict], optional :param pip_command: A pre-built pip command instance :type pip_command: :class:`~pip._internal.cli.base_command.Command` :return: An instance of pip_options using the supplied arguments plus sane defaults :rtype: :class:`~pip._internal.cli.cmdoptions` """
if not pip_command:
    pip_command = get_pip_command()
if not sources:
    sources = [
        {"url": "https://pypi.org/simple", "name": "pypi", "verify_ssl": True}
    ]
_ensure_dir(CACHE_DIR)
pip_args = args
pip_args = prepare_pip_source_args(sources, pip_args)
pip_options, _ = pip_command.parser.parse_args(pip_args)
pip_options.cache_dir = CACHE_DIR
return pip_options
<SYSTEM_TASK:> Get a package finder for looking up candidates to install <END_TASK> <USER_TASK:> Description: def get_finder(sources=None, pip_command=None, pip_options=None): # type: (List[Dict[S, Union[S, bool]]], Optional[Command], Any) -> PackageFinder """Get a package finder for looking up candidates to install :param sources: A list of pipfile-formatted sources, defaults to None :param sources: list[dict], optional :param pip_command: A pip command instance, defaults to None :type pip_command: :class:`~pip._internal.cli.base_command.Command` :param pip_options: A pip options, defaults to None :type pip_options: :class:`~pip._internal.cli.cmdoptions` :return: A package finder :rtype: :class:`~pip._internal.index.PackageFinder` """
if not pip_command:
    pip_command = get_pip_command()
if not sources:
    sources = [
        {"url": "https://pypi.org/simple", "name": "pypi", "verify_ssl": True}
    ]
if not pip_options:
    pip_options = get_pip_options(sources=sources, pip_command=pip_command)
session = pip_command._build_session(pip_options)
atexit.register(session.close)
finder = pip_shims.shims.PackageFinder(
    find_links=[],
    index_urls=[s.get("url") for s in sources],
    trusted_hosts=[],
    allow_all_prereleases=pip_options.pre,
    session=session,
)
return finder
<SYSTEM_TASK:> Context manager to produce a resolver. <END_TASK> <USER_TASK:> Description: def start_resolver(finder=None, wheel_cache=None): """Context manager to produce a resolver. :param finder: A package finder to use for searching the index :type finder: :class:`~pip._internal.index.PackageFinder` :return: A 3-tuple of finder, preparer, resolver :rtype: (:class:`~pip._internal.operations.prepare.RequirementPreparer`, :class:`~pip._internal.resolve.Resolver`) """
pip_command = get_pip_command()
pip_options = get_pip_options(pip_command=pip_command)
if not finder:
    finder = get_finder(pip_command=pip_command, pip_options=pip_options)
if not wheel_cache:
    wheel_cache = WHEEL_CACHE
_ensure_dir(fs_str(os.path.join(wheel_cache.cache_dir, "wheels")))

download_dir = PKGS_DOWNLOAD_DIR
_ensure_dir(download_dir)

_build_dir = create_tracked_tempdir(fs_str("build"))
_source_dir = create_tracked_tempdir(fs_str("source"))
preparer = partialclass(
    pip_shims.shims.RequirementPreparer,
    build_dir=_build_dir,
    src_dir=_source_dir,
    download_dir=download_dir,
    wheel_download_dir=WHEEL_DOWNLOAD_DIR,
    progress_bar="off",
    build_isolation=False,
)
resolver = partialclass(
    pip_shims.shims.Resolver,
    finder=finder,
    session=finder.session,
    upgrade_strategy="to-satisfy-only",
    force_reinstall=True,
    ignore_dependencies=False,
    ignore_requires_python=True,
    ignore_installed=True,
    isolated=False,
    wheel_cache=wheel_cache,
    use_user_site=False,
)
try:
    if packaging.version.parse(pip_shims.shims.pip_version) >= packaging.version.parse('18'):
        with pip_shims.shims.RequirementTracker() as req_tracker:
            preparer = preparer(req_tracker=req_tracker)
            yield resolver(preparer=preparer)
    else:
        preparer = preparer()
        yield resolver(preparer=preparer)
finally:
    finder.session.close()
<SYSTEM_TASK:> Writes the cache to disk as JSON. <END_TASK> <USER_TASK:> Description: def write_cache(self): """Writes the cache to disk as JSON."""
doc = {
    '__format__': 1,
    'dependencies': self._cache,
}
with open(self._cache_file, 'w') as f:
    json.dump(doc, f, sort_keys=True)
<SYSTEM_TASK:> Returns a lookup table of reverse dependencies for all the given ireqs. <END_TASK> <USER_TASK:> Description: def reverse_dependencies(self, ireqs): """ Returns a lookup table of reverse dependencies for all the given ireqs. Since this is all static, it only works if the dependency cache contains the complete data, otherwise you end up with a partial view. This is typically no problem if you use this function after the entire dependency tree is resolved. """
ireqs_as_cache_values = [self.as_cache_key(ireq) for ireq in ireqs]
return self._reverse_dependencies(ireqs_as_cache_values)
<SYSTEM_TASK:> Returns a lookup table of reverse dependencies for all the given cache keys. <END_TASK> <USER_TASK:> Description: def _reverse_dependencies(self, cache_keys): """ Returns a lookup table of reverse dependencies for all the given cache keys. Example input: [('pep8', '1.5.7'), ('flake8', '2.4.0'), ('mccabe', '0.3'), ('pyflakes', '0.8.1')] Example output: {'pep8': ['flake8'], 'flake8': [], 'mccabe': ['flake8'], 'pyflakes': ['flake8']} """
# First, collect all the dependencies into a sequence of (parent, child)
# tuples, like [('flake8', 'pep8'), ('flake8', 'mccabe'), ...]
return lookup_table(
    (key_from_req(Requirement(dep_name)), name)
    for name, version_and_extras in cache_keys
    for dep_name in self.cache[name][version_and_extras]
)
<SYSTEM_TASK:> Given a requirement, return its cache key. <END_TASK> <USER_TASK:> Description: def as_cache_key(self, ireq): """Given a requirement, return its cache key. This behavior is a little weird in order to allow backwards compatibility with cache files. For a requirement without extras, this will return, for example:: ("ipython", "2.1.0") For a requirement with extras, the extras will be comma-separated and appended to the version, inside brackets, like so:: ("ipython", "2.1.0[nbconvert,notebook]") """
extras = tuple(sorted(ireq.extras))
if not extras:
    extras_string = ""
else:
    extras_string = "[{}]".format(",".join(extras))
name = key_from_req(ireq.req)
version = get_pinned_version(ireq)
return name, "{}{}".format(version, extras_string)
<SYSTEM_TASK:> Decorator which enables locks for decorated function. <END_TASK> <USER_TASK:> Description: def locked(path, timeout=None): """Decorator which enables locks for decorated function. Arguments: - path: path for lockfile. - timeout (optional): Timeout for acquiring lock. Usage: @locked('/var/run/myname', timeout=0) def myname(...): ... """
def decor(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        lock = FileLock(path, timeout=timeout)
        lock.acquire()
        try:
            return func(*args, **kwargs)
        finally:
            lock.release()
    return wrapper
return decor
<SYSTEM_TASK:> Get a TreeWalker class for various types of tree with built-in support <END_TASK> <USER_TASK:> Description: def getTreeWalker(treeType, implementation=None, **kwargs): """Get a TreeWalker class for various types of tree with built-in support :arg str treeType: the name of the tree type required (case-insensitive). Supported values are: * "dom": The xml.dom.minidom DOM implementation * "etree": A generic walker for tree implementations exposing an elementtree-like interface (known to work with ElementTree, cElementTree and lxml.etree). * "lxml": Optimized walker for lxml.etree * "genshi": a Genshi stream :arg implementation: A module implementing the tree type e.g. xml.etree.ElementTree or cElementTree (Currently applies to the "etree" tree type only). :arg kwargs: keyword arguments passed to the etree walker--for other walkers, this has no effect :returns: a TreeWalker class """
treeType = treeType.lower()
if treeType not in treeWalkerCache:
    if treeType == "dom":
        from . import dom
        treeWalkerCache[treeType] = dom.TreeWalker
    elif treeType == "genshi":
        from . import genshi
        treeWalkerCache[treeType] = genshi.TreeWalker
    elif treeType == "lxml":
        from . import etree_lxml
        treeWalkerCache[treeType] = etree_lxml.TreeWalker
    elif treeType == "etree":
        from . import etree
        if implementation is None:
            implementation = default_etree
        # XXX: NEVER cache here, caching is done in the etree submodule
        return etree.getETreeModule(implementation, **kwargs).TreeWalker
return treeWalkerCache.get(treeType)
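A short usage sketch with html5lib's public API (the default tree builder is etree, so the matching walker is requested here):

import html5lib

doc = html5lib.parse("<p>hello <b>world</b></p>")
TreeWalker = html5lib.getTreeWalker("etree")
for token in TreeWalker(doc):
    if token["type"] in ("StartTag", "Characters"):
        print(token["type"], token.get("name", token.get("data")))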
<SYSTEM_TASK:> Hide the spinner to allow for custom writing to the terminal. <END_TASK> <USER_TASK:> Description: def hide(self): """Hide the spinner to allow for custom writing to the terminal."""
thr_is_alive = self._spin_thread and self._spin_thread.is_alive()

if thr_is_alive and not self._hide_spin.is_set():
    # set the hidden spinner flag
    self._hide_spin.set()

    # clear the current line
    sys.stdout.write("\r")
    self._clear_line()

    # flush the stdout buffer so the current line can be rewritten to
    sys.stdout.flush()
<SYSTEM_TASK:> Write text in the terminal without breaking the spinner. <END_TASK> <USER_TASK:> Description: def write(self, text): """Write text in the terminal without breaking the spinner."""
# similar to tqdm.write()
# https://pypi.python.org/pypi/tqdm#writing-messages
sys.stdout.write("\r")
self._clear_line()

_text = to_unicode(text)
if PY2:
    _text = _text.encode(ENCODING)

# Ensure output is bytes for Py2 and Unicode for Py3
assert isinstance(_text, builtin_str)

sys.stdout.write("{0}\n".format(_text))
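These methods mirror the yaspin spinner API; a typical interaction looks roughly like this (assuming yaspin is installed):

import time
from yaspin import yaspin

with yaspin(text="Resolving dependencies") as spinner:
    time.sleep(1)
    spinner.write("> locked requests")   # prints a line above the spinner
    spinner.hide()                       # temporarily clear the spinner line
    print("raw output while the spinner is hidden")
    spinner.show()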
<SYSTEM_TASK:> Make a copy of the current instance, but with a new rev. <END_TASK> <USER_TASK:> Description: def make_new(self, rev): # type: (str) -> RevOptions """ Make a copy of the current instance, but with a new rev. Args: rev: the name of the revision for the new object. """
return self.vcs.make_rev_options(rev, extra_args=self.extra_args)
<SYSTEM_TASK:> Parse the repository URL to use, and return the URL, revision, <END_TASK> <USER_TASK:> Description: def get_url_rev_and_auth(self, url): # type: (str) -> Tuple[str, Optional[str], AuthInfo] """ Parse the repository URL to use, and return the URL, revision, and auth info to use. Returns: (url, rev, (username, password)). """
scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
if '+' not in scheme:
    raise ValueError(
        "Sorry, {!r} is a malformed VCS url. "
        "The format is <vcs>+<protocol>://<url>, "
        "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url)
    )
# Remove the vcs prefix.
scheme = scheme.split('+', 1)[1]
netloc, user_pass = self.get_netloc_and_auth(netloc, scheme)
rev = None
if '@' in path:
    path, rev = path.rsplit('@', 1)
url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
return url, rev, user_pass
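A simplified, standalone illustration of the same splitting rule (not pip's actual method), assuming the usual <vcs>+<protocol>:// form:

from urllib.parse import urlsplit, urlunsplit

def split_vcs_url(url):
    scheme, netloc, path, query, frag = urlsplit(url)
    vcs, _, scheme = scheme.partition("+")   # drop the "git+" style prefix
    rev = None
    if "@" in path:
        path, rev = path.rsplit("@", 1)      # a trailing @rev selects a revision
    return urlunsplit((scheme, netloc, path, query, "")), rev

print(split_vcs_url("git+https://example.com/repo.git@v1.0#egg=repo"))
# ('https://example.com/repo.git', 'v1.0')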
<SYSTEM_TASK:> Compare two repo URLs for identity, ignoring incidental differences. <END_TASK> <USER_TASK:> Description: def compare_urls(self, url1, url2): # type: (str, str) -> bool """ Compare two repo URLs for identity, ignoring incidental differences. """
return (self.normalize_url(url1) == self.normalize_url(url2))
<SYSTEM_TASK:> Install or update in editable mode the package represented by this <END_TASK> <USER_TASK:> Description: def obtain(self, dest): # type: (str) -> None """ Install or update in editable mode the package represented by this VersionControl object. Args: dest: the repository directory in which to install or update. """
url, rev_options = self.get_url_rev_options(self.url)

if not os.path.exists(dest):
    self.fetch_new(dest, url, rev_options)
    return

rev_display = rev_options.to_display()
if self.is_repository_directory(dest):
    existing_url = self.get_remote_url(dest)
    if self.compare_urls(existing_url, url):
        logger.debug(
            '%s in %s exists, and has correct URL (%s)',
            self.repo_name.title(),
            display_path(dest),
            url,
        )
        if not self.is_commit_id_equal(dest, rev_options.rev):
            logger.info(
                'Updating %s %s%s',
                display_path(dest),
                self.repo_name,
                rev_display,
            )
            self.update(dest, url, rev_options)
        else:
            logger.info('Skipping because already up-to-date.')
        return

    logger.warning(
        '%s %s in %s exists with URL %s',
        self.name,
        self.repo_name,
        display_path(dest),
        existing_url,
    )
    prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
              ('s', 'i', 'w', 'b'))
else:
    logger.warning(
        'Directory %s already exists, and is not a %s %s.',
        dest,
        self.name,
        self.repo_name,
    )
    # https://github.com/python/mypy/issues/1174
    prompt = ('(i)gnore, (w)ipe, (b)ackup ',  # type: ignore
              ('i', 'w', 'b'))

logger.warning(
    'The plan is to install the %s repository %s',
    self.name,
    url,
)
response = ask_path_exists('What to do? %s' % prompt[0], prompt[1])

if response == 'a':
    sys.exit(-1)

if response == 'w':
    logger.warning('Deleting %s', display_path(dest))
    rmtree(dest)
    self.fetch_new(dest, url, rev_options)
    return

if response == 'b':
    dest_dir = backup_dir(dest)
    logger.warning(
        'Backing up %s to %s', display_path(dest), dest_dir,
    )
    shutil.move(dest, dest_dir)
    self.fetch_new(dest, url, rev_options)
    return

# Do nothing if the response is "i".

if response == 's':
    logger.info(
        'Switching %s %s to %s%s',
        self.repo_name,
        display_path(dest),
        url,
        rev_display,
    )
    self.switch(dest, url, rev_options)
<SYSTEM_TASK:> Run a VCS subcommand <END_TASK> <USER_TASK:> Description: def run_command( cls, cmd, # type: List[str] show_stdout=True, # type: bool cwd=None, # type: Optional[str] on_returncode='raise', # type: str extra_ok_returncodes=None, # type: Optional[Iterable[int]] command_desc=None, # type: Optional[str] extra_environ=None, # type: Optional[Mapping[str, Any]] spinner=None # type: Optional[SpinnerInterface] ): # type: (...) -> Optional[Text] """ Run a VCS subcommand This is simply a wrapper around call_subprocess that adds the VCS command name, and checks that the VCS is available """
cmd = [cls.name] + cmd
try:
    return call_subprocess(cmd, show_stdout, cwd,
                           on_returncode=on_returncode,
                           extra_ok_returncodes=extra_ok_returncodes,
                           command_desc=command_desc,
                           extra_environ=extra_environ,
                           unset_environ=cls.unset_environ,
                           spinner=spinner)
except OSError as e:
    # errno.ENOENT = no such file or directory
    # In other words, the VCS executable isn't available
    if e.errno == errno.ENOENT:
        raise BadCommand(
            'Cannot find command %r - do you have '
            '%r installed and in your '
            'PATH?' % (cls.name, cls.name))
    else:
        raise
<SYSTEM_TASK:> Return whether a directory path is a repository directory. <END_TASK> <USER_TASK:> Description: def is_repository_directory(cls, path): # type: (str) -> bool """ Return whether a directory path is a repository directory. """
logger.debug('Checking in %s for %s (%s)...',
             path, cls.dirname, cls.name)
return os.path.exists(os.path.join(path, cls.dirname))
<SYSTEM_TASK:> Returns a set containing the paths that need to be renamed. <END_TASK> <USER_TASK:> Description: def compress_for_rename(paths): """Returns a set containing the paths that need to be renamed. This set may include directories when the original sequence of paths included every file on disk. """
case_map = dict((os.path.normcase(p), p) for p in paths)
remaining = set(case_map)
unchecked = sorted(set(os.path.split(p)[0]
                       for p in case_map.values()), key=len)
wildcards = set()

def norm_join(*a):
    return os.path.normcase(os.path.join(*a))

for root in unchecked:
    if any(os.path.normcase(root).startswith(w)
           for w in wildcards):
        # This directory has already been handled.
        continue

    all_files = set()
    all_subdirs = set()
    for dirname, subdirs, files in os.walk(root):
        all_subdirs.update(norm_join(root, dirname, d)
                           for d in subdirs)
        all_files.update(norm_join(root, dirname, f)
                         for f in files)

    # If all the files we found are in our remaining set of files to
    # remove, then remove them from the latter set and add a wildcard
    # for the directory.
    if not (all_files - remaining):
        remaining.difference_update(all_files)
        wildcards.add(root + os.sep)

return set(map(case_map.__getitem__, remaining)) | wildcards
<SYSTEM_TASK:> Returns a tuple of 2 sets of which paths to display to user <END_TASK> <USER_TASK:> Description: def compress_for_output_listing(paths): """Returns a tuple of 2 sets of which paths to display to user The first set contains paths that would be deleted. Files of a package are not added and the top-level directory of the package has a '*' added at the end - to signify that all its contents are removed. The second set contains files that would have been skipped in the above folders. """
will_remove = list(paths)
will_skip = set()

# Determine folders and files
folders = set()
files = set()
for path in will_remove:
    if path.endswith(".pyc"):
        continue
    if path.endswith("__init__.py") or ".dist-info" in path:
        folders.add(os.path.dirname(path))
    files.add(path)

_normcased_files = set(map(os.path.normcase, files))

folders = compact(folders)

# This walks the tree using os.walk to not miss extra folders
# that might get added.
for folder in folders:
    for dirpath, _, dirfiles in os.walk(folder):
        for fname in dirfiles:
            if fname.endswith(".pyc"):
                continue

            file_ = os.path.join(dirpath, fname)
            if (os.path.isfile(file_) and
                    os.path.normcase(file_) not in _normcased_files):
                # We are skipping this file. Add it to the set.
                will_skip.add(file_)

will_remove = files | {
    os.path.join(folder, "*") for folder in folders
}

return will_remove, will_skip
<SYSTEM_TASK:> Stashes a directory. <END_TASK> <USER_TASK:> Description: def _get_directory_stash(self, path): """Stashes a directory. Directories are stashed adjacent to their original location if possible, or else moved/copied into the user's temp dir."""
try:
    save_dir = AdjacentTempDirectory(path)
    save_dir.create()
except OSError:
    save_dir = TempDirectory(kind="uninstall")
    save_dir.create()
self._save_dirs[os.path.normcase(path)] = save_dir

return save_dir.path
<SYSTEM_TASK:> Stashes a file. <END_TASK> <USER_TASK:> Description: def _get_file_stash(self, path): """Stashes a file. If no root has been provided, one will be created for the directory in the user's temp directory."""
path = os.path.normcase(path)
head, old_head = os.path.dirname(path), None
save_dir = None

while head != old_head:
    try:
        save_dir = self._save_dirs[head]
        break
    except KeyError:
        pass
    head, old_head = os.path.dirname(head), head
else:
    # Did not find any suitable root
    head = os.path.dirname(path)
    save_dir = TempDirectory(kind='uninstall')
    save_dir.create()
    self._save_dirs[head] = save_dir

relpath = os.path.relpath(path, head)
if relpath and relpath != os.path.curdir:
    return os.path.join(save_dir.path, relpath)
return save_dir.path
<SYSTEM_TASK:> Stashes the directory or file and returns its new location. <END_TASK> <USER_TASK:> Description: def stash(self, path): """Stashes the directory or file and returns its new location. """
if os.path.isdir(path):
    new_path = self._get_directory_stash(path)
else:
    new_path = self._get_file_stash(path)

self._moves.append((path, new_path))
if os.path.isdir(path) and os.path.isdir(new_path):
    # If we're moving a directory, we need to
    # remove the destination first or else it will be
    # moved to inside the existing directory.
    # We just created new_path ourselves, so it will
    # be removable.
    os.rmdir(new_path)
renames(path, new_path)
return new_path
<SYSTEM_TASK:> Commits the uninstall by removing stashed files. <END_TASK> <USER_TASK:> Description: def commit(self): """Commits the uninstall by removing stashed files."""
for _, save_dir in self._save_dirs.items():
    save_dir.cleanup()
self._moves = []
self._save_dirs = {}
<SYSTEM_TASK:> Undoes the uninstall by moving stashed files back. <END_TASK> <USER_TASK:> Description: def rollback(self): """Undoes the uninstall by moving stashed files back."""
for p in self._moves:
    logging.info("Moving to %s\n from %s", *p)

for new_path, path in self._moves:
    try:
        logger.debug('Replacing %s from %s', new_path, path)
        if os.path.isfile(new_path):
            os.unlink(new_path)
        elif os.path.isdir(new_path):
            rmtree(new_path)
        renames(path, new_path)
    except OSError as ex:
        logger.error("Failed to restore %s", new_path)
        logger.debug("Exception: %s", ex)

self.commit()
<SYSTEM_TASK:> Display which files would be deleted and prompt for confirmation <END_TASK> <USER_TASK:> Description: def _allowed_to_proceed(self, verbose): """Display which files would be deleted and prompt for confirmation """
def _display(msg, paths):
    if not paths:
        return

    logger.info(msg)
    with indent_log():
        for path in sorted(compact(paths)):
            logger.info(path)

if not verbose:
    will_remove, will_skip = compress_for_output_listing(self.paths)
else:
    # In verbose mode, display all the files that are going to be
    # deleted.
    will_remove = list(self.paths)
    will_skip = set()

_display('Would remove:', will_remove)
_display('Would not remove (might be manually added):', will_skip)
_display('Would not remove (outside of prefix):', self._refuse)
if verbose:
    _display('Will actually move:', compress_for_rename(self.paths))

return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'
<SYSTEM_TASK:> Checks that path is an executable regular file, or a symlink towards one. <END_TASK> <USER_TASK:> Description: def is_executable_file(path): """Checks that path is an executable regular file, or a symlink towards one. This is roughly ``os.path isfile(path) and os.access(path, os.X_OK)``. """
# follow symlinks,
fpath = os.path.realpath(path)

if not os.path.isfile(fpath):
    # non-files (directories, fifo, etc.)
    return False

mode = os.stat(fpath).st_mode

if (sys.platform.startswith('sunos')
        and os.getuid() == 0):
    # When root on Solaris, os.X_OK is True for *all* files, regardless
    # of their executability -- instead, any permission bit of any user,
    # group, or other is fine enough.
    #
    # (This may be true for other "Unix98" OS's such as HP-UX and AIX)
    return bool(mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))

return os.access(fpath, os.X_OK)
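For example (assuming the helper is importable, e.g. from pexpect.utils in the upstream project):

import sys
from pexpect.utils import is_executable_file   # assumed import path

print(is_executable_file(sys.executable))    # normally True
print(is_executable_file("/etc/hostname"))   # typically False: regular file, not executable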
<SYSTEM_TASK:> Try to suggest a semantic form for a version for which <END_TASK> <USER_TASK:> Description: def _suggest_semantic_version(s): """ Try to suggest a semantic form for a version for which _suggest_normalized_version couldn't come up with anything. """
result = s.strip().lower()
for pat, repl in _REPLACEMENTS:
    result = pat.sub(repl, result)
if not result:
    result = '0.0.0'

# Now look for numeric prefix, and separate it out from
# the rest.
#import pdb; pdb.set_trace()
m = _NUMERIC_PREFIX.match(result)
if not m:
    prefix = '0.0.0'
    suffix = result
else:
    prefix = m.groups()[0].split('.')
    prefix = [int(i) for i in prefix]
    while len(prefix) < 3:
        prefix.append(0)
    if len(prefix) == 3:
        suffix = result[m.end():]
    else:
        suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
        prefix = prefix[:3]
    prefix = '.'.join([str(i) for i in prefix])
    suffix = suffix.strip()
if suffix:
    #import pdb; pdb.set_trace()
    # massage the suffix.
    for pat, repl in _SUFFIX_REPLACEMENTS:
        suffix = pat.sub(repl, suffix)

if not suffix:
    result = prefix
else:
    sep = '-' if 'dev' in suffix else '+'
    result = prefix + sep + suffix
if not is_semver(result):
    result = None
return result
<SYSTEM_TASK:> Suggest a normalized version close to the given version string. <END_TASK> <USER_TASK:> Description: def _suggest_normalized_version(s): """Suggest a normalized version close to the given version string. If you have a version string that isn't rational (i.e. NormalizedVersion doesn't like it) then you might be able to get an equivalent (or close) rational version from this function. This does a number of simple normalizations to the given string, based on observation of versions currently in use on PyPI. Given a dump of those version during PyCon 2009, 4287 of them: - 2312 (53.93%) match NormalizedVersion without change with the automatic suggestion - 3474 (81.04%) match when using this suggestion method @param s {str} An irrational version string. @returns A rational version string, or None, if couldn't determine one. """
try:
    _normalized_key(s)
    return s   # already rational
except UnsupportedVersionError:
    pass

rs = s.lower()

# part of this could use maketrans
for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
                   ('beta', 'b'), ('rc', 'c'), ('-final', ''),
                   ('-pre', 'c'), ('-release', ''), ('.release', ''),
                   ('-stable', ''), ('+', '.'), ('_', '.'), (' ', ''),
                   ('.final', ''), ('final', '')):
    rs = rs.replace(orig, repl)

# if something ends with dev or pre, we add a 0
rs = re.sub(r"pre$", r"pre0", rs)
rs = re.sub(r"dev$", r"dev0", rs)

# if we have something like "b-2" or "a.2" at the end of the
# version, that is probably beta, alpha, etc
# let's remove the dash or dot
rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)

# 1.0-dev-r371 -> 1.0.dev371
# 0.1-dev-r79 -> 0.1.dev79
rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)

# Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)

# Clean: v0.3, v1.0
if rs.startswith('v'):
    rs = rs[1:]

# Clean leading '0's on numbers.
#TODO: unintended side-effect on, e.g., "2003.05.09"
# PyPI stats: 77 (~2%) better
rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)

# Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
# zero.
# PyPI stats: 245 (7.56%) better
rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)

# the 'dev-rNNN' tag is a dev tag
rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)

# clean the - when used as a pre delimiter
rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)

# a terminal "dev" or "devel" can be changed into ".dev0"
rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)

# a terminal "dev" can be changed into ".dev0"
rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)

# a terminal "final" or "stable" can be removed
rs = re.sub(r"(final|stable)$", "", rs)

# The 'r' and the '-' tags are post release tags
#   0.4a1.r10     ->  0.4a1.post10
#   0.9.33-17222  ->  0.9.33.post17222
#   0.9.33-r17222 ->  0.9.33.post17222
rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)

# Clean 'r' instead of 'dev' usage:
#   0.9.33+r17222 ->  0.9.33.dev17222
#   1.0dev123     ->  1.0.dev123
#   1.0.git123    ->  1.0.dev123
#   1.0.bzr123    ->  1.0.dev123
#   0.1a0dev.123  ->  0.1a0.dev123
# PyPI stats: ~150 (~4%) better
rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)

# Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
#   0.2.pre1      ->  0.2c1
#   0.2-c1        ->  0.2c1
#   1.0preview123 ->  1.0c123
# PyPI stats: ~21 (0.62%) better
rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)

# Tcl/Tk uses "px" for their post release markers
rs = re.sub(r"p(\d+)$", r".post\1", rs)

try:
    _normalized_key(rs)
except UnsupportedVersionError:
    rs = None
return rs
<SYSTEM_TASK:> Check if the provided version matches the constraints. <END_TASK> <USER_TASK:> Description: def match(self, version): """ Check if the provided version matches the constraints. :param version: The version to match against this instance. :type version: String or :class:`Version` instance. """
if isinstance(version, string_types):
    version = self.version_class(version)
for operator, constraint, prefix in self._parts:
    f = self._operators.get(operator)
    if isinstance(f, string_types):
        f = getattr(self, f)
    if not f:
        msg = ('%r not implemented '
               'for %s' % (operator, self.__class__.__name__))
        raise NotImplementedError(msg)
    if not f(version, constraint, prefix):
        return False
return True
<SYSTEM_TASK:> Yield directories starting from the given directory up to the root <END_TASK> <USER_TASK:> Description: def _walk_to_root(path): """ Yield directories starting from the given directory up to the root """
if not os.path.exists(path):
    raise IOError('Starting path not found')

if os.path.isfile(path):
    path = os.path.dirname(path)

last_dir = None
current_dir = os.path.abspath(path)
while last_dir != current_dir:
    yield current_dir
    parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir))
    last_dir, current_dir = current_dir, parent_dir
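A quick sketch of the traversal (the function is private to python-dotenv, so this assumes it is importable from the module above):

import os

for folder in _walk_to_root(os.getcwd()):
    print(folder)   # current directory, then each parent, ending at the filesystem root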
<SYSTEM_TASK:> Run command in sub process. <END_TASK> <USER_TASK:> Description: def run_command(command, env): """Run command in sub process. Runs the command in a sub process with the variables from `env` added in the current environment variables. Parameters ---------- command: List[str] The command and it's parameters env: Dict The additional environment variables Returns ------- int The return code of the command """
# copy the current environment variables and add the values from `env`
cmd_env = os.environ.copy()
cmd_env.update(env)

p = Popen(command,
          universal_newlines=True,
          bufsize=0,
          shell=False,
          env=cmd_env)
_, _ = p.communicate()

return p.returncode
<SYSTEM_TASK:> Load the current dotenv as system environment variables. <END_TASK> <USER_TASK:> Description: def set_as_environment_variables(self, override=False): """ Load the current dotenv as system environment variables. """
for k, v in self.dict().items():
    if k in os.environ and not override:
        continue
    # With Python2 on Windows, force environment variables to str to avoid
    # "TypeError: environment can only contain strings" in Python's subprocess.py.
    if PY2 and WIN:
        if isinstance(k, text_type) or isinstance(v, text_type):
            k = k.encode('ascii')
            v = v.encode('ascii')
    os.environ[k] = v

return True
<SYSTEM_TASK:> Verifies that PyOpenSSL's package-level dependencies have been met. <END_TASK> <USER_TASK:> Description: def _validate_dependencies_met(): """ Verifies that PyOpenSSL's package-level dependencies have been met. Throws `ImportError` if they are not met. """
# Method added in `cryptography==1.1`; not available in older versions
from cryptography.x509.extensions import Extensions
if getattr(Extensions, "get_extension_for_class", None) is None:
    raise ImportError("'cryptography' module missing required functionality. "
                      "Try upgrading to v1.3.4 or newer.")

# pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
# attribute is only present on those versions.
from OpenSSL.crypto import X509
x509 = X509()
if getattr(x509, "_x509", None) is None:
    raise ImportError("'pyOpenSSL' module missing required functionality. "
                      "Try upgrading to v0.14 or newer.")
<SYSTEM_TASK:> Given an PyOpenSSL certificate, provides all the subject alternative names. <END_TASK> <USER_TASK:> Description: def get_subj_alt_name(peer_cert): """ Given an PyOpenSSL certificate, provides all the subject alternative names. """
# Pass the cert to cryptography, which has much better APIs for this.
if hasattr(peer_cert, "to_cryptography"):
    cert = peer_cert.to_cryptography()
else:
    # This is technically using private APIs, but should work across all
    # relevant versions before PyOpenSSL got a proper API for this.
    cert = _Certificate(openssl_backend, peer_cert._x509)

# We want to find the SAN extension. Ask Cryptography to locate it (it's
# faster than looping in Python)
try:
    ext = cert.extensions.get_extension_for_class(
        x509.SubjectAlternativeName
    ).value
except x509.ExtensionNotFound:
    # No such extension, return the empty list.
    return []
except (x509.DuplicateExtension, UnsupportedExtension,
        x509.UnsupportedGeneralNameType, UnicodeError) as e:
    # A problem has been found with the quality of the certificate. Assume
    # no SAN field is present.
    log.warning(
        "A problem was encountered with the certificate that prevented "
        "urllib3 from finding the SubjectAlternativeName field. This can "
        "affect certificate validation. The error was %s",
        e,
    )
    return []

# We want to return dNSName and iPAddress fields. We need to cast the IPs
# back to strings because the match_hostname function wants them as
# strings.
# Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
# decoded. This is pretty frustrating, but that's what the standard library
# does with certificates, and so we need to attempt to do the same.
# We also want to skip over names which cannot be idna encoded.
names = [
    ('DNS', name) for name in
    map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
    if name is not None
]
names.extend(
    ('IP Address', str(name))
    for name in ext.get_values_for_type(x509.IPAddress)
)

return names
<SYSTEM_TASK:> Take an input command and run it, handling exceptions and error codes and returning <END_TASK> <USER_TASK:> Description: def run_command(cmd, *args, **kwargs): """ Take an input command and run it, handling exceptions and error codes and returning its stdout and stderr. :param cmd: The list of command and arguments. :type cmd: list :returns: A 2-tuple of the output and error from the command :rtype: Tuple[str, str] :raises: exceptions.PipenvCmdError """
from pipenv.vendor import delegator
from ._compat import decode_for_output
from .cmdparse import Script

catch_exceptions = kwargs.pop("catch_exceptions", True)
if isinstance(cmd, (six.string_types, list, tuple)):
    cmd = Script.parse(cmd)
if not isinstance(cmd, Script):
    raise TypeError("Command input must be a string, list or tuple")
if "env" not in kwargs:
    kwargs["env"] = os.environ.copy()
kwargs["env"]["PYTHONIOENCODING"] = "UTF-8"
try:
    cmd_string = cmd.cmdify()
except TypeError:
    click_echo("Error turning command into string: {0}".format(cmd), err=True)
    sys.exit(1)
if environments.is_verbose():
    click_echo("Running command: $ {0}".format(cmd_string), err=True)
c = delegator.run(cmd_string, *args, **kwargs)
return_code = c.return_code
if environments.is_verbose():
    click_echo("Command output: {0}".format(
        crayons.blue(decode_for_output(c.out))
    ), err=True)
if not c.ok and catch_exceptions:
    raise PipenvCmdError(cmd_string, c.out, c.err, return_code)
return c
<SYSTEM_TASK:> Converts a Pipfile-formatted dependency to a pip-formatted one. <END_TASK> <USER_TASK:> Description: def convert_deps_to_pip(deps, project=None, r=True, include_index=True): """Converts a Pipfile-formatted dependency to a pip-formatted one."""
from .vendor.requirementslib.models.requirements import Requirement

dependencies = []
for dep_name, dep in deps.items():
    if project:
        project.clear_pipfile_cache()
    indexes = getattr(project, "pipfile_sources", []) if project is not None else []
    new_dep = Requirement.from_pipfile(dep_name, dep)
    if new_dep.index:
        include_index = True
    req = new_dep.as_line(sources=indexes if include_index else None).strip()
    dependencies.append(req)
if not r:
    return dependencies

# Write requirements.txt to tmp directory.
from .vendor.vistir.path import create_tracked_tempfile
f = create_tracked_tempfile(suffix="-requirements.txt", delete=False)
f.write("\n".join(dependencies).encode("utf-8"))
f.close()
return f.name
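For instance, on a pipenv checkout where this helper is importable, the conversion would look roughly like this (exact output formatting may vary by version):

deps = {
    "requests": "*",
    "records": {"version": ">0.5.0", "extras": ["pandas"]},
}
print(convert_deps_to_pip(deps, r=False))
# e.g. ['requests', 'records[pandas]>0.5.0']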
<SYSTEM_TASK:> Check to see if there's a hard requirement for version <END_TASK> <USER_TASK:> Description: def is_required_version(version, specified_version): """Check to see if there's a hard requirement for version number provided in the Pipfile. """
# Certain packages may be defined with multiple values.
if isinstance(specified_version, dict):
    specified_version = specified_version.get("version", "")
if specified_version.startswith("=="):
    return version.strip() == specified_version.split("==")[1].strip()

return True
<SYSTEM_TASK:> Determine if a package name is for a File dependency. <END_TASK> <USER_TASK:> Description: def is_file(package): """Determine if a package name is for a File dependency."""
if hasattr(package, "keys"):
    return any(key for key in package.keys() if key in ["file", "path"])

if os.path.exists(str(package)):
    return True

for start in SCHEME_LIST:
    if str(package).startswith(start):
        return True

return False
<SYSTEM_TASK:> Normalize package name to PEP 423 style standard. <END_TASK> <USER_TASK:> Description: def pep423_name(name): """Normalize package name to PEP 423 style standard."""
name = name.lower()
# Only normalize plain package names; VCS URIs and URL-style entries pass through unchanged.
if all(i not in name for i in (VCS_LIST + SCHEME_LIST)):
    return name.replace("_", "-")
else:
    return name
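Illustrative call; casing and underscores are normalized so lookups against PyPI-style names stay consistent.

pep423_name("Django_Rest_Framework")   # 'django-rest-framework'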
<SYSTEM_TASK:> Properly case project name from pypi.org. <END_TASK> <USER_TASK:> Description: def proper_case(package_name): """Properly case project name from pypi.org."""
# Hit the simple API. r = _get_requests_session().get( "https://pypi.org/pypi/{0}/json".format(package_name), timeout=0.3, stream=True ) if not r.ok: raise IOError( "Unable to find package {0} in PyPI repository.".format(package_name) ) r = parse.parse("https://pypi.org/pypi/{name}/json", r.url) good_name = r["name"] return good_name
<SYSTEM_TASK:> Given an executable name, search the given location for an executable <END_TASK> <USER_TASK:> Description: def find_windows_executable(bin_path, exe_name): """Given an executable name, search the given location for an executable"""
requested_path = get_windows_path(bin_path, exe_name) if os.path.isfile(requested_path): return requested_path try: pathext = os.environ["PATHEXT"] except KeyError: pass else: for ext in pathext.split(os.pathsep): path = get_windows_path(bin_path, exe_name + ext.strip().lower()) if os.path.isfile(path): return path return find_executable(exe_name)
<SYSTEM_TASK:> Canonicalize a list of packages and return a set of canonical names <END_TASK> <USER_TASK:> Description: def get_canonical_names(packages): """Canonicalize a list of packages and return a set of canonical names"""
from .vendor.packaging.utils import canonicalize_name if not isinstance(packages, Sequence): if not isinstance(packages, six.string_types): return packages packages = [packages] return set([canonicalize_name(pkg) for pkg in packages if pkg])
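Illustrative call:

get_canonical_names(["Django", "Flask_SQLAlchemy", "requests"])
# {'django', 'flask-sqlalchemy', 'requests'}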
<SYSTEM_TASK:> Downloads file from url to a path with filename <END_TASK> <USER_TASK:> Description: def download_file(url, filename): """Downloads file from url to a path with filename"""
r = _get_requests_session().get(url, stream=True) if not r.ok: raise IOError("Unable to download file") with open(filename, "wb") as f: f.write(r.content)
<SYSTEM_TASK:>
Normalize the drive letter in a path so it stays consistent.
<END_TASK>
<USER_TASK:>
Description:
def normalize_drive(path):
    """Normalize the drive letter in a path so it stays consistent.

    This currently only affects local drives on Windows, which can be
    identified with either upper or lower cased drive names. The case is
    always converted to uppercase because it seems to be preferred.

    See: <https://github.com/pypa/pipenv/issues/1218>
    """
if os.name != "nt" or not isinstance(path, six.string_types): return path drive, tail = os.path.splitdrive(path) # Only match (lower cased) local drives (e.g. 'c:'), not UNC mounts. if drive.islower() and len(drive) == 2 and drive[1] == ":": return "{}{}".format(drive.upper(), tail) return path
<SYSTEM_TASK:> Call os.path.expandvars if value is a string, otherwise do nothing. <END_TASK> <USER_TASK:> Description: def safe_expandvars(value): """Call os.path.expandvars if value is a string, otherwise do nothing. """
if isinstance(value, six.string_types): return os.path.expandvars(value) return value
<SYSTEM_TASK:>
Take a pipfile entry and normalize its markers
<END_TASK>
<USER_TASK:>
Description:
def translate_markers(pipfile_entry):
    """Take a pipfile entry and normalize its markers

    Provide a pipfile entry which may have 'markers' as a key or it may have any valid
    key from `packaging.markers.marker_context.keys()` and standardize the format into
    {'markers': 'key == "some_value"'}.

    :param pipfile_entry: A dictionary of keys and values representing a pipfile entry
    :type pipfile_entry: dict
    :returns: A normalized dictionary with cleaned marker entries
    """
if not isinstance(pipfile_entry, Mapping): raise TypeError("Entry is not a pipfile formatted mapping.") from .vendor.distlib.markers import DEFAULT_CONTEXT as marker_context from .vendor.packaging.markers import Marker from .vendor.vistir.misc import dedup allowed_marker_keys = ["markers"] + [k for k in marker_context.keys()] provided_keys = list(pipfile_entry.keys()) if hasattr(pipfile_entry, "keys") else [] pipfile_markers = [k for k in provided_keys if k in allowed_marker_keys] new_pipfile = dict(pipfile_entry).copy() marker_set = set() if "markers" in new_pipfile: marker = str(Marker(new_pipfile.pop("markers"))) if 'extra' not in marker: marker_set.add(marker) for m in pipfile_markers: entry = "{0}".format(pipfile_entry[m]) if m != "markers": marker_set.add(str(Marker("{0}{1}".format(m, entry)))) new_pipfile.pop(m) if marker_set: new_pipfile["markers"] = str(Marker(" or ".join( "{0}".format(s) if " and " in s else s for s in sorted(dedup(marker_set)) ))).replace('"', "'") return new_pipfile
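A sketch of the normalization; the marker key used here (os_name) is one of the standard PEP 508 environment markers.

translate_markers({"version": "*", "os_name": "== 'nt'"})
# {'version': '*', 'markers': "os_name == 'nt'"}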
<SYSTEM_TASK:> Check if a given path is a virtual environment's root. <END_TASK> <USER_TASK:> Description: def is_virtual_environment(path): """Check if a given path is a virtual environment's root. This is done by checking if the directory contains a Python executable in its bin/Scripts directory. Not technically correct, but good enough for general usage. """
if not path.is_dir(): return False for bindir_name in ('bin', 'Scripts'): for python in path.joinpath(bindir_name).glob('python*'): try: exeness = python.is_file() and os.access(str(python), os.X_OK) except OSError: exeness = False if exeness: return True return False
<SYSTEM_TASK:>
Compare two urls by scheme, host, and path, ignoring auth
<END_TASK>
<USER_TASK:>
Description:
def is_url_equal(url, other_url):
    # type: (str, str) -> bool
    """
    Compare two urls by scheme, host, and path, ignoring auth

    :param str url: The initial URL to compare
    :param str other_url: The second URL to compare to the first
    :return: Whether the URLs are equal without **auth**, **query**, and **fragment**
    :rtype: bool

    >>> is_url_equal("https://user:[email protected]/some/path?some_query", "https://user2:[email protected]/some/path")
    True

    >>> is_url_equal("https://user:[email protected]/some/path?some_query", "https://mydomain.com/some?some_query")
    False
    """
if not isinstance(url, six.string_types): raise TypeError("Expected string for url, received {0!r}".format(url)) if not isinstance(other_url, six.string_types): raise TypeError("Expected string for url, received {0!r}".format(other_url)) parsed_url = urllib3_util.parse_url(url) parsed_other_url = urllib3_util.parse_url(other_url) unparsed = parsed_url._replace(auth=None, query=None, fragment=None).url unparsed_other = parsed_other_url._replace(auth=None, query=None, fragment=None).url return unparsed == unparsed_other
<SYSTEM_TASK:>
Given a `pythonfinder.Finder` instance and an optional line, find a corresponding python
<END_TASK>
<USER_TASK:>
Description:
def find_python(finder, line=None):
    """
    Given a `pythonfinder.Finder` instance and an optional line, find a corresponding python

    :param finder: A :class:`pythonfinder.Finder` instance to use for searching
    :type finder: :class:`pythonfinder.Finder`
    :param str line: A version, path, name, or nothing, defaults to None
    :return: A path to python
    :rtype: str
    """
if line and not isinstance(line, six.string_types): raise TypeError( "Invalid python search type: expected string, received {0!r}".format(line) ) if line and os.path.isabs(line): if os.name == "nt": line = posixpath.join(*line.split(os.path.sep)) return line if not finder: from pipenv.vendor.pythonfinder import Finder finder = Finder(global_search=True) if not line: result = next(iter(finder.find_all_python_versions()), None) elif line and line[0].isdigit() or re.match(r'[\d\.]+', line): result = finder.find_python_version(line) else: result = finder.find_python_version(name=line) if not result: result = finder.which(line) if not result and not line.startswith("python"): line = "python{0}".format(line) result = find_python(finder, line) if not result: result = next(iter(finder.find_all_python_versions()), None) if result: if not isinstance(result, six.string_types): return result.path.as_posix() return result return
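A usage sketch; the returned paths are illustrative and depend on which interpreters are installed locally.

from pipenv.vendor.pythonfinder import Finder

finder = Finder(global_search=True)
find_python(finder, "3.7")   # e.g. '/usr/bin/python3.7' (falls back to another interpreter if no exact match)
find_python(finder, None)    # path of the first Python that pythonfinder discovers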
<SYSTEM_TASK:>
Given an input, checks whether the input is a request for python or not.
<END_TASK>
<USER_TASK:>
Description:
def is_python_command(line):
    """
    Given an input, checks whether the input is a request for python or not.
    This can be a version, a python runtime name, or a generic 'python' or 'pythonX.Y'.

    :param str line: A potential request to find python
    :returns: Whether the line is a python lookup
    :rtype: bool
    """
if not isinstance(line, six.string_types): raise TypeError("Not a valid command to check: {0!r}".format(line)) from pipenv.vendor.pythonfinder.utils import PYTHON_IMPLEMENTATIONS is_version = re.match(r'[\d\.]+', line) if (line.startswith("python") or is_version or any(line.startswith(v) for v in PYTHON_IMPLEMENTATIONS)): return True # we are less sure about this but we can guess if line.startswith("py"): return True return False
<SYSTEM_TASK:> Select a way to obtain process information from the system. <END_TASK> <USER_TASK:> Description: def _get_process_mapping(): """Select a way to obtain process information from the system. * `/proc` is used if supported. * The system `ps` utility is used as a fallback option. """
for impl in (proc, ps): try: mapping = impl.get_process_mapping() except EnvironmentError: continue return mapping raise ShellDetectionFailure('compatible proc fs or ps utility is required')
<SYSTEM_TASK:> Form shell information from the SHELL environment variable if possible. <END_TASK> <USER_TASK:> Description: def _get_login_shell(proc_cmd): """Form shell information from the SHELL environment variable if possible. """
login_shell = os.environ.get('SHELL', '') if login_shell: proc_cmd = login_shell else: proc_cmd = proc_cmd[1:] return (os.path.basename(proc_cmd).lower(), proc_cmd)
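Illustrative calls; the process command '-zsh' is how a login shell typically appears in ps output.

os.environ["SHELL"] = "/bin/zsh"
_get_login_shell("-zsh")   # ('zsh', '/bin/zsh')

os.environ.pop("SHELL", None)
_get_login_shell("-zsh")   # ('zsh', 'zsh') -- falls back to stripping the leading '-'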
<SYSTEM_TASK:> Initializes a urllib3 PoolManager. <END_TASK> <USER_TASK:> Description: def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs): """Initializes a urllib3 PoolManager. This method should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param connections: The number of urllib3 connection pools to cache. :param maxsize: The maximum number of connections to save in the pool. :param block: Block when no free connections are available. :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. """
# save these values for pickling self._pool_connections = connections self._pool_maxsize = maxsize self._pool_block = block self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, block=block, strict=True, **pool_kwargs)
<SYSTEM_TASK:> Disposes of any internal state. <END_TASK> <USER_TASK:> Description: def close(self): """Disposes of any internal state. Currently, this closes the PoolManager and any active ProxyManager, which closes any pooled connections. """
self.poolmanager.clear() for proxy in self.proxy_manager.values(): proxy.clear()
<SYSTEM_TASK:> Cleans VCS uris from pipenv.patched.notpip format <END_TASK> <USER_TASK:> Description: def add_ssh_scheme_to_git_uri(uri): # type: (S) -> S """Cleans VCS uris from pipenv.patched.notpip format"""
if isinstance(uri, six.string_types): # Add scheme for parsing purposes, this is also what pip does if uri.startswith("git+") and "://" not in uri: uri = uri.replace("git+", "git+ssh://", 1) parsed = urlparse(uri) if ":" in parsed.netloc: netloc, _, path_start = parsed.netloc.rpartition(":") path = "/{0}{1}".format(path_start, parsed.path) uri = urlunparse(parsed._replace(netloc=netloc, path=path)) return uri
<SYSTEM_TASK:> Determine if dictionary entry from Pipfile is for a vcs dependency. <END_TASK> <USER_TASK:> Description: def is_vcs(pipfile_entry): # type: (PipfileType) -> bool """Determine if dictionary entry from Pipfile is for a vcs dependency."""
if isinstance(pipfile_entry, Mapping): return any(key for key in pipfile_entry.keys() if key in VCS_LIST) elif isinstance(pipfile_entry, six.string_types): if not is_valid_url(pipfile_entry) and pipfile_entry.startswith("git+"): pipfile_entry = add_ssh_scheme_to_git_uri(pipfile_entry) parsed_entry = urlsplit(pipfile_entry) return parsed_entry.scheme in VCS_SCHEMES return False
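Illustrative checks:

is_vcs({"git": "https://github.com/pypa/pipenv.git"})   # True  -- explicit 'git' key
is_vcs("git+https://github.com/pypa/pipenv.git")        # True  -- VCS scheme in the URL
is_vcs("requests")                                      # False -- plain package name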
<SYSTEM_TASK:> Splits on multiple given separators. <END_TASK> <USER_TASK:> Description: def multi_split(s, split): # type: (S, Iterable[S]) -> List[S] """Splits on multiple given separators."""
for r in split: s = s.replace(r, "|") return [i for i in s.split("|") if len(i) > 0]
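Illustrative call:

multi_split("a,b;c", [",", ";"])   # ['a', 'b', 'c']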
<SYSTEM_TASK:> Takes a chunk of a document and feeds it through all of the relevant <END_TASK> <USER_TASK:> Description: def feed(self, byte_str): """ Takes a chunk of a document and feeds it through all of the relevant charset probers. After calling ``feed``, you can check the value of the ``done`` attribute to see if you need to continue feeding the ``UniversalDetector`` more data, or if it has made a prediction (in the ``result`` attribute). .. note:: You should always call ``close`` when you're done feeding in your document if ``done`` is not already ``True``. """
if self.done: return if not len(byte_str): return if not isinstance(byte_str, bytearray): byte_str = bytearray(byte_str) # First check for known BOMs, since these are guaranteed to be correct if not self._got_data: # If the data starts with BOM, we know it is UTF if byte_str.startswith(codecs.BOM_UTF8): # EF BB BF UTF-8 with BOM self.result = {'encoding': "UTF-8-SIG", 'confidence': 1.0, 'language': ''} elif byte_str.startswith((codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE)): # FF FE 00 00 UTF-32, little-endian BOM # 00 00 FE FF UTF-32, big-endian BOM self.result = {'encoding': "UTF-32", 'confidence': 1.0, 'language': ''} elif byte_str.startswith(b'\xFE\xFF\x00\x00'): # FE FF 00 00 UCS-4, unusual octet order BOM (3412) self.result = {'encoding': "X-ISO-10646-UCS-4-3412", 'confidence': 1.0, 'language': ''} elif byte_str.startswith(b'\x00\x00\xFF\xFE'): # 00 00 FF FE UCS-4, unusual octet order BOM (2143) self.result = {'encoding': "X-ISO-10646-UCS-4-2143", 'confidence': 1.0, 'language': ''} elif byte_str.startswith((codecs.BOM_LE, codecs.BOM_BE)): # FF FE UTF-16, little endian BOM # FE FF UTF-16, big endian BOM self.result = {'encoding': "UTF-16", 'confidence': 1.0, 'language': ''} self._got_data = True if self.result['encoding'] is not None: self.done = True return # If none of those matched and we've only see ASCII so far, check # for high bytes and escape sequences if self._input_state == InputState.PURE_ASCII: if self.HIGH_BYTE_DETECTOR.search(byte_str): self._input_state = InputState.HIGH_BYTE elif self._input_state == InputState.PURE_ASCII and \ self.ESC_DETECTOR.search(self._last_char + byte_str): self._input_state = InputState.ESC_ASCII self._last_char = byte_str[-1:] # If we've seen escape sequences, use the EscCharSetProber, which # uses a simple state machine to check for known escape sequences in # HZ and ISO-2022 encodings, since those are the only encodings that # use such sequences. if self._input_state == InputState.ESC_ASCII: if not self._esc_charset_prober: self._esc_charset_prober = EscCharSetProber(self.lang_filter) if self._esc_charset_prober.feed(byte_str) == ProbingState.FOUND_IT: self.result = {'encoding': self._esc_charset_prober.charset_name, 'confidence': self._esc_charset_prober.get_confidence(), 'language': self._esc_charset_prober.language} self.done = True # If we've seen high bytes (i.e., those with values greater than 127), # we need to do more complicated checks using all our multi-byte and # single-byte probers that are left. The single-byte probers # use character bigram distributions to determine the encoding, whereas # the multi-byte probers use a combination of character unigram and # bigram distributions. elif self._input_state == InputState.HIGH_BYTE: if not self._charset_probers: self._charset_probers = [MBCSGroupProber(self.lang_filter)] # If we're checking non-CJK encodings, use single-byte prober if self.lang_filter & LanguageFilter.NON_CJK: self._charset_probers.append(SBCSGroupProber()) self._charset_probers.append(Latin1Prober()) for prober in self._charset_probers: if prober.feed(byte_str) == ProbingState.FOUND_IT: self.result = {'encoding': prober.charset_name, 'confidence': prober.get_confidence(), 'language': prober.language} self.done = True break if self.WIN_BYTE_DETECTOR.search(byte_str): self._has_win_bytes = True
<SYSTEM_TASK:> Stop analyzing the current document and come up with a final <END_TASK> <USER_TASK:> Description: def close(self): """ Stop analyzing the current document and come up with a final prediction. :returns: The ``result`` attribute, a ``dict`` with the keys `encoding`, `confidence`, and `language`. """
# Don't bother with checks if we're already done if self.done: return self.result self.done = True if not self._got_data: self.logger.debug('no data received!') # Default to ASCII if it is all we've seen so far elif self._input_state == InputState.PURE_ASCII: self.result = {'encoding': 'ascii', 'confidence': 1.0, 'language': ''} # If we have seen non-ASCII, return the best that met MINIMUM_THRESHOLD elif self._input_state == InputState.HIGH_BYTE: prober_confidence = None max_prober_confidence = 0.0 max_prober = None for prober in self._charset_probers: if not prober: continue prober_confidence = prober.get_confidence() if prober_confidence > max_prober_confidence: max_prober_confidence = prober_confidence max_prober = prober if max_prober and (max_prober_confidence > self.MINIMUM_THRESHOLD): charset_name = max_prober.charset_name lower_charset_name = max_prober.charset_name.lower() confidence = max_prober.get_confidence() # Use Windows encoding name instead of ISO-8859 if we saw any # extra Windows-specific bytes if lower_charset_name.startswith('iso-8859'): if self._has_win_bytes: charset_name = self.ISO_WIN_MAP.get(lower_charset_name, charset_name) self.result = {'encoding': charset_name, 'confidence': confidence, 'language': max_prober.language} # Log all prober confidences if none met MINIMUM_THRESHOLD if self.logger.getEffectiveLevel() == logging.DEBUG: if self.result['encoding'] is None: self.logger.debug('no probers hit minimum threshold') for group_prober in self._charset_probers: if not group_prober: continue if isinstance(group_prober, CharSetGroupProber): for prober in group_prober.probers: self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, prober.get_confidence()) else: self.logger.debug('%s %s confidence = %s', prober.charset_name, prober.language, prober.get_confidence()) return self.result
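The canonical feed/close loop for these two methods looks like this; the input file name is illustrative.

from chardet.universaldetector import UniversalDetector

detector = UniversalDetector()
with open("some_file.bin", "rb") as fh:              # file name is illustrative
    for chunk in iter(lambda: fh.read(4096), b""):
        detector.feed(chunk)
        if detector.done:                            # a confident answer may arrive early
            break
result = detector.close()
# e.g. {'encoding': 'utf-8', 'confidence': 0.99, 'language': ''}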
<SYSTEM_TASK:> Send a command to the REPL, wait for and return output. <END_TASK> <USER_TASK:> Description: def run_command(self, command, timeout=-1): """Send a command to the REPL, wait for and return output. :param str command: The command to send. Trailing newlines are not needed. This should be a complete block of input that will trigger execution; if a continuation prompt is found after sending input, :exc:`ValueError` will be raised. :param int timeout: How long to wait for the next prompt. -1 means the default from the :class:`pexpect.spawn` object (default 30 seconds). None means to wait indefinitely. """
# Split up multiline commands and feed them in bit-by-bit cmdlines = command.splitlines() # splitlines ignores trailing newlines - add it back in manually if command.endswith('\n'): cmdlines.append('') if not cmdlines: raise ValueError("No command was given") res = [] self.child.sendline(cmdlines[0]) for line in cmdlines[1:]: self._expect_prompt(timeout=timeout) res.append(self.child.before) self.child.sendline(line) # Command was fully submitted, now wait for the next prompt if self._expect_prompt(timeout=timeout) == 1: # We got the continuation prompt - command was incomplete self.child.kill(signal.SIGINT) self._expect_prompt(timeout=1) raise ValueError("Continuation prompt found - input was incomplete:\n" + command) return u''.join(res + [self.child.before])
<SYSTEM_TASK:> Wraps a function so that it swallows exceptions. <END_TASK> <USER_TASK:> Description: def safecall(func): """Wraps a function so that it swallows exceptions."""
def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except Exception: pass return wrapper
<SYSTEM_TASK:> Return a condensed version of help string. <END_TASK> <USER_TASK:> Description: def make_default_short_help(help, max_length=45): """Return a condensed version of help string."""
words = help.split() total_length = 0 result = [] done = False for word in words: if word[-1:] == '.': done = True new_length = result and 1 + len(word) or len(word) if total_length + new_length > max_length: result.append('...') done = True else: if result: result.append(' ') result.append(word) if done: break total_length += new_length return ''.join(result)
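Illustrative call; the condensed help stops at the first sentence when it fits within the limit.

make_default_short_help(
    "Synchronize the database. This may take a while on large data sets.",
    max_length=30)
# 'Synchronize the database.'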
<SYSTEM_TASK:> Returns a system stream for byte processing. This essentially <END_TASK> <USER_TASK:> Description: def get_binary_stream(name): """Returns a system stream for byte processing. This essentially returns the stream from the sys module with the given name but it solves some compatibility issues between different Python versions. Primarily this function is necessary for getting binary streams on Python 3. :param name: the name of the stream to open. Valid names are ``'stdin'``, ``'stdout'`` and ``'stderr'`` """
opener = binary_streams.get(name) if opener is None: raise TypeError('Unknown standard stream %r' % name) return opener()
<SYSTEM_TASK:> Formats a filename for user display. The main purpose of this <END_TASK> <USER_TASK:> Description: def format_filename(filename, shorten=False): """Formats a filename for user display. The main purpose of this function is to ensure that the filename can be displayed at all. This will decode the filename to unicode if necessary in a way that it will not fail. Optionally, it can shorten the filename to not include the full path to the filename. :param filename: formats a filename for UI display. This will also convert the filename into unicode without failing. :param shorten: this optionally shortens the filename to strip of the path that leads up to it. """
if shorten: filename = os.path.basename(filename) return filename_to_ui(filename)
<SYSTEM_TASK:> r"""Returns the config folder for the application. The default behavior <END_TASK> <USER_TASK:> Description: def get_app_dir(app_name, roaming=True, force_posix=False): r"""Returns the config folder for the application. The default behavior is to return whatever is most appropriate for the operating system. To give you an idea, for an app called ``"Foo Bar"``, something like the following folders could be returned: Mac OS X: ``~/Library/Application Support/Foo Bar`` Mac OS X (POSIX): ``~/.foo-bar`` Unix: ``~/.config/foo-bar`` Unix (POSIX): ``~/.foo-bar`` Win XP (roaming): ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar`` Win XP (not roaming): ``C:\Documents and Settings\<user>\Application Data\Foo Bar`` Win 7 (roaming): ``C:\Users\<user>\AppData\Roaming\Foo Bar`` Win 7 (not roaming): ``C:\Users\<user>\AppData\Local\Foo Bar`` .. versionadded:: 2.0 :param app_name: the application name. This should be properly capitalized and can contain whitespace. :param roaming: controls if the folder should be roaming or not on Windows. Has no affect otherwise. :param force_posix: if this is set to `True` then on any POSIX system the folder will be stored in the home folder with a leading dot instead of the XDG config home or darwin's application support folder. """
if WIN: key = roaming and 'APPDATA' or 'LOCALAPPDATA' folder = os.environ.get(key) if folder is None: folder = os.path.expanduser('~') return os.path.join(folder, app_name) if force_posix: return os.path.join(os.path.expanduser('~/.' + _posixify(app_name))) if sys.platform == 'darwin': return os.path.join(os.path.expanduser( '~/Library/Application Support'), app_name) return os.path.join( os.environ.get('XDG_CONFIG_HOME', os.path.expanduser('~/.config')), _posixify(app_name))
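A typical use is to build a per-user config path; the application name is illustrative.

import os

config_path = os.path.join(get_app_dir("my-tool"), "config.ini")
# e.g. '/home/<user>/.config/my-tool/config.ini' on Linux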
<SYSTEM_TASK:> Version of shlex.split that silently accept incomplete strings. <END_TASK> <USER_TASK:> Description: def split_args(line): """Version of shlex.split that silently accept incomplete strings. Parameters ---------- line : str The string to split Returns ------- [str] The line split in separated arguments """
lex = shlex.shlex(line, posix=True) lex.whitespace_split = True lex.commenters = '' res = [] try: while True: res.append(next(lex)) except ValueError: # No closing quotation pass except StopIteration: # End of loop pass if lex.token: res.append(lex.token) return res
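Illustrative call showing the tolerance for an unterminated quote:

split_args('run --env "KEY=some value')
# ['run', '--env', 'KEY=some value']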
<SYSTEM_TASK:> This function takes a text and shows it via an environment specific <END_TASK> <USER_TASK:> Description: def echo_via_pager(text_or_generator, color=None): """This function takes a text and shows it via an environment specific pager on stdout. .. versionchanged:: 3.0 Added the `color` flag. :param text_or_generator: the text to page, or alternatively, a generator emitting the text to page. :param color: controls if the pager supports ANSI colors or not. The default is autodetection. """
color = resolve_color_default(color) if inspect.isgeneratorfunction(text_or_generator): i = text_or_generator() elif isinstance(text_or_generator, string_types): i = [text_or_generator] else: i = iter(text_or_generator) # convert every element of i to a text type if necessary text_generator = (el if isinstance(el, string_types) else text_type(el) for el in i) from ._termui_impl import pager return pager(itertools.chain(text_generator, "\n"), color)
<SYSTEM_TASK:> Clears the terminal screen. This will have the effect of clearing <END_TASK> <USER_TASK:> Description: def clear(): """Clears the terminal screen. This will have the effect of clearing the whole visible space of the terminal and moving the cursor to the top left. This does not do anything if not connected to a terminal. .. versionadded:: 2.0 """
if not isatty(sys.stdout): return # If we're on Windows and we don't have colorama available, then we # clear the screen by shelling out. Otherwise we can use an escape # sequence. if WIN: os.system('cls') else: sys.stdout.write('\033[2J\033[1;1H')
<SYSTEM_TASK:>
Fetches a single character from the terminal and returns it. This
<END_TASK>
<USER_TASK:>
Description:
def getchar(echo=False):
    """Fetches a single character from the terminal and returns it. This
    will always return a unicode character and under certain rare
    circumstances this might return more than one character. The situations
    in which more than one character is returned are when, for whatever
    reason, multiple characters end up in the terminal buffer or standard
    input is not actually a terminal.

    Note that this will always read from the terminal, even if something
    is piped into the standard input.

    Note for Windows: in rare cases when typing non-ASCII characters, this
    function might wait for a second character and then return both at once.
    This is because certain Unicode characters look like special-key markers.

    .. versionadded:: 2.0

    :param echo: if set to `True`, the character read will also show up on
                 the terminal. The default is to not show it.
    """
f = _getchar if f is None: from ._termui_impl import getchar as f return f(echo)
<SYSTEM_TASK:>
This command stops execution and waits for the user to press any
<END_TASK>
<USER_TASK:>
Description:
def pause(info='Press any key to continue ...', err=False):
    """This command stops execution and waits for the user to press any
    key to continue. This is similar to the Windows batch "pause" command.
    If the program is not run through a terminal, this command will instead
    do nothing.

    .. versionadded:: 2.0

    .. versionadded:: 4.0
       Added the `err` parameter.

    :param info: the info string to print before pausing.
    :param err: if set to ``True``, the message goes to ``stderr`` instead of
                ``stdout``, the same as with echo.
    """
if not isatty(sys.stdin) or not isatty(sys.stdout): return try: if info: echo(info, nl=False, err=err) try: getchar() except (KeyboardInterrupt, EOFError): pass finally: if info: echo(err=err)