"""Prepares a distribution for installation |
|
""" |
|
|
|
|
|
|
|
|
|
import logging |
|
import mimetypes |
|
import os |
|
import shutil |
|
|
|
from pip._vendor.six import PY2 |
|
|
|
from pip._internal.distributions import ( |
|
make_distribution_for_install_requirement, |
|
) |
|
from pip._internal.distributions.installed import InstalledDistribution |
|
from pip._internal.exceptions import ( |
|
DirectoryUrlHashUnsupported, |
|
HashMismatch, |
|
HashUnpinned, |
|
InstallationError, |
|
NetworkConnectionError, |
|
PreviousBuildDirError, |
|
VcsHashUnsupported, |
|
) |
|
from pip._internal.utils.filesystem import copy2_fixed |
|
from pip._internal.utils.hashes import MissingHashes |
|
from pip._internal.utils.logging import indent_log |
|
from pip._internal.utils.misc import ( |
|
display_path, |
|
hide_url, |
|
path_to_display, |
|
rmtree, |
|
) |
|
from pip._internal.utils.temp_dir import TempDirectory |
|
from pip._internal.utils.typing import MYPY_CHECK_RUNNING |
|
from pip._internal.utils.unpacking import unpack_file |
|
from pip._internal.vcs import vcs |
|
|
|
if MYPY_CHECK_RUNNING:
    from typing import (
        Callable, List, Optional, Tuple,
    )

    from mypy_extensions import TypedDict

    from pip._internal.distributions import AbstractDistribution
    from pip._internal.index.package_finder import PackageFinder
    from pip._internal.models.link import Link
    from pip._internal.network.download import Downloader
    from pip._internal.req.req_install import InstallRequirement
    from pip._internal.req.req_tracker import RequirementTracker
    from pip._internal.utils.hashes import Hashes

    if PY2:
        CopytreeKwargs = TypedDict(
            'CopytreeKwargs',
            {
                'ignore': Callable[[str, List[str]], List[str]],
                'symlinks': bool,
            },
            total=False,
        )
    else:
        CopytreeKwargs = TypedDict(
            'CopytreeKwargs',
            {
                'copy_function': Callable[[str, str], None],
                'ignore': Callable[[str, List[str]], List[str]],
                'ignore_dangling_symlinks': bool,
                'symlinks': bool,
            },
            total=False,
        )


logger = logging.getLogger(__name__)
|
|
def _get_prepared_distribution(
    req,
    req_tracker,
    finder,
    build_isolation
):
    """Prepare a distribution for installation.
    """
    abstract_dist = make_distribution_for_install_requirement(req)
    with req_tracker.track(req):
        abstract_dist.prepare_distribution_metadata(finder, build_isolation)
    return abstract_dist
|
|
def unpack_vcs_link(link, location): |
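    """Fetch the contents of a VCS link into ``location``.

    The VCS backend is selected from the link's URL scheme.
    """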
    vcs_backend = vcs.get_backend_for_scheme(link.scheme)
    assert vcs_backend is not None
    vcs_backend.unpack(location, url=hide_url(link.url))
|
|
class File(object): |
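    """A file on disk (downloaded or local), paired with its content type."""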
|
    def __init__(self, path, content_type):
        self.path = path
        self.content_type = content_type
|
|
def get_http_url(
    link,
    downloader,
    download_dir=None,
    hashes=None,
):
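    """Fetch a remote archive and return it as a ``File``.

    If ``download_dir`` already holds a correctly-hashed copy, it is reused;
    otherwise the link is downloaded into a temporary directory.
    """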
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)

    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = mimetypes.guess_type(from_path)[0]
    else:
        # Download the archive into the temporary directory.
        from_path, content_type = _download_http_url(
            link, downloader, temp_dir.path, hashes
        )

    return File(from_path, content_type)
|
|
def _copy2_ignoring_special_files(src, dest):
    """Copying special files is not supported, but as a convenience to users
    we skip errors copying them. This supports tools that may create e.g.
    socket files in the project source directory.
    """
    try:
        copy2_fixed(src, dest)
    except shutil.SpecialFileError as e:
        # Copying failed because the source is a special file (e.g. a
        # socket). The destination tree was just created by us, so the
        # problem lies with the source; warn and skip the file.
        logger.warning(
            "Ignoring special file error '%s' encountered copying %s to %s.",
            str(e),
            path_to_display(src),
            path_to_display(dest),
        )
|
|
def _copy_source_tree(source, target): |
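    """Copy the local source tree at ``source`` to ``target``.

    Top-level ``.tox`` and ``.nox`` directories are skipped, and the target
    itself is excluded so the copy cannot recurse into its own output.
    """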
    target_abspath = os.path.abspath(target)
    target_basename = os.path.basename(target_abspath)
    target_dirname = os.path.dirname(target_abspath)

    def ignore(d, names):
        skipped = []
        if d == source:
            # Pulling in virtualenv directories can be very slow, so skip
            # .tox and .nox when they appear in the top-level dir only.
            skipped += ['.tox', '.nox']
        if os.path.abspath(d) == target_dirname:
            # Prevent infinite recursion if the target lives inside the
            # source tree (e.g. TMPDIR set to a path under the source).
            skipped += [target_basename]
        return skipped

    kwargs = dict(ignore=ignore, symlinks=True)  # type: CopytreeKwargs

    if not PY2:
        # Python 2's copytree has no copy_function parameter, so special
        # files are only skipped on Python 3.
        kwargs['copy_function'] = _copy2_ignoring_special_files

    shutil.copytree(source, target, **kwargs)
|
|
def get_file_url(
    link,
    download_dir=None,
    hashes=None
):
    """Get file and optionally check its hash.
    """
    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(
            link, download_dir, hashes
        )

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link.file_path

    # Verify the file against any required hashes before using it.
    if hashes:
        hashes.check_against_path(from_path)

    content_type = mimetypes.guess_type(from_path)[0]

    return File(from_path, content_type)
|
|
def unpack_url(
    link,
    location,
    downloader,
    download_dir=None,
    hashes=None,
):
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    if link.is_vcs:
        unpack_vcs_link(link, location)
        return None

    # If it's a URL to a local directory, copy the source tree.
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link.file_path, location)
        return None

    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)
    else:
        file = get_http_url(
            link,
            downloader,
            download_dir,
            hashes=hashes,
        )

    # Unpack the archive into the build dir location. Even when only
    # downloading archives, they have to be unpacked to parse dependencies.
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
|
|
def _download_http_url(
    link,
    downloader,
    temp_dir,
    hashes,
):
    """Download the link's URL into temp_dir using the given downloader."""
    download = downloader(link)

    file_path = os.path.join(temp_dir, download.filename)
    with open(file_path, 'wb') as content_file:
        for chunk in download.chunks:
            content_file.write(chunk)

    if hashes:
        hashes.check_against_path(file_path)

    return file_path, download.response.headers.get('content-type', '')
|
|
def _check_download_dir(link, download_dir, hashes):
    """Check download_dir for a previously downloaded file with a correct
    hash. If such a file is found, return its path; otherwise return None.
    """
    download_path = os.path.join(download_dir, link.filename)

    if not os.path.exists(download_path):
        return None

    # If already downloaded, does its hash match?
    logger.info('File was already downloaded %s', download_path)
    if hashes:
        try:
            hashes.check_against_path(download_path)
        except HashMismatch:
            logger.warning(
                'Previously-downloaded file %s has bad hash. '
                'Re-downloading.',
                download_path
            )
            os.unlink(download_path)
            return None
    return download_path
|
|
class RequirementPreparer(object):
    """Prepares a Requirement
    """
|
    def __init__(
        self,
        build_dir,
        download_dir,
        src_dir,
        wheel_download_dir,
        build_isolation,
        req_tracker,
        downloader,
        finder,
        require_hashes,
        use_user_site,
    ):
        super(RequirementPreparer, self).__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.req_tracker = req_tracker
        self.downloader = downloader
        self.finder = finder

        # Where still-packed archives should be written to. If None, they
        # are not saved and only exist temporarily while being unpacked.
        self.download_dir = download_dir

        # Where still-packed .whl files should be written to. If None, they
        # are written to download_dir. Kept separate so that 'pip wheel' can
        # save only wheel archives.
        self.wheel_download_dir = wheel_download_dir

        # Is build isolation allowed?
        self.build_isolation = build_isolation

        # Should hash-checking be required?
        self.require_hashes = require_hashes

        # Should install in user site-packages?
        self.use_user_site = use_user_site
|
    @property
    def _download_should_save(self):
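        """Whether downloaded archives should be saved to ``download_dir``.

        Raises InstallationError if a download directory was requested but
        cannot be found.
        """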
        if not self.download_dir:
            return False

        if os.path.exists(self.download_dir):
            return True

        logger.critical('Could not find download directory')
        raise InstallationError(
            "Could not find or access download directory '{}'"
            .format(self.download_dir))
|
    def _log_preparing_link(self, req):
        """Log the way the link is being prepared."""
        if req.link.is_file:
            path = req.link.file_path
            logger.info('Processing %s', display_path(path))
        else:
            logger.info('Collecting %s', req.req or req)
|
    def _ensure_link_req_src_dir(self, req, download_dir, parallel_builds):
        """Ensure source_dir of a linked InstallRequirement."""
        if req.link.is_wheel:
            # Wheels are not unpacked before installation, so no source
            # directory is needed.
            return
        assert req.source_dir is None
        # The unpacked source directory is deleted automatically after pip
        # runs (autodelete=True).
        req.ensure_has_source_dir(
            self.build_dir,
            autodelete=True,
            parallel_builds=parallel_builds,
        )

        # If a checkout already exists in the build directory, it is unsafe
        # to keep going: a previous, possibly failed, run left it behind.
        if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
            raise PreviousBuildDirError(
                "pip can't proceed with requirement '{}' due to a "
                "pre-existing build directory ({}). This is likely "
                "due to a previous installation that failed. pip is "
                "being responsible and not assuming it can delete this. "
                "Please delete it and try again.".format(req, req.source_dir)
            )
|
    def _get_linked_req_hashes(self, req):
        # By the time this is called, the requirement's link has been
        # checked, so we can tell what kind of requirement req is and raise
        # more informative errors than we otherwise could (for example,
        # VcsHashUnsupported for a VCS URL rather than a generic hash error).
        if not self.require_hashes:
            return req.hashes(trust_internet=True)

        # These two conditions could be checked inside unpack_url, but the
        # error messages would be less useful for unhashable requirements.
        if req.link.is_vcs:
            raise VcsHashUnsupported()
        if req.link.is_existing_dir():
            raise DirectoryUrlHashUnsupported()

        # Unpinned packages are asking for trouble when a new version
        # is uploaded. This isn't a security check, but it saves users
        # a surprising hash mismatch in the future.
        if req.original_link is None and not req.is_pinned:
            raise HashUnpinned()

        # If known-good hashes are missing for this requirement, shim it
        # with a facade object that will provoke hash computation and then
        # raise a HashMissing exception showing the user what the hash
        # should be.
        return req.hashes(trust_internet=False) or MissingHashes()
|
    def prepare_linked_requirement(self, req, parallel_builds=False):
        """Prepare a requirement to be obtained from req.link."""
        assert req.link
        link = req.link
        self._log_preparing_link(req)
        if link.is_wheel and self.wheel_download_dir:
            # When doing 'pip wheel', download wheels to the dedicated dir.
            download_dir = self.wheel_download_dir
        else:
            download_dir = self.download_dir

        with indent_log():
            self._ensure_link_req_src_dir(req, download_dir, parallel_builds)
            try:
                local_file = unpack_url(
                    link, req.source_dir, self.downloader, download_dir,
                    hashes=self._get_linked_req_hashes(req)
                )
            except NetworkConnectionError as exc:
                raise InstallationError(
                    'Could not install requirement {} because of HTTP '
                    'error {} for URL {}'.format(req, exc, link)
                )

            # For use in later processing, preserve the file path on the
            # requirement.
            if local_file:
                req.local_file_path = local_file.path

            abstract_dist = _get_prepared_distribution(
                req, self.req_tracker, self.finder, self.build_isolation,
            )

            if download_dir:
                if link.is_existing_dir():
                    logger.info('Link is a directory, ignoring download_dir')
                elif local_file:
                    download_location = os.path.join(
                        download_dir, link.filename
                    )
                    if not os.path.exists(download_location):
                        shutil.copy(local_file.path, download_location)
                        download_path = display_path(download_location)
                        logger.info('Saved %s', download_path)

            if self._download_should_save:
                # Make a .zip of the source_dir we already created.
                if link.is_vcs:
                    req.archive(self.download_dir)
        return abstract_dist
|
    def prepare_editable_requirement(
        self,
        req,
    ):
        """Prepare an editable requirement
        """
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info('Obtaining %s', req)

        with indent_log():
            if self.require_hashes:
                raise InstallationError(
                    'The editable requirement {} cannot be installed when '
                    'requiring hashes, because there is no single file to '
                    'hash.'.format(req)
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable(not self._download_should_save)

            abstract_dist = _get_prepared_distribution(
                req, self.req_tracker, self.finder, self.build_isolation,
            )

            if self._download_should_save:
                req.archive(self.download_dir)
            req.check_if_exists(self.use_user_site)

        return abstract_dist
|
    def prepare_installed_requirement(
        self,
        req,
        skip_reason
    ):
        """Prepare an already-installed requirement
        """
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get skip reason but req.satisfied_by "
            "is set to {}".format(req.satisfied_by)
        )
        logger.info(
            'Requirement %s: %s (%s)',
            skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if self.require_hashes:
                logger.debug(
                    'Since it is already installed, we are trusting this '
                    'package without checking its hash. To ensure a '
                    'completely repeatable environment, install into an '
                    'empty virtualenv.'
                )
            abstract_dist = InstalledDistribution(req)

        return abstract_dist