
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import logging
import os
import shutil
import sys
import uuid
import zipfile
from typing import Any, Dict, Iterable, List, Optional, Sequence, Union

from pip._vendor import pkg_resources, six
from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pep517.wrappers import Pep517HookCaller
from pip._vendor.pkg_resources import Distribution

from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_scheme
from pip._internal.models.link import Link
from pip._internal.operations.build.metadata import generate_metadata
from pip._internal.operations.build.metadata_legacy import (
    generate_metadata as generate_metadata_legacy,
)
from pip._internal.operations.install.editable_legacy import (
    install_editable as install_editable_legacy,
)
from pip._internal.operations.install.legacy import LegacyInstallFailure
from pip._internal.operations.install.legacy import install as install_legacy
from pip._internal.operations.install.wheel import install_wheel
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.direct_url_helpers import direct_url_from_link
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
    ask_path_exists,
    backup_dir,
    display_path,
    dist_in_site_packages,
    dist_in_usersite,
    get_distribution,
    hide_url,
    redact_auth_from_url,
)
from pip._internal.utils.packaging import get_metadata
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.virtualenv import running_under_virtualenv
from pip._internal.vcs import vcs

logger = logging.getLogger(__name__)


def _get_dist(metadata_directory: str) -> Distribution:
    """Return a pkg_resources.Distribution for the provided
    metadata directory.
    """
    dist_dir = metadata_directory.rstrip(os.sep)

    # Build a PathMetadata object, from path to metadata. :wink:
    base_dir, dist_dir_name = os.path.split(dist_dir)
    metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

    # Determine the correct Distribution object type.
    if dist_dir.endswith(".egg-info"):
        dist_cls = pkg_resources.Distribution
        dist_name = os.path.splitext(dist_dir_name)[0]
    else:
        assert dist_dir.endswith(".dist-info")
        dist_cls = pkg_resources.DistInfoDistribution
        dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]

    return dist_cls(
        base_dir,
        project_name=dist_name,
        metadata=metadata,
    )
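
# For illustration (hypothetical paths): a metadata directory such as
# "/tmp/build/foo-1.0.dist-info" yields a DistInfoDistribution with
# project_name "foo", while "/tmp/build/foo.egg-info" yields a plain
# Distribution, also named "foo".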


class InstallRequirement:
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement and also contains logic for
    installing the said requirement.
    """

    def __init__(
        self,
        req: Optional[Requirement],
        comes_from: Optional[Union[str, "InstallRequirement"]],
        editable: bool = False,
        link: Optional[Link] = None,
        markers: Optional[Marker] = None,
        use_pep517: Optional[bool] = None,
        isolated: bool = False,
        install_options: Optional[List[str]] = None,
        global_options: Optional[List[str]] = None,
        hash_options: Optional[Dict[str, List[str]]] = None,
        constraint: bool = False,
        extras: Iterable[str] = (),
        user_supplied: bool = False,
    ) -> None:
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.editable = editable
        self.legacy_install_reason: Optional[int] = None

        # source_dir is the local directory where the linked requirement is
        # located, or unpacked. In case unpacking is needed, creating and
        # populating source_dir is done by the RequirementPreparer. Note this
        # is not necessarily the directory where pyproject.toml or setup.py is
        # located - that one is obtained via unpacked_source_directory.
        self.source_dir: Optional[str] = None
        if self.editable:
            assert link
            if link.is_file:
                self.source_dir = os.path.normpath(
                    os.path.abspath(link.file_path)
                )

        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link
        self.original_link_is_in_wheel_cache = False

        # Path to any downloaded or already-existing package.
        self.local_file_path: Optional[str] = None
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        if extras:
            self.extras = extras
        elif req:
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()

        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by: Optional[Distribution] = None
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir: Optional[TempDirectory] = None
        # Set to True after successful installation
        self.install_succeeded: Optional[bool] = None
        # Supplied options
        self.install_options = install_options if install_options else []
        self.global_options = global_options if global_options else []
        self.hash_options = hash_options if hash_options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # User-supplied requirements are explicitly requested for installation
        # by the user via CLI arguments or requirements files, as opposed to,
        # e.g., dependencies, extras or constraints.
        self.user_supplied = user_supplied

        self.isolated = isolated
        self.build_env: BuildEnvironment = NoOpBuildEnvironment()

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory: Optional[str] = None

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires: Optional[List[str]] = None

        # Build requirements that we will check are available
        self.requirements_to_check: List[str] = []

        # The PEP 517 backend we should use to build the project
        self.pep517_backend: Optional[Pep517HookCaller] = None

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517

        # This requirement needs more preparation before it can be built
        self.needs_more_preparation = False

    def __str__(self) -> str:
        if self.req:
            s = str(self.req)
            if self.link:
                s += ' from {}'.format(redact_auth_from_url(self.link.url))
        elif self.link:
            s = redact_auth_from_url(self.link.url)
        else:
            s = '<InstallRequirement>'
        if self.satisfied_by is not None:
            s += ' in {}'.format(display_path(self.satisfied_by.location))
        if self.comes_from:
            if isinstance(self.comes_from, str):
                comes_from: Optional[str] = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += f' (from {comes_from})'
        return s

    def __repr__(self) -> str:
        return '<{} object: {} editable={!r}>'.format(
            self.__class__.__name__, str(self), self.editable)

    def format_debug(self) -> str:
        """An un-tested helper for getting state, for debugging.
        """
        attributes = vars(self)
        names = sorted(attributes)

        state = (
            "{}={!r}".format(attr, attributes[attr]) for attr in names
        )
        return '<{name} object: {{{state}}}>'.format(
            name=self.__class__.__name__,
            state=", ".join(state),
        )

    # Things that are valid for all kinds of requirements?
    @property
    def name(self) -> Optional[str]:
        if self.req is None:
            return None
        return pkg_resources.safe_name(self.req.name)

    @property
    def specifier(self) -> SpecifierSet:
        return self.req.specifier

    @property
    def is_pinned(self) -> bool:
        """Return whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        """
        specifiers = self.specifier
        return (len(specifiers) == 1 and
                next(iter(specifiers)).operator in {'==', '==='})

    def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
        if not extras_requested:
            # Provide an extra to safely evaluate the markers
            # without matching any extra
            extras_requested = ('',)
        if self.markers is not None:
            return any(
                self.markers.evaluate({'extra': extra})
                for extra in extras_requested)
        else:
            return True

    @property
    def has_hash_options(self) -> bool:
        """Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.
        """
        return bool(self.hash_options)

    def hashes(self, trust_internet: bool = True) -> Hashes:
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()
        """
        good_hashes = self.hash_options.copy()
        link = self.link if trust_internet else self.original_link
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)
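
    # For illustration: hash_options such as {"sha256": ["<digest-a>"]} plus a
    # link carrying a "#sha256=<digest-b>" fragment produce a Hashes comparer
    # whose "sha256" entry contains both digests, OR'd together as the
    # docstring above describes.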

    def from_path(self) -> Optional[str]:
        """Format a nice indicator to show where this "comes from"
        """
        if self.req is None:
            return None
        s = str(self.req)
        if self.comes_from:
            if isinstance(self.comes_from, str):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += '->' + comes_from
        return s

    def ensure_build_location(
        self, build_dir: str, autodelete: bool, parallel_builds: bool
    ) -> str:
        assert build_dir is not None
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(
                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
            )

            return self._temp_build_dir.path

        # This is the only remaining place where we manually determine the
        # path for the temporary directory. It is only needed for editables
        # where it is the value of the --src option.

        # When parallel builds are enabled, add a UUID to the build directory
        # name so multiple builds do not interfere with each other.
        dir_name: str = canonicalize_name(self.name)
        if parallel_builds:
            dir_name = f"{dir_name}_{uuid.uuid4().hex}"

        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, dir_name)
        # `None` indicates that we respect the globally-configured deletion
        # settings, which is what we actually want when auto-deleting.
        delete_arg = None if autodelete else False
        return TempDirectory(
            path=actual_build_dir,
            delete=delete_arg,
            kind=tempdir_kinds.REQ_BUILD,
            globally_managed=True,
        ).path
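
    # For illustration (hypothetical values): a requirement named "Foo.Bar"
    # builds in "<build_dir>/foo-bar"; with parallel_builds enabled the
    # directory becomes something like "<build_dir>/foo-bar_1a2b3c...", the
    # uuid4 hex suffix keeping concurrent builds from colliding.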

    def _set_requirement(self) -> None:
        """Set requirement after generating metadata.
        """
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="

        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
        )
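
    # For illustration: metadata Name "foo" and Version "1.0" yield the pinned
    # requirement "foo==1.0"; a version that does not parse as PEP 440 (say,
    # "1.0-snapshot") falls back to the arbitrary-equality operator, giving
    # "foo===1.0-snapshot".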

    def warn_on_mismatching_name(self) -> None:
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            'Generating metadata for package %s '
            'produced metadata for project name %s. Fix your '
            '#egg=%s fragments.',
            self.name, metadata_name, self.name
        )
        self.req = Requirement(metadata_name)

    def check_if_exists(self, use_user_site: bool) -> None:
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        existing_dist = get_distribution(self.req.name)
        if not existing_dist:
            return

        # pkg_resources may contain a different copy of packaging.version from
        # pip if the downstream distributor does a poor job of debundling pip.
        # We avoid existing_dist.parsed_version and let SpecifierSet.contains
        # parse the version instead.
        existing_version = existing_dist.version
        version_compatible = (
            existing_version is not None and
            self.req.specifier.contains(existing_version, prereleases=True)
        )
        if not version_compatible:
            self.satisfied_by = None
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.should_reinstall = True
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to {} in {}".format(
                            existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
            else:
                self.satisfied_by = existing_dist

    # Things valid for wheels
    @property
    def is_wheel(self) -> bool:
        if not self.link:
            return False
        return self.link.is_wheel

    # Things valid for sdists
    @property
    def unpacked_source_directory(self) -> str:
        return os.path.join(
            self.source_dir,
            self.link and self.link.subdirectory_fragment or '')

    @property
    def setup_py_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
        setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')

        return setup_py

    @property
    def pyproject_toml_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
        return make_pyproject_path(self.unpacked_source_directory)

    def load_pyproject_toml(self) -> None:
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml_path,
            self.setup_py_path,
            str(self)
        )

        if pyproject_toml_data is None:
            self.use_pep517 = False
            return

        self.use_pep517 = True
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        self.pep517_backend = Pep517HookCaller(
            self.unpacked_source_directory, backend, backend_path=backend_path,
        )

    def _generate_metadata(self) -> str:
        """Invokes metadata generator functions, with the required arguments.
        """
        if not self.use_pep517:
            assert self.unpacked_source_directory
            if not os.path.exists(self.setup_py_path):
                raise InstallationError(
                    f'File "setup.py" not found for legacy project {self}.'
                )
            return generate_metadata_legacy(
                build_env=self.build_env,
                setup_py_path=self.setup_py_path,
                source_dir=self.unpacked_source_directory,
                isolated=self.isolated,
                details=self.name or f"from {self.link}"
            )

        assert self.pep517_backend is not None
        return generate_metadata(
            build_env=self.build_env,
            backend=self.pep517_backend,
        )

    def prepare_metadata(self) -> None:
        """Ensure that project metadata is available.

        Under PEP 517, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.
        """
        assert self.source_dir

        with indent_log():
            self.metadata_directory = self._generate_metadata()

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()

    @property
    def metadata(self) -> Any:
        if not hasattr(self, '_metadata'):
            self._metadata = get_metadata(self.get_dist())

        return self._metadata

    def get_dist(self) -> Distribution:
        return _get_dist(self.metadata_directory)

    def assert_source_matches_version(self) -> None:
        assert self.source_dir
        version = self.metadata['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )

    # For both source distributions and editables
    def ensure_has_source_dir(
        self,
        parent_dir: str,
        autodelete: bool = False,
        parallel_builds: bool = False,
    ) -> None:
        """Ensure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        """
        if self.source_dir is None:
            self.source_dir = self.ensure_build_location(
                parent_dir,
                autodelete=autodelete,
                parallel_builds=parallel_builds,
            )

    # For editable installations
    def update_editable(self) -> None:
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
        # Editable requirements are validated in Requirement constructors.
        # So here, if it's neither a path nor a valid VCS URL, it's a bug.
        assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
        hidden_url = hide_url(self.link.url)
        vcs_backend.obtain(self.source_dir, url=hidden_url)

    # Top-level Actions
    def uninstall(
        self, auto_confirm: bool = False, verbose: bool = False
    ) -> Optional[UninstallPathSet]:
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.
        """
        assert self.req
        dist = get_distribution(self.req.name)
        if not dist:
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        logger.info('Found existing installation: %s', dist)

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset

    def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:

        def _clean_zip_name(name: str, prefix: str) -> str:
            assert name.startswith(prefix + os.path.sep), (
                f"name {name!r} doesn't start with prefix {prefix!r}"
            )
            name = name[len(prefix) + 1:]
            name = name.replace(os.path.sep, '/')
            return name

        path = os.path.join(parentdir, path)
        name = _clean_zip_name(path, rootdir)
        return self.name + '/' + name

    def archive(self, build_dir: Optional[str]) -> None:
        """Saves archive to provided build_dir.

        Used for saving downloaded VCS requirements as part of `pip download`.
        """
        assert self.source_dir
        if build_dir is None:
            return

        create_archive = True
        archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)

        if os.path.exists(archive_path):
            response = ask_path_exists(
                'The file {} exists. (i)gnore, (w)ipe, '
                '(b)ackup, (a)bort '.format(
                    display_path(archive_path)),
                ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)

        if not create_archive:
            return

        zip_output = zipfile.ZipFile(
            archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
        )
        with zip_output:
            dir = os.path.normcase(
                os.path.abspath(self.unpacked_source_directory)
            )
            for dirpath, dirnames, filenames in os.walk(dir):
                for dirname in dirnames:
                    dir_arcname = self._get_archive_name(
                        dirname, parentdir=dirpath, rootdir=dir,
                    )
                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_output.writestr(zipdir, '')
                for filename in filenames:
                    file_arcname = self._get_archive_name(
                        filename, parentdir=dirpath, rootdir=dir,
                    )
                    filename = os.path.join(dirpath, filename)
                    zip_output.write(filename, file_arcname)

        logger.info('Saved %s', display_path(archive_path))

    def install(
        self,
        install_options: List[str],
        global_options: Optional[Sequence[str]] = None,
        root: Optional[str] = None,
        home: Optional[str] = None,
        prefix: Optional[str] = None,
        warn_script_location: bool = True,
        use_user_site: bool = False,
        pycompile: bool = True
    ) -> None:
        scheme = get_scheme(
            self.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        global_options = global_options if global_options is not None else []
        if self.editable:
            install_editable_legacy(
                install_options,
                global_options,
                prefix=prefix,
                home=home,
                use_user_site=use_user_site,
                name=self.name,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
            )
            self.install_succeeded = True
            return

        if self.is_wheel:
            assert self.local_file_path
            direct_url = None
            if self.original_link:
                direct_url = direct_url_from_link(
                    self.original_link,
                    self.source_dir,
                    self.original_link_is_in_wheel_cache,
                )
            install_wheel(
                self.name,
                self.local_file_path,
                scheme=scheme,
                req_description=str(self.req),
                pycompile=pycompile,
                warn_script_location=warn_script_location,
                direct_url=direct_url,
                requested=self.user_supplied,
            )
            self.install_succeeded = True
            return

        # TODO: Why don't we do this for editable installs?

        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        global_options = list(global_options) + self.global_options
        install_options = list(install_options) + self.install_options

        try:
            success = install_legacy(
                install_options=install_options,
                global_options=global_options,
                root=root,
                home=home,
                prefix=prefix,
                use_user_site=use_user_site,
                pycompile=pycompile,
                scheme=scheme,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                req_name=self.name,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
                req_description=str(self.req),
            )
        except LegacyInstallFailure as exc:
            self.install_succeeded = False
            six.reraise(*exc.parent)
        except Exception:
            self.install_succeeded = True
            raise

        self.install_succeeded = success

        if success and self.legacy_install_reason == 8368:
            deprecated(
                reason=(
                    "{} was installed using the legacy 'setup.py install' "
                    "method, because a wheel could not be built for it.".
                    format(self.name)
                ),
                replacement="to fix the wheel build issue reported above",
                gone_in=None,
                issue=8368,
            )


def check_invalid_constraint_type(req: InstallRequirement) -> str:

    # Check for unsupported forms
    problem = ""
    if not req.name:
        problem = "Unnamed requirements are not allowed as constraints"
    elif req.editable:
        problem = "Editable requirements are not allowed as constraints"
    elif req.extras:
        problem = "Constraints cannot have extras"

    if problem:
        deprecated(
            reason=(
                "Constraints are only allowed to take the form of a package "
                "name and a version specifier. Other forms were originally "
                "permitted as an accident of the implementation, but were "
                "undocumented. The new implementation of the resolver no "
                "longer supports these forms."
            ),
            replacement="replacing the constraint with a requirement",
            # No plan yet for when the new resolver becomes default
            gone_in=None,
            issue=8210,
        )

    return problem
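

# Usage sketch (for orientation only; these are pip-internal APIs and may
# change without notice). InstallRequirement objects are normally created via
# the helpers in pip._internal.req.constructors rather than instantiated
# directly, e.g.:
#
#     from pip._internal.req.constructors import install_req_from_line
#
#     ireq = install_req_from_line("requests==2.25.1")
#     assert ireq.name == "requests" and ireq.is_pinned
#     ireq.check_if_exists(use_user_site=False)
#     if ireq.satisfied_by is not None:
#         print(f"already installed: {ireq.satisfied_by}")
#
# Downloading/unpacking and metadata generation are then driven by the
# RequirementPreparer, and installation by the install command, rather than by
# calling these methods in isolation.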