1 """Prepares a distribution for installation
2 """
3
4 # The following comment should be removed at some point in the future.
5 # mypy: strict-optional=False
6
7 import logging
8 import mimetypes
9 import os
10 import shutil
11 from typing import Dict, Iterable, List, Optional
12
13 from pip._vendor.packaging.utils import canonicalize_name
14
15 from pip._internal.distributions import make_distribution_for_install_requirement
16 from pip._internal.distributions.installed import InstalledDistribution
17 from pip._internal.exceptions import (
18 DirectoryUrlHashUnsupported,
19 HashMismatch,
20 HashUnpinned,
21 InstallationError,
22 MetadataInconsistent,
23 NetworkConnectionError,
24 PreviousBuildDirError,
25 VcsHashUnsupported,
26 )
27 from pip._internal.index.package_finder import PackageFinder
28 from pip._internal.metadata import BaseDistribution, get_metadata_distribution
29 from pip._internal.models.direct_url import ArchiveInfo
30 from pip._internal.models.link import Link
31 from pip._internal.models.wheel import Wheel
32 from pip._internal.network.download import BatchDownloader, Downloader
33 from pip._internal.network.lazy_wheel import (
34 HTTPRangeRequestUnsupported,
35 dist_from_wheel_url,
36 )
37 from pip._internal.network.session import PipSession
38 from pip._internal.operations.build.build_tracker import BuildTracker
39 from pip._internal.req.req_install import InstallRequirement
40 from pip._internal.utils.direct_url_helpers import (
41 direct_url_for_editable,
42 direct_url_from_link,
43 )
44 from pip._internal.utils.hashes import Hashes, MissingHashes
45 from pip._internal.utils.logging import indent_log
46 from pip._internal.utils.misc import (
47 display_path,
48 hash_file,
49 hide_url,
50 is_installable_dir,
51 )
52 from pip._internal.utils.temp_dir import TempDirectory
53 from pip._internal.utils.unpacking import unpack_file
54 from pip._internal.vcs import vcs
55
56 logger = logging.getLogger(__name__)
57
58
59 def _get_prepared_distribution(
60 req: InstallRequirement,
61 build_tracker: BuildTracker,
62 finder: PackageFinder,
63 build_isolation: bool,
64 check_build_deps: bool,
65 ) -> BaseDistribution:
66 """Prepare a distribution for installation."""
67 abstract_dist = make_distribution_for_install_requirement(req)
68 with build_tracker.track(req):
69 abstract_dist.prepare_distribution_metadata(
70 finder, build_isolation, check_build_deps
71 )
72 return abstract_dist.get_metadata_distribution()
73
74
75 def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
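    """Check out the VCS link into location, using the VCS backend that
    matches the link's URL scheme."""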
    vcs_backend = vcs.get_backend_for_scheme(link.scheme)
    assert vcs_backend is not None
    vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)


class File:
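    """A file on the local filesystem, together with its content type.

    The content type is guessed from the path when it is not provided.
    """
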
    def __init__(self, path: str, content_type: Optional[str]) -> None:
        self.path = path
        if content_type is None:
            self.content_type = mimetypes.guess_type(path)[0]
        else:
            self.content_type = content_type


def get_http_url(
    link: Link,
    download: Downloader,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> File:
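    """Download the link into a fresh temporary directory, or reuse an
    already-downloaded copy found in download_dir, checking hashes if any
    are provided."""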
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)
    # If a download dir is specified, is the file already downloaded there?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link, download_dir, hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
        content_type = None
    else:
        # let's download to a tmp dir
        from_path, content_type = download(link, temp_dir.path)
        if hashes:
            hashes.check_against_path(from_path)

    return File(from_path, content_type)


def get_file_url(
    link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
) -> File:
    """Get file and optionally check its hash."""
    # If a download dir is specified, is the file already there and valid?
    already_downloaded_path = None
    if download_dir:
        already_downloaded_path = _check_download_dir(link, download_dir, hashes)

    if already_downloaded_path:
        from_path = already_downloaded_path
    else:
        from_path = link.file_path

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(from_path)
    return File(from_path, None)


def unpack_url(
    link: Link,
    location: str,
    download: Downloader,
    verbosity: int,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> Optional[File]:
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if link.is_vcs:
        unpack_vcs_link(link, location, verbosity=verbosity)
        return None

    assert not link.is_existing_dir()

    # file urls
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)

    # http urls
    else:
        file = get_http_url(
            link,
            download,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file


def _check_download_dir(
    link: Link,
    download_dir: str,
    hashes: Optional[Hashes],
    warn_on_hash_mismatch: bool = True,
) -> Optional[str]:
187 """Check download_dir for previously downloaded file with correct hash
188 If a correct file is found return its path else None
189 """
    download_path = os.path.join(download_dir, link.filename)

    if not os.path.exists(download_path):
        return None

    # If already downloaded, does its hash match?
    logger.info("File was already downloaded %s", download_path)
    if hashes:
        try:
            hashes.check_against_path(download_path)
        except HashMismatch:
            if warn_on_hash_mismatch:
                logger.warning(
                    "Previously-downloaded file %s has bad hash. Re-downloading.",
                    download_path,
                )
            os.unlink(download_path)
            return None
    return download_path


class RequirementPreparer:
    """Prepares a Requirement"""

    def __init__(
        self,
        build_dir: str,
        download_dir: Optional[str],
        src_dir: str,
        build_isolation: bool,
        check_build_deps: bool,
        build_tracker: BuildTracker,
        session: PipSession,
        progress_bar: str,
        finder: PackageFinder,
        require_hashes: bool,
        use_user_site: bool,
        lazy_wheel: bool,
        verbosity: int,
        legacy_resolver: bool,
    ) -> None:
        super().__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.build_tracker = build_tracker
        self._session = session
        self._download = Downloader(session, progress_bar)
        self._batch_download = BatchDownloader(session, progress_bar)
        self.finder = finder

        # Where still-packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Is build isolation allowed?
        self.build_isolation = build_isolation

        # Should check build dependencies?
        self.check_build_deps = check_build_deps

        # Should hash-checking be required?
        self.require_hashes = require_hashes

        # Should install in user site-packages?
        self.use_user_site = use_user_site

        # Should wheels be downloaded lazily?
        self.use_lazy_wheel = lazy_wheel

        # How verbose should underlying tooling be?
        self.verbosity = verbosity

        # Are we using the legacy resolver?
        self.legacy_resolver = legacy_resolver

        # Memoized downloaded files, as mapping of url: path.
        self._downloaded: Dict[str, str] = {}

        # Previous "header" printed for a link-based InstallRequirement
        self._previous_requirement_header = ("", "")

    def _log_preparing_link(self, req: InstallRequirement) -> None:
        """Provide context for the requirement being prepared."""
        if req.link.is_file and not req.is_wheel_from_cache:
            message = "Processing %s"
            information = str(display_path(req.link.file_path))
        else:
            message = "Collecting %s"
            information = str(req.req or req)

        # If we used req.req, inject requirement source if available (this
        # would already be included if we used req directly)
        if req.req and req.comes_from:
            if isinstance(req.comes_from, str):
                comes_from: Optional[str] = req.comes_from
            else:
                comes_from = req.comes_from.from_path()
            if comes_from:
                information += f" (from {comes_from})"

        if (message, information) != self._previous_requirement_header:
            self._previous_requirement_header = (message, information)
            logger.info(message, information)

        if req.is_wheel_from_cache:
            with indent_log():
                logger.info("Using cached %s", req.link.filename)

    def _ensure_link_req_src_dir(
        self, req: InstallRequirement, parallel_builds: bool
    ) -> None:
        """Ensure source_dir of a linked InstallRequirement."""
        # source_dir is only set for editable requirements, so it should
        # not be set yet.
        if req.link.is_wheel:
            # We don't need to unpack wheels, so no need for a source
            # directory.
            return
        assert req.source_dir is None
        if req.link.is_existing_dir():
            # build local directories in-tree
            req.source_dir = req.link.file_path
            return

        # We always delete unpacked sdists after pip runs.
        req.ensure_has_source_dir(
            self.build_dir,
            autodelete=True,
            parallel_builds=parallel_builds,
        )

        # If a checkout exists, it's unwise to keep going. Version
        # inconsistencies are logged later, but do not fail the
        # installation.
        # FIXME: this won't upgrade when there's an existing
        # package unpacked in `req.source_dir`
        # TODO: this check is now probably dead code
        if is_installable_dir(req.source_dir):
            raise PreviousBuildDirError(
                "pip can't proceed with requirement '{}' due to a "
                "pre-existing build directory ({}). This is likely "
                "due to a previous installation that failed. pip is "
                "being responsible and not assuming it can delete this. "
                "Please delete it and try again.".format(req, req.source_dir)
            )

    def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
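        """Return the hashes the linked requirement must match, taking the
        --require-hashes mode into account."""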
        # By the time this is called, the requirement's link should have
        # been checked so we can tell what kind of requirements req is
        # and raise some more informative errors than otherwise.
        # (For example, we can raise VcsHashUnsupported for a VCS URL
        # rather than HashMissing.)
        if not self.require_hashes:
            return req.hashes(trust_internet=True)

        # We could check these first 2 conditions inside unpack_url
        # and save repetition of conditions, but then we would
        # report less-useful error messages for unhashable
        # requirements, complaining that there's no hash provided.
        if req.link.is_vcs:
            raise VcsHashUnsupported()
        if req.link.is_existing_dir():
            raise DirectoryUrlHashUnsupported()

        # Unpinned packages are asking for trouble when a new version
        # is uploaded. This isn't a security check, but it saves users
        # a surprising hash mismatch in the future.
        # file:/// URLs aren't pinnable, so don't complain about them
        # not being pinned.
        if not req.is_direct and not req.is_pinned:
            raise HashUnpinned()

        # If known-good hashes are missing for this requirement,
        # shim it with a facade object that will provoke hash
        # computation and then raise a HashMissing exception
        # showing the user what the hash should be.
        return req.hashes(trust_internet=False) or MissingHashes()

    def _fetch_metadata_only(
        self,
        req: InstallRequirement,
    ) -> Optional[BaseDistribution]:
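        """Fetch the distribution's metadata without downloading the whole
        artifact, via a PEP 658 metadata file or a lazy wheel, when the
        resolver and options allow it."""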
        if self.legacy_resolver:
            logger.debug(
                "Metadata-only fetching is not used in the legacy resolver",
            )
            return None
        if self.require_hashes:
            logger.debug(
                "Metadata-only fetching is not used as hash checking is required",
            )
            return None
        # Try PEP 658 metadata first, then fall back to lazy wheel if unavailable.
        return self._fetch_metadata_using_link_data_attr(
            req
        ) or self._fetch_metadata_using_lazy_wheel(req.link)

    def _fetch_metadata_using_link_data_attr(
        self,
        req: InstallRequirement,
    ) -> Optional[BaseDistribution]:
        """Fetch metadata from the data-dist-info-metadata attribute, if possible."""
        # (1) Get the link to the metadata file, if provided by the backend.
        metadata_link = req.link.metadata_link()
        if metadata_link is None:
            return None
        assert req.req is not None
        logger.info(
            "Obtaining dependency information for %s from %s",
            req.req,
            metadata_link,
        )
        # (2) Download the contents of the METADATA file, separate from the dist itself.
        metadata_file = get_http_url(
            metadata_link,
            self._download,
            hashes=metadata_link.as_hashes(),
        )
        with open(metadata_file.path, "rb") as f:
            metadata_contents = f.read()
        # (3) Generate a dist just from those file contents.
        metadata_dist = get_metadata_distribution(
            metadata_contents,
            req.link.filename,
            req.req.name,
        )
        # (4) Ensure the Name: field from the METADATA file matches the name from the
        # install requirement.
        #
        # NB: raw_name will fall back to the name from the install requirement if
        # the Name: field is not present, but it's noted in the raw_name docstring
        # that that should NEVER happen anyway.
        if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name):
            raise MetadataInconsistent(
                req, "Name", req.req.name, metadata_dist.raw_name
            )
        return metadata_dist

    def _fetch_metadata_using_lazy_wheel(
        self,
        link: Link,
    ) -> Optional[BaseDistribution]:
        """Fetch metadata using lazy wheel, if possible."""
        # --use-feature=fast-deps must be provided.
        if not self.use_lazy_wheel:
            return None
        if link.is_file or not link.is_wheel:
            logger.debug(
                "Lazy wheel is not used as %r does not point to a remote wheel",
                link,
            )
            return None

        wheel = Wheel(link.filename)
        name = canonicalize_name(wheel.name)
        logger.info(
            "Obtaining dependency information from %s %s",
            name,
            wheel.version,
        )
        url = link.url.split("#", 1)[0]
        try:
            return dist_from_wheel_url(name, url, self._session)
        except HTTPRangeRequestUnsupported:
            logger.debug("%s does not support range requests", url)
            return None

    def _complete_partial_requirements(
        self,
        partially_downloaded_reqs: Iterable[InstallRequirement],
        parallel_builds: bool = False,
    ) -> None:
        """Download any requirements which were only fetched by metadata."""
        # Download to a temporary directory. These will be copied over as
        # needed for downstream 'download', 'wheel', and 'install' commands.
        temp_dir = TempDirectory(kind="unpack", globally_managed=True).path

        # Map each link to the requirement that owns it. This allows us to set
        # `req.local_file_path` on the appropriate requirement after passing
        # all the links at once into BatchDownloader.
        links_to_fully_download: Dict[Link, InstallRequirement] = {}
        for req in partially_downloaded_reqs:
            assert req.link
            links_to_fully_download[req.link] = req

        batch_download = self._batch_download(
            links_to_fully_download.keys(),
            temp_dir,
        )
        for link, (filepath, _) in batch_download:
            logger.debug("Downloading link %s to %s", link, filepath)
            req = links_to_fully_download[link]
            req.local_file_path = filepath
            # TODO: This needs fixing for sdists
            # This is an emergency fix for #11847, which reports that
            # distributions get downloaded twice when metadata is loaded
            # from a PEP 658 standalone metadata file. Setting _downloaded
            # fixes this for wheels, but breaks the sdist case (tests
            # test_download_metadata). As PyPI is currently only serving
            # metadata for wheels, this is not an immediate issue.
            # Fixing the problem properly looks like it will require a
            # complete refactoring of the `prepare_linked_requirements_more`
            # logic, and I haven't a clue where to start on that, so for now
            # I have fixed the issue *just* for wheels.
            if req.is_wheel:
                self._downloaded[req.link.url] = filepath

        # This step is necessary to ensure all lazy wheels are processed
        # successfully by the 'download', 'wheel', and 'install' commands.
        for req in partially_downloaded_reqs:
            self._prepare_linked_requirement(req, parallel_builds)

    def prepare_linked_requirement(
        self, req: InstallRequirement, parallel_builds: bool = False
    ) -> BaseDistribution:
        """Prepare a requirement to be obtained from req.link."""
        assert req.link
        self._log_preparing_link(req)
        with indent_log():
            # Check if the relevant file is already available
            # in the download directory
            file_path = None
            if self.download_dir is not None and req.link.is_wheel:
                hashes = self._get_linked_req_hashes(req)
                file_path = _check_download_dir(
                    req.link,
                    self.download_dir,
                    hashes,
                    # When a locally built wheel has been found in cache, we don't warn
                    # about re-downloading when the already downloaded wheel hash does
                    # not match. This is because the hash must be checked against the
                    # original link, not the cached link. In that case the already
                    # downloaded file will be removed and re-fetched from cache (which
                    # implies a hash check against the cache entry's origin.json).
                    warn_on_hash_mismatch=not req.is_wheel_from_cache,
                )

            if file_path is not None:
                # The file is already available, so mark it as downloaded
                self._downloaded[req.link.url] = file_path
            else:
                # The file is not available, attempt to fetch only metadata
                metadata_dist = self._fetch_metadata_only(req)
                if metadata_dist is not None:
                    req.needs_more_preparation = True
                    return metadata_dist

            # None of the optimizations worked, fully prepare the requirement
            return self._prepare_linked_requirement(req, parallel_builds)

    def prepare_linked_requirements_more(
        self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
    ) -> None:
        """Prepare linked requirements more, if needed."""
        reqs = [req for req in reqs if req.needs_more_preparation]
        for req in reqs:
            # Determine if any of these requirements were already downloaded.
            if self.download_dir is not None and req.link.is_wheel:
                hashes = self._get_linked_req_hashes(req)
                file_path = _check_download_dir(req.link, self.download_dir, hashes)
                if file_path is not None:
                    self._downloaded[req.link.url] = file_path
                    req.needs_more_preparation = False

        # Prepare requirements we found were already downloaded for some
        # reason. The other downloads will be completed separately.
        partially_downloaded_reqs: List[InstallRequirement] = []
        for req in reqs:
            if req.needs_more_preparation:
                partially_downloaded_reqs.append(req)
            else:
                self._prepare_linked_requirement(req, parallel_builds)

        # TODO: separate this part out from RequirementPreparer when the v1
        # resolver can be removed!
        self._complete_partial_requirements(
            partially_downloaded_reqs,
            parallel_builds=parallel_builds,
        )

    def _prepare_linked_requirement(
        self, req: InstallRequirement, parallel_builds: bool
    ) -> BaseDistribution:
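        """Fully prepare the linked requirement: fetch and unpack the
        artifact (unless it is an existing directory or was downloaded
        earlier), verify hashes, record download_info, and prepare the
        distribution metadata."""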
        assert req.link
        link = req.link

        hashes = self._get_linked_req_hashes(req)

        if hashes and req.is_wheel_from_cache:
            assert req.download_info is not None
            assert link.is_wheel
            assert link.is_file
            # We need to verify hashes, and we have found the requirement in the cache
            # of locally built wheels.
            if (
                isinstance(req.download_info.info, ArchiveInfo)
                and req.download_info.info.hashes
                and hashes.has_one_of(req.download_info.info.hashes)
            ):
                # At this point we know the requirement was built from a hashable source
                # artifact, and we verified that the cache entry's hash of the original
                # artifact matches one of the hashes we expect. We don't verify hashes
                # against the cached wheel, because the wheel is not the original.
                hashes = None
            else:
                logger.warning(
                    "The hashes of the source archive found in cache entry "
                    "don't match, ignoring cached built wheel "
                    "and re-downloading source."
                )
                req.link = req.cached_wheel_source_link
                link = req.link

        self._ensure_link_req_src_dir(req, parallel_builds)

        if link.is_existing_dir():
            local_file = None
        elif link.url not in self._downloaded:
            try:
                local_file = unpack_url(
                    link,
                    req.source_dir,
                    self._download,
                    self.verbosity,
                    self.download_dir,
                    hashes,
                )
            except NetworkConnectionError as exc:
                raise InstallationError(
                    "Could not install requirement {} because of HTTP "
                    "error {} for URL {}".format(req, exc, link)
                )
        else:
            file_path = self._downloaded[link.url]
            if hashes:
                hashes.check_against_path(file_path)
            local_file = File(file_path, content_type=None)

        # If download_info is set, we got it from the wheel cache.
        if req.download_info is None:
            # Editables don't go through this function (see
            # prepare_editable_requirement).
            assert not req.editable
            req.download_info = direct_url_from_link(link, req.source_dir)
            # Make sure we have a hash in download_info. If we got it as part of the
            # URL, it will have been verified and we can rely on it. Otherwise we
            # compute it from the downloaded file.
            # FIXME: https://github.com/pypa/pip/issues/11943
            if (
                isinstance(req.download_info.info, ArchiveInfo)
                and not req.download_info.info.hashes
                and local_file
            ):
                hash = hash_file(local_file.path)[0].hexdigest()
                # We populate info.hash for backward compatibility.
                # This will automatically populate info.hashes.
                req.download_info.info.hash = f"sha256={hash}"

        # For use in later processing,
        # preserve the file path on the requirement.
        if local_file:
            req.local_file_path = local_file.path

        dist = _get_prepared_distribution(
            req,
            self.build_tracker,
            self.finder,
            self.build_isolation,
            self.check_build_deps,
        )
        return dist

    def save_linked_requirement(self, req: InstallRequirement) -> None:
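        """Persist the requirement's artifact into download_dir: archive VCS
        and editable directory sources as a .zip, otherwise copy the
        downloaded file."""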
        assert self.download_dir is not None
        assert req.link is not None
        link = req.link
        if link.is_vcs or (link.is_existing_dir() and req.editable):
            # Make a .zip of the source_dir we already created.
            req.archive(self.download_dir)
            return

        if link.is_existing_dir():
            logger.debug(
                "Not copying link to destination directory "
                "since it is a directory: %s",
                link,
            )
            return
        if req.local_file_path is None:
            # No distribution was downloaded for this requirement.
            return

        download_location = os.path.join(self.download_dir, link.filename)
        if not os.path.exists(download_location):
            shutil.copy(req.local_file_path, download_location)
            download_path = display_path(download_location)
            logger.info("Saved %s", download_path)

    def prepare_editable_requirement(
        self,
        req: InstallRequirement,
    ) -> BaseDistribution:
        """Prepare an editable requirement."""
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info("Obtaining %s", req)

        with indent_log():
            if self.require_hashes:
                raise InstallationError(
                    "The editable requirement {} cannot be installed when "
                    "requiring hashes, because there is no single file to "
                    "hash.".format(req)
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable()
            assert req.source_dir
            req.download_info = direct_url_for_editable(req.unpacked_source_directory)

            dist = _get_prepared_distribution(
                req,
                self.build_tracker,
                self.finder,
                self.build_isolation,
                self.check_build_deps,
            )

            req.check_if_exists(self.use_user_site)

        return dist

    def prepare_installed_requirement(
        self,
        req: InstallRequirement,
        skip_reason: str,
    ) -> BaseDistribution:
        """Prepare an already-installed requirement."""
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get a skip reason, but req.satisfied_by "
            "is set to {}".format(req.satisfied_by)
        )
        logger.info(
            "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if self.require_hashes:
                logger.debug(
                    "Since it is already installed, we are trusting this "
                    "package without checking its hash. To ensure a "
                    "completely repeatable environment, install into an "
                    "empty virtualenv."
                )
        return InstalledDistribution(req).get_metadata_distribution()