5 A resource is a logical file contained within a package, or a logical
6 subdirectory thereof. The package resource API expects resource names
7 to have their path parts separated with ``/``, *not* whatever the local
8 path separator is. Do not use os.path operations to manipulate resource
9 names being passed into the API.
11 The package resource API is designed to work with normal filesystem packages,
12 .egg files, and unpacked .egg files. It can also work in a limited way with
13 .zip files and with custom PEP 302 loaders that support the ``get_data()``
16 This module is deprecated. Users are directed to :mod:`importlib.resources`,
17 :mod:`importlib.metadata` and :pypi:`packaging` instead.
44 from pkgutil
import get_importer
49 # Python 3.2 compatibility
55 FileExistsError
= OSError
57 # capture these to bypass sandboxing
61 from os
import mkdir
, rename
, unlink
65 # no write support, probably under GAE
68 from os
import open as os_open
69 from os
.path
import isdir
, split
72 import importlib
.machinery
as importlib_machinery
74 # access attribute to force import under delayed import mechanisms.
75 importlib_machinery
.__name
__
77 importlib_machinery
= None
79 from pkg_resources
.extern
.jaraco
.text
import (
85 from pkg_resources
.extern
import platformdirs
86 from pkg_resources
.extern
import packaging
88 __import__('pkg_resources.extern.packaging.version')
89 __import__('pkg_resources.extern.packaging.specifiers')
90 __import__('pkg_resources.extern.packaging.requirements')
91 __import__('pkg_resources.extern.packaging.markers')
92 __import__('pkg_resources.extern.packaging.utils')
# Hard minimum: the module uses syntax/stdlib behavior unavailable before 3.5.
if sys.version_info < (3, 5):
    raise RuntimeError("Python 3.5 or later is required")
# declare some globals that will be defined later to
# satisfy the linters.
require = None
working_set = None
add_activation_listener = None
resources_stream = None
cleanup_resources = None
resource_dir = None
resource_stream = None
set_extraction_path = None
resource_isdir = None
resource_string = None
iter_entry_points = None
resource_listdir = None
resource_filename = None
resource_exists = None
_distribution_finders = None
_namespace_handlers = None
_namespace_packages = None


# Emit the module-level deprecation notice once, at import time.
warnings.warn(
    "pkg_resources is deprecated as an API. "
    "See https://setuptools.pypa.io/en/latest/pkg_resources.html",
    DeprecationWarning,
    stacklevel=2,
)
# Leading, PEP 440-safe prefix of a version string; used to salvage a usable
# version from otherwise non-compliant version strings.
_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
class PEP440Warning(RuntimeWarning):
    """
    Used when there is an issue with a version or specifier not complying with
    PEP 440.
    """
# Strict PEP 440 parser from the vendored ``packaging``; kept as a module-level
# alias for backward compatibility with the historical pkg_resources API.
parse_version = packaging.version.Version
def _declare_state(vartype, **kw):
    """Declare module-level state variables and record their type.

    Each keyword becomes a module global, and ``_state_vars`` remembers its
    `vartype` so __getstate__/__setstate__ can pick the right (de)serializer.
    """
    globals().update(kw)
    _state_vars.update(dict.fromkeys(kw, vartype))
def __getstate__():
    """Snapshot all registered module state via per-type getters.

    For each state var, dispatches to the ``_sget_<type>`` helper recorded in
    ``_state_vars``.
    """
    state = {}
    g = globals()
    for k, v in _state_vars.items():
        state[k] = g['_sget_' + v](g[k])
    return state
def __setstate__(state):
    """Restore module state captured by ``__getstate__``.

    Dispatches each entry to the ``_sset_<type>`` helper recorded in
    ``_state_vars``.
    """
    g = globals()
    for k, v in state.items():
        g['_sset_' + _state_vars[k]](k, g[k], v)
    return state
def _sset_dict(key, ob, state):
    """Restore a dict state var in place (preserves object identity)."""
    ob.clear()
    ob.update(state)
def _sget_object(val):
    """Capture an object state var via its own ``__getstate__``."""
    return val.__getstate__()
def _sset_object(key, ob, state):
    """Restore an object state var via its own ``__setstate__``."""
    ob.__setstate__(state)
def _sget_none(*args):
    """No-op state getter/setter for state vars that need no (de)serialization."""
    return None


# Setter shares the no-op implementation.
_sset_none = _sget_none
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of macOS that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of macOS that we are *running*. To allow usage of packages that
    explicitly require a newer version of macOS, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        try:
            # Substitute the *running* macOS version for the build-time one.
            plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
        except ValueError:
            # not macOS
            pass
    return plat
207 # Basic resource access and distribution/entry point discovery
222 # Environmental control
225 'add_activation_listener',
226 'find_distributions',
227 'set_extraction_path',
230 # Primary implementation classes
240 'DistributionNotFound',
245 # Parsing functions and string utilities
246 'parse_requirements',
251 'compatible_platforms',
258 # filesystem utilities
261 # Distribution "precedence" constants
267 # "Provider" interfaces, implementations, and registration/lookup APIs
280 'register_namespace_handler',
281 'register_loader_type',
282 'fixup_namespace_packages',
285 'PkgResourcesDeprecationWarning',
286 # Deprecated/backward compatibility only
288 'AvailableDistributions',
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. "ResolutionError('why',)" — class name plus constructor args.
        return self.__class__.__name__ + repr(self.args)
class VersionConflict(ResolutionError):
    """
    An already-installed version conflicts with the requested version.

    Should be initialized with the installed Distribution and the requested
    Requirement.
    """

    _template = "{self.dist} is installed but {self.req} is required"

    @property
    def dist(self):
        # Installed Distribution (first constructor arg).
        return self.args[0]

    @property
    def req(self):
        # Requested Requirement (second constructor arg).
        return self.args[1]

    def report(self):
        """Render a human-readable conflict message from ``_template``."""
        return self._template.format(**locals())

    def with_context(self, required_by):
        """
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        """
        if not required_by:
            return self
        args = self.args + (required_by,)
        return ContextualVersionConflict(*args)
class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict that accepts a third parameter, the set of the
    requirements that required the installed Distribution.
    """

    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # Set of requirement names that demanded the conflicting dist.
        return self.args[2]
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = (
        "The '{self.req}' distribution was not found "
        "and is required by {self.requirers_str}"
    )

    @property
    def req(self):
        # Unsatisfied Requirement (first constructor arg).
        return self.args[0]

    @property
    def requirers(self):
        # Iterable of project names that needed the missing dist, or None.
        return self.args[1]

    @property
    def requirers_str(self):
        if not self.requirers:
            return 'the application'
        return ', '.join(self.requirers)

    def report(self):
        """Render a human-readable not-found message from ``_template``."""
        return self._template.format(**locals())

    def __str__(self):
        return self.report()
class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""
# Registry mapping PEP 302 loader types to IResourceProvider factories;
# populated via register_loader_type().
_provider_factories = {}

# "major.minor" of the running interpreter, e.g. "3.11".
PY_MAJOR = '{}.{}'.format(*sys.version_info)
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # For a Requirement, reuse the active dist or activate one on demand.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    try:
        module = sys.modules[moduleOrReq]
    except KeyError:
        # Not imported yet — import it, then fetch the module object.
        __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
def _macos_vers(_cache=[]):
    """Return the running macOS version as a list of string components.

    The mutable default argument is a deliberate one-shot cache: the version
    is computed once per process.
    """
    if not _cache:
        version = platform.mac_ver()[0]
        # fallback for MacPorts
        if version == '':
            plist = '/System/Library/CoreServices/SystemVersion.plist'
            if os.path.exists(plist):
                if hasattr(plistlib, 'readPlist'):
                    plist_content = plistlib.readPlist(plist)
                    if 'ProductVersion' in plist_content:
                        version = plist_content['ProductVersion']
        _cache.append(version.split('.'))
    return _cache[0]
def _macos_arch(machine):
    """Normalize historical PowerPC machine names to 'ppc'; pass others through."""
    return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and macOS.
    """
    from sysconfig import get_platform

    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        try:
            version = _macos_vers()
            machine = os.uname()[4].replace(" ", "_")
            return "macosx-%d.%d-%s" % (
                int(version[0]),
                int(version[1]),
                _macos_arch(machine),
            )
        except ValueError:
            # if someone is running a non-Mac darwin system, this will fall
            # through to the default implementation
            pass
    return plat
# Platform-tag matchers: modern "macosx-<major>.<minor>-<arch>" and the
# pre-setuptools-0.6 "darwin-<x>.<y>.<z>-<arch>" legacy form.
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided == required:
        # easy case
        return True

    # macOS special cases
    reqMac = macosVersionString.match(required)
    if reqMac:
        provMac = macosVersionString.match(provided)

        # is this a Mac package?
        if not provMac:
            # this is backwards compatibility for packages built before
            # setuptools 0.6. All packages built after this point will
            # use the new macOS designation.
            provDarwin = darwinVersionString.match(provided)
            if provDarwin:
                dversion = int(provDarwin.group(1))
                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
                if (
                    dversion == 7
                    and macosversion >= "10.3"
                    or dversion == 8
                    and macosversion >= "10.4"
                ):
                    return True
            # egg isn't macOS or legacy darwin
            return False

        # are they the same major version and machine type?
        if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3):
            return False

        # is the required OS major update >= the provided one?
        if int(provMac.group(2)) > int(reqMac.group(2)):
            return False

        return True

    # XXX Linux and other platforms' special cases should go here
    return False
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Run in the *caller's* globals so the script behaves like __main__.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)
# backward compatibility
run_main = run_script
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Coerce string -> Requirement -> Distribution, step by step.
    if isinstance(dist, str):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if not isinstance(dist, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return dist
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    return get_distribution(dist).load_entry_point(group, name)
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    return get_distribution(dist).get_entry_map(group)
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    return get_distribution(dist).get_entry_info(group, name)
class IMetadataProvider:
    """Interface documenting metadata access; methods are stubs by design."""

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
601 """A collection of active distributions on sys.path (or a similar list)"""
603 def __init__(self
, entries
=None):
604 """Create working set from list of path entries (default=sys.path)"""
608 self
.normalized_to_canonical_keys
= {}
614 for entry
in entries
:
615 self
.add_entry(entry
)
618 def _build_master(cls
):
620 Prepare the master working set.
624 from __main__
import __requires__
626 # The main program does not list any requirements
629 # ensure the requirements are met
631 ws
.require(__requires__
)
632 except VersionConflict
:
633 return cls
._build
_from
_requirements
(__requires__
)
638 def _build_from_requirements(cls
, req_spec
):
640 Build a working set from a requirement spec. Rewrites sys.path.
642 # try it without defaults already on sys.path
643 # by starting with an empty path
645 reqs
= parse_requirements(req_spec
)
646 dists
= ws
.resolve(reqs
, Environment())
650 # add any missing entries from sys.path
651 for entry
in sys
.path
:
652 if entry
not in ws
.entries
:
655 # then copy back to sys.path
656 sys
.path
[:] = ws
.entries
659 def add_entry(self
, entry
):
660 """Add a path item to ``.entries``, finding any distributions on it
662 ``find_distributions(entry, True)`` is used to find distributions
663 corresponding to the path entry, and they are added. `entry` is
664 always appended to ``.entries``, even if it is already present.
665 (This is because ``sys.path`` can contain the same value more than
666 once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
669 self
.entry_keys
.setdefault(entry
, [])
670 self
.entries
.append(entry
)
671 for dist
in find_distributions(entry
, True):
672 self
.add(dist
, entry
, False)
674 def __contains__(self
, dist
):
675 """True if `dist` is the active distribution for its project"""
676 return self
.by_key
.get(dist
.key
) == dist
679 """Find a distribution matching requirement `req`
681 If there is an active distribution for the requested project, this
682 returns it as long as it meets the version requirement specified by
683 `req`. But, if there is an active distribution for the project and it
684 does *not* meet the `req` requirement, ``VersionConflict`` is raised.
685 If there is no active distribution for the requested project, ``None``
688 dist
= self
.by_key
.get(req
.key
)
691 canonical_key
= self
.normalized_to_canonical_keys
.get(req
.key
)
693 if canonical_key
is not None:
694 req
.key
= canonical_key
695 dist
= self
.by_key
.get(canonical_key
)
697 if dist
is not None and dist
not in req
:
699 raise VersionConflict(dist
, req
)
702 def iter_entry_points(self
, group
, name
=None):
703 """Yield entry point objects from `group` matching `name`
705 If `name` is None, yields all entry points in `group` from all
706 distributions in the working set, otherwise only ones matching
707 both `group` and `name` are yielded (in distribution order).
712 for entry
in dist
.get_entry_map(group
).values()
713 if name
is None or name
== entry
.name
716 def run_script(self
, requires
, script_name
):
717 """Locate distribution for `requires` and run `script_name` script"""
718 ns
= sys
._getframe
(1).f_globals
719 name
= ns
['__name__']
721 ns
['__name__'] = name
722 self
.require(requires
)[0].run_script(script_name
, ns
)
725 """Yield distributions for non-duplicate projects in the working set
727 The yield order is the order in which the items' path entries were
728 added to the working set.
731 for item
in self
.entries
:
732 if item
not in self
.entry_keys
:
733 # workaround a cache issue
736 for key
in self
.entry_keys
[item
]:
739 yield self
.by_key
[key
]
741 def add(self
, dist
, entry
=None, insert
=True, replace
=False):
742 """Add `dist` to working set, associated with `entry`
744 If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
745 On exit from this routine, `entry` is added to the end of the working
746 set's ``.entries`` (if it wasn't already present).
748 `dist` is only added to the working set if it's for a project that
749 doesn't already have a distribution in the set, unless `replace=True`.
750 If it's added, any callbacks registered with the ``subscribe()`` method
754 dist
.insert_on(self
.entries
, entry
, replace
=replace
)
757 entry
= dist
.location
758 keys
= self
.entry_keys
.setdefault(entry
, [])
759 keys2
= self
.entry_keys
.setdefault(dist
.location
, [])
760 if not replace
and dist
.key
in self
.by_key
:
761 # ignore hidden distros
764 self
.by_key
[dist
.key
] = dist
765 normalized_name
= packaging
.utils
.canonicalize_name(dist
.key
)
766 self
.normalized_to_canonical_keys
[normalized_name
] = dist
.key
767 if dist
.key
not in keys
:
768 keys
.append(dist
.key
)
769 if dist
.key
not in keys2
:
770 keys2
.append(dist
.key
)
771 self
._added
_new
(dist
)
778 replace_conflicting
=False,
781 """List all distributions needed to (recursively) meet `requirements`
783 `requirements` must be a sequence of ``Requirement`` objects. `env`,
784 if supplied, should be an ``Environment`` instance. If
785 not supplied, it defaults to all distributions available within any
786 entry or distribution in the working set. `installer`, if supplied,
787 will be invoked with each requirement that cannot be met by an
788 already-installed distribution; it should return a ``Distribution`` or
791 Unless `replace_conflicting=True`, raises a VersionConflict exception
793 any requirements are found on the path that have the correct name but
794 the wrong version. Otherwise, if an `installer` is supplied it will be
795 invoked to obtain the correct version of the requirement and activate
798 `extras` is a list of the extras to be used with these requirements.
799 This is important because extra requirements may look like `my_req;
800 extra = "my_extra"`, which would otherwise be interpreted as a purely
801 optional requirement. Instead, we want to be able to assert that these
802 requirements are truly required.
806 requirements
= list(requirements
)[::-1]
807 # set of processed requirements
813 req_extras
= _ReqExtras()
815 # Mapping of requirement to set of distributions that required it;
816 # useful for reporting info about conflicts.
817 required_by
= collections
.defaultdict(set)
820 # process dependencies breadth-first
821 req
= requirements
.pop(0)
823 # Ignore cyclic or redundant dependencies
826 if not req_extras
.markers_pass(req
, extras
):
829 dist
= self
._resolve
_dist
(
830 req
, best
, replace_conflicting
, env
, installer
, required_by
, to_activate
833 # push the new requirements onto the stack
834 new_requirements
= dist
.requires(req
.extras
)[::-1]
835 requirements
.extend(new_requirements
)
837 # Register the new requirements needed by req
838 for new_requirement
in new_requirements
:
839 required_by
[new_requirement
].add(req
.project_name
)
840 req_extras
[new_requirement
] = req
.extras
842 processed
[req
] = True
844 # return list of distros to activate
848 self
, req
, best
, replace_conflicting
, env
, installer
, required_by
, to_activate
850 dist
= best
.get(req
.key
)
852 # Find the best distribution and add it to the map
853 dist
= self
.by_key
.get(req
.key
)
854 if dist
is None or (dist
not in req
and replace_conflicting
):
858 env
= Environment(self
.entries
)
860 # Use an empty environment and workingset to avoid
861 # any further conflicts with the conflicting
863 env
= Environment([])
865 dist
= best
[req
.key
] = env
.best_match(
866 req
, ws
, installer
, replace_conflicting
=replace_conflicting
869 requirers
= required_by
.get(req
, None)
870 raise DistributionNotFound(req
, requirers
)
871 to_activate
.append(dist
)
873 # Oops, the "best" so far conflicts with a dependency
874 dependent_req
= required_by
[req
]
875 raise VersionConflict(dist
, req
).with_context(dependent_req
)
878 def find_plugins(self
, plugin_env
, full_env
=None, installer
=None, fallback
=True):
879 """Find all activatable distributions in `plugin_env`
883 distributions, errors = working_set.find_plugins(
884 Environment(plugin_dirlist)
886 # add plugins+libs to sys.path
887 map(working_set.add, distributions)
889 print('Could not load', errors)
891 The `plugin_env` should be an ``Environment`` instance that contains
892 only distributions that are in the project's "plugin directory" or
893 directories. The `full_env`, if supplied, should be an ``Environment``
894 contains all currently-available distributions. If `full_env` is not
895 supplied, one is created automatically from the ``WorkingSet`` this
896 method is called on, which will typically mean that every directory on
897 ``sys.path`` will be scanned for distributions.
899 `installer` is a standard installer callback as used by the
900 ``resolve()`` method. The `fallback` flag indicates whether we should
901 attempt to resolve older versions of a plugin if the newest version
904 This method returns a 2-tuple: (`distributions`, `error_info`), where
905 `distributions` is a list of the distributions found in `plugin_env`
906 that were loadable, along with any other distributions that are needed
907 to resolve their dependencies. `error_info` is a dictionary mapping
908 unloadable plugin distributions to an exception instance describing the
909 error that occurred. Usually this will be a ``DistributionNotFound`` or
910 ``VersionConflict`` instance.
913 plugin_projects
= list(plugin_env
)
914 # scan project names in alphabetic order
915 plugin_projects
.sort()
921 env
= Environment(self
.entries
)
924 env
= full_env
+ plugin_env
926 shadow_set
= self
.__class
__([])
927 # put all our entries in shadow_set
928 list(map(shadow_set
.add
, self
))
930 for project_name
in plugin_projects
:
931 for dist
in plugin_env
[project_name
]:
932 req
= [dist
.as_requirement()]
935 resolvees
= shadow_set
.resolve(req
, env
, installer
)
937 except ResolutionError
as v
:
941 # try the next older version of project
944 # give up on this project, keep going
948 list(map(shadow_set
.add
, resolvees
))
949 distributions
.update(dict.fromkeys(resolvees
))
951 # success, no need to try any more versions of this project
954 distributions
= list(distributions
)
957 return distributions
, error_info
959 def require(self
, *requirements
):
960 """Ensure that distributions matching `requirements` are activated
962 `requirements` must be a string or a (possibly-nested) sequence
963 thereof, specifying the distributions and versions required. The
964 return value is a sequence of the distributions that needed to be
965 activated to fulfill the requirements; all relevant distributions are
966 included, even if they were already activated in this working set.
968 needed
= self
.resolve(parse_requirements(requirements
))
975 def subscribe(self
, callback
, existing
=True):
976 """Invoke `callback` for all distributions
978 If `existing=True` (default),
979 call on all existing ones, as well.
981 if callback
in self
.callbacks
:
983 self
.callbacks
.append(callback
)
989 def _added_new(self
, dist
):
990 for callback
in self
.callbacks
:
993 def __getstate__(self
):
996 self
.entry_keys
.copy(),
998 self
.normalized_to_canonical_keys
.copy(),
1002 def __setstate__(self
, e_k_b_n_c
):
1003 entries
, keys
, by_key
, normalized_to_canonical_keys
, callbacks
= e_k_b_n_c
1004 self
.entries
= entries
[:]
1005 self
.entry_keys
= keys
.copy()
1006 self
.by_key
= by_key
.copy()
1007 self
.normalized_to_canonical_keys
= normalized_to_canonical_keys
.copy()
1008 self
.callbacks
= callbacks
[:]
class _ReqExtras(dict):
    """
    Map each requirement to the extras that demanded it.
    """

    def markers_pass(self, req, extras=None):
        """
        Evaluate markers for req against each extra that
        demanded it.

        Return False if the req has a marker and fails
        evaluation. Otherwise, return True.
        """
        extra_evals = (
            req.marker.evaluate({'extra': extra})
            for extra in self.get(req, ()) + (extras or (None,))
        )
        # No marker => unconditionally required; otherwise any extra may pass.
        return not req.marker or any(extra_evals)
1032 """Searchable snapshot of distributions on a search path"""
1035 self
, search_path
=None, platform
=get_supported_platform(), python
=PY_MAJOR
1037 """Snapshot distributions available on a search path
1039 Any distributions found on `search_path` are added to the environment.
1040 `search_path` should be a sequence of ``sys.path`` items. If not
1041 supplied, ``sys.path`` is used.
1043 `platform` is an optional string specifying the name of the platform
1044 that platform-specific distributions must be compatible with. If
1045 unspecified, it defaults to the current platform. `python` is an
1046 optional string naming the desired version of Python (e.g. ``'3.6'``);
1047 it defaults to the current version.
1049 You may explicitly set `platform` (and/or `python`) to ``None`` if you
1050 wish to map *all* distributions, not just those compatible with the
1051 running platform or Python version.
1054 self
.platform
= platform
1055 self
.python
= python
1056 self
.scan(search_path
)
1058 def can_add(self
, dist
):
1059 """Is distribution `dist` acceptable for this environment?
1061 The distribution must match the platform and python version
1062 requirements specified when this environment was created, or False
1067 or dist
.py_version
is None
1068 or dist
.py_version
== self
.python
1070 return py_compat
and compatible_platforms(dist
.platform
, self
.platform
)
1072 def remove(self
, dist
):
1073 """Remove `dist` from the environment"""
1074 self
._distmap
[dist
.key
].remove(dist
)
1076 def scan(self
, search_path
=None):
1077 """Scan `search_path` for distributions usable in this environment
1079 Any distributions found are added to the environment.
1080 `search_path` should be a sequence of ``sys.path`` items. If not
1081 supplied, ``sys.path`` is used. Only distributions conforming to
1082 the platform/python version defined at initialization are added.
1084 if search_path
is None:
1085 search_path
= sys
.path
1087 for item
in search_path
:
1088 for dist
in find_distributions(item
):
1091 def __getitem__(self
, project_name
):
1092 """Return a newest-to-oldest list of distributions for `project_name`
1094 Uses case-insensitive `project_name` comparison, assuming all the
1095 project's distributions use their project's name converted to all
1096 lowercase as their key.
1099 distribution_key
= project_name
.lower()
1100 return self
._distmap
.get(distribution_key
, [])
1102 def add(self
, dist
):
1103 """Add `dist` if we ``can_add()`` it and it has not already been added"""
1104 if self
.can_add(dist
) and dist
.has_version():
1105 dists
= self
._distmap
.setdefault(dist
.key
, [])
1106 if dist
not in dists
:
1108 dists
.sort(key
=operator
.attrgetter('hashcmp'), reverse
=True)
1110 def best_match(self
, req
, working_set
, installer
=None, replace_conflicting
=False):
1111 """Find distribution best matching `req` and usable on `working_set`
1113 This calls the ``find(req)`` method of the `working_set` to see if a
1114 suitable distribution is already active. (This may raise
1115 ``VersionConflict`` if an unsuitable version of the project is already
1116 active in the specified `working_set`.) If a suitable distribution
1117 isn't active, this method returns the newest distribution in the
1118 environment that meets the ``Requirement`` in `req`. If no suitable
1119 distribution is found, and `installer` is supplied, then the result of
1120 calling the environment's ``obtain(req, installer)`` method will be
1124 dist
= working_set
.find(req
)
1125 except VersionConflict
:
1126 if not replace_conflicting
:
1129 if dist
is not None:
1131 for dist
in self
[req
.key
]:
1134 # try to download/install
1135 return self
.obtain(req
, installer
)
1137 def obtain(self
, requirement
, installer
=None):
1138 """Obtain a distribution matching `requirement` (e.g. via download)
1140 Obtain a distro that matches requirement (e.g. via download). In the
1141 base ``Environment`` class, this routine just returns
1142 ``installer(requirement)``, unless `installer` is None, in which case
1143 None is returned instead. This method is a hook that allows subclasses
1144 to attempt other ways of obtaining a distribution before falling back
1145 to the `installer` argument."""
1146 if installer
is not None:
1147 return installer(requirement
)
1150 """Yield the unique project names of the available distributions"""
1151 for key
in self
._distmap
.keys():
1155 def __iadd__(self
, other
):
1156 """In-place addition of a distribution or environment"""
1157 if isinstance(other
, Distribution
):
1159 elif isinstance(other
, Environment
):
1160 for project
in other
:
1161 for dist
in other
[project
]:
1164 raise TypeError("Can't add %r to environment" % (other
,))
1167 def __add__(self
, other
):
1168 """Add an environment or distribution to an environment"""
1169 new
= self
.__class
__([], platform
=None, python
=None)
1170 for env
in self
, other
:
# XXX backward compatibility
AvailableDistributions = Environment
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
class ResourceManager:
    """Manage resource extraction and packages"""

    # Optional override for the extraction base dir; None => default cache.
    extraction_path = None

    def __init__(self):
        # target_path -> 1 for every cache path handed out (cleanup tracking)
        self.cached_files = {}
1203 def resource_exists(self
, package_or_requirement
, resource_name
):
1204 """Does the named resource exist?"""
1205 return get_provider(package_or_requirement
).has_resource(resource_name
)
1207 def resource_isdir(self
, package_or_requirement
, resource_name
):
1208 """Is the named resource an existing directory?"""
1209 return get_provider(package_or_requirement
).resource_isdir(resource_name
)
1211 def resource_filename(self
, package_or_requirement
, resource_name
):
1212 """Return a true filesystem path for specified resource"""
1213 return get_provider(package_or_requirement
).get_resource_filename(
1217 def resource_stream(self
, package_or_requirement
, resource_name
):
1218 """Return a readable file-like object for specified resource"""
1219 return get_provider(package_or_requirement
).get_resource_stream(
1223 def resource_string(self
, package_or_requirement
, resource_name
):
1224 """Return specified resource as a string"""
1225 return get_provider(package_or_requirement
).get_resource_string(
1229 def resource_listdir(self
, package_or_requirement
, resource_name
):
1230 """List the contents of the named resource directory"""
1231 return get_provider(package_or_requirement
).resource_listdir(resource_name
)
1233 def extraction_error(self
):
1234 """Give an error message for problems extracting file(s)"""
1236 old_exc
= sys
.exc_info()[1]
1237 cache_path
= self
.extraction_path
or get_default_cache()
1239 tmpl
= textwrap
.dedent(
1241 Can't extract file(s) to egg cache
1243 The following error occurred while trying to extract file(s)
1244 to the Python egg cache:
1248 The Python egg cache directory is currently set to:
1252 Perhaps your account does not have write access to this directory?
1253 You can change the cache directory by setting the PYTHON_EGG_CACHE
1254 environment variable to point to an accessible directory.
1257 err
= ExtractionError(tmpl
.format(**locals()))
1259 err
.cache_path
= cache_path
1260 err
.original_error
= old_exc
def get_cache_path(self, archive_name, names=()):
    """Return absolute location in cache for `archive_name` and `names`

    The parent directory of the resulting path will be created if it does
    not already exist. `archive_name` should be the base filename of the
    enclosing egg (which may not be the name of the enclosing zipfile!),
    including its ".egg" extension. `names`, if provided, should be a
    sequence of path name parts "under" the egg's extraction location.

    This method should only be called by resource providers that need to
    obtain an extraction location, and only for names they intend to
    extract, as it tracks the generated names for possible cleanup later.
    """
    extract_path = self.extraction_path or get_default_cache()
    target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
    try:
        _bypass_ensure_directory(target_path)
    except Exception:
        # Turn any mkdir failure into a user-friendly ExtractionError.
        self.extraction_error()

    self._warn_unsafe_extraction_path(extract_path)

    # Record the name so cleanup_resources() can find it later.
    self.cached_files[target_path] = 1
    return target_path
@staticmethod
def _warn_unsafe_extraction_path(path):
    """
    If the default extraction path is overridden and set to an insecure
    location, such as /tmp, it opens up an opportunity for an attacker to
    replace an extracted file with an unauthorized payload. Warn the user
    if a known insecure location is used.

    See Distribute #375 for more details.
    """
    if os.name == 'nt' and not path.startswith(os.environ['windir']):
        # On Windows, permissions are generally restrictive by default
        # and temp directories are not writable by other users, so
        # bypass the warning.
        return
    mode = os.stat(path).st_mode
    if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
        msg = (
            "Extraction path is writable by group/others "
            "and vulnerable to attack when "
            "used with get_resource_filename ({path}). "
            "Consider a more secure "
            "location (set with .set_extraction_path or the "
            "PYTHON_EGG_CACHE environment variable)."
        ).format(**locals())
        warnings.warn(msg, UserWarning)
def postprocess(self, tempname, filename):
    """Perform any platform-specific postprocessing of `tempname`

    This is where Mac header rewrites should be done; other platforms don't
    have anything special they should do.

    Resource providers should call this method ONLY after successfully
    extracting a compressed resource.  They must NOT call it on resources
    that are already in the filesystem.

    `tempname` is the current (temporary) name of the file, and `filename`
    is the name it will be renamed to by the caller after this routine
    returns.
    """
    if os.name == 'posix':
        # Make the resource executable (preserve other bits, cap at 0o7777).
        mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
        os.chmod(tempname, mode)
def set_extraction_path(self, path):
    """Set the base path where resources will be extracted to, if needed.

    If you do not call this routine before any extractions take place, the
    path defaults to the return value of ``get_default_cache()``.  (Which
    is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
    platform-specific fallbacks.  See that routine's documentation for more
    details.)

    Resources are extracted to subdirectories of this path based upon
    information given by the ``IResourceProvider``.  You may set this to a
    temporary directory, but then you must call ``cleanup_resources()`` to
    delete the extracted files when done.  There is no guarantee that
    ``cleanup_resources()`` will be able to remove all extracted files.

    (Note: you may not change the extraction path for a given resource
    manager once resources have been extracted, unless you first call
    ``cleanup_resources()``.)
    """
    # Refuse to change the path once any extraction has been recorded.
    if self.cached_files:
        raise ValueError("Can't change extraction path, files already extracted")

    self.extraction_path = path
def cleanup_resources(self, force=False):
    """
    Delete all extracted resource files and directories, returning a list
    of the file and directory names that could not be successfully removed.
    This function does not have any concurrency protection, so it should
    generally only be called when the extraction path is a temporary
    directory exclusive to a single process.  This method is not
    automatically called; you must call it explicitly or register it as an
    ``atexit`` function if you wish to ensure cleanup of a temporary
    directory used for extractions.
    """
    # NOTE(review): intentionally a no-op stub in this implementation.
def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    return os.environ.get('PYTHON_EGG_CACHE') or platformdirs.user_cache_dir(
        appname='Python-Eggs'
    )
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    disallowed = re.compile('[^A-Za-z0-9.]+')
    return disallowed.sub('-', name)
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # normalize the version per PEP 440 when possible
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        # fall back to a dash-separated sanitized form
        version = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', version)
def _forgiving_version(version):
    """Fallback when ``safe_version`` is not safe enough
    >>> parse_version(_forgiving_version('0.23ubuntu1'))
    <Version('0.23.dev0+sanitized.ubuntu1')>
    >>> parse_version(_forgiving_version('0.23-'))
    <Version('0.23.dev0+sanitized')>
    >>> parse_version(_forgiving_version('0.-_'))
    <Version('0.dev0+sanitized')>
    >>> parse_version(_forgiving_version('42.+?1'))
    <Version('42.dev0+sanitized.1')>
    >>> parse_version(_forgiving_version('hello world'))
    <Version('0.dev0+sanitized.hello.world')>
    """
    version = version.replace(' ', '.')
    match = _PEP440_FALLBACK.search(version)
    if match:
        # keep the PEP 440-valid prefix, sanitize the rest into a local segment
        safe = match["safe"]
        rest = version[len(safe):]
    else:
        safe = "0"
        rest = version
    local = f"sanitized.{_safe_segment(rest)}".strip(".")
    return f"{safe}.dev0+{local}"
def _safe_segment(segment):
    """Convert an arbitrary string into a safe segment"""
    # Collapse runs of non-alphanumeric/non-dot characters to a single '-'.
    segment = re.sub('[^A-Za-z0-9.]+', '-', segment)
    # Collapse a '-' followed by more separators into a single '-'.
    segment = re.sub('-[^A-Za-z0-9]+', '-', segment)
    # Collapse a '.' followed by separators into a single '.'; trim edges.
    return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-")
1436 def safe_extra(extra
):
1437 """Convert an arbitrary string to a standard 'extra' name
1439 Any runs of non-alphanumeric characters are replaced with a single '_',
1440 and the result is always lowercased.
1442 return re
.sub('[^A-Za-z0-9.-]+', '_', extra
).lower()
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return name.translate(str.maketrans('-', '_'))
def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as e:
        # Strip position info that is meaningless for an in-memory marker.
        e.filename = None
        e.lineno = None
        return e
    return False
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        marker = packaging.markers.Marker(text)
        return marker.evaluate()
    except packaging.markers.InvalidMarker as e:
        # Normalize to SyntaxError for historical API compatibility.
        raise SyntaxError(e) from e
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Defaults; EggProvider subclasses populate these when an egg is found.
    egg_name = None
    egg_info = None
    loader = None
def __init__(self, module):
    """Capture the module's loader and its on-disk directory."""
    self.loader = getattr(module, '__loader__', None)
    self.module_path = os.path.dirname(getattr(module, '__file__', ''))
def get_resource_filename(self, manager, resource_name):
    """Map a '/'-separated resource name to a filesystem path."""
    return self._fn(self.module_path, resource_name)
def get_resource_stream(self, manager, resource_name):
    """Return a readable binary stream over the resource's bytes."""
    return io.BytesIO(self.get_resource_string(manager, resource_name))
def get_resource_string(self, manager, resource_name):
    """Return the resource's contents (bytes) via the loader's get_data."""
    return self._get(self._fn(self.module_path, resource_name))
def has_resource(self, resource_name):
    """Return True if the named resource exists under module_path."""
    return self._has(self._fn(self.module_path, resource_name))
def _get_metadata_path(self, name):
    """Resolve a metadata name relative to the egg's EGG-INFO directory."""
    return self._fn(self.egg_info, name)
def has_metadata(self, name):
    """Return whether metadata `name` exists (falsy when no egg_info)."""
    if not self.egg_info:
        # No egg metadata directory at all; return the falsy egg_info value.
        return self.egg_info

    path = self._get_metadata_path(name)
    return self._has(path)
def get_metadata(self, name):
    """Return the named metadata file's contents decoded as UTF-8.

    Returns an empty string when the provider has no egg_info.
    """
    if not self.egg_info:
        return ""

    path = self._get_metadata_path(name)
    value = self._get(path)
    try:
        return value.decode('utf-8')
    except UnicodeDecodeError as exc:
        # Include the path in the error message to simplify
        # troubleshooting, and without changing the exception type.
        exc.reason += ' in {} file at path: {}'.format(name, path)
        raise
def get_metadata_lines(self, name):
    """Yield non-blank, non-comment lines of the named metadata file."""
    return yield_lines(self.get_metadata(name))
def resource_isdir(self, resource_name):
    """Return True if the named resource is a directory."""
    return self._isdir(self._fn(self.module_path, resource_name))
def metadata_isdir(self, name):
    """Return True if metadata `name` is a directory (falsy without egg_info)."""
    return self.egg_info and self._isdir(self._fn(self.egg_info, name))
def resource_listdir(self, resource_name):
    """List the contents of the named resource directory."""
    return self._listdir(self._fn(self.module_path, resource_name))
def metadata_listdir(self, name):
    """List the contents of metadata directory `name` ([] without egg_info)."""
    if self.egg_info:
        return self._listdir(self._fn(self.egg_info, name))
    return []
def run_script(self, script_name, namespace):
    """Execute the named egg script in `namespace`.

    Raises ResolutionError when the script is absent from the metadata.
    """
    script = 'scripts/' + script_name
    if not self.has_metadata(script):
        raise ResolutionError(
            "Script {script!r} not found in metadata at {self.egg_info!r}".format(
                **locals()
            ),
        )

    # Normalize all line endings to '\n' before compiling.
    script_text = self.get_metadata(script).replace('\r\n', '\n')
    script_text = script_text.replace('\r', '\n')
    script_filename = self._fn(self.egg_info, script)
    namespace['__file__'] = script_filename
    if os.path.exists(script_filename):
        # Unpacked egg: run the real file so tracebacks show real source.
        with open(script_filename) as fid:
            source = fid.read()
        code = compile(source, script_filename, 'exec')
        exec(code, namespace, namespace)
    else:
        # Zipped egg: seed linecache so tracebacks can display the source.
        from linecache import cache

        cache[script_filename] = (
            len(script_text),
            0,
            script_text.split('\n'),
            script_filename,
        )
        script_code = compile(script_text, script_filename, 'exec')
        exec(script_code, namespace, namespace)
def _has(self, path):
    """Existence check hook; concrete providers must override."""
    raise NotImplementedError(
        "Can't perform this operation for unregistered loader type"
    )
def _isdir(self, path):
    """Directory check hook; concrete providers must override."""
    raise NotImplementedError(
        "Can't perform this operation for unregistered loader type"
    )
def _listdir(self, path):
    """Directory listing hook; concrete providers must override."""
    raise NotImplementedError(
        "Can't perform this operation for unregistered loader type"
    )
def _fn(self, base, resource_name):
    """Join a '/'-separated resource name onto `base` after validating it."""
    self._validate_resource_path(resource_name)
    if resource_name:
        return os.path.join(base, *resource_name.split('/'))
    return base
@staticmethod
def _validate_resource_path(path):
    """
    Validate the resource paths according to the docs.
    https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access

    >>> warned = getfixture('recwarn')
    >>> warnings.simplefilter('always')
    >>> vrp = NullProvider._validate_resource_path
    >>> vrp('foo/bar.txt')
    >>> bool(warned)
    False
    >>> vrp('../foo/bar.txt')
    >>> bool(warned)
    True
    >>> warned.clear()
    >>> vrp('/foo/bar.txt')
    >>> bool(warned)
    True
    >>> vrp('foo/../../bar.txt')
    >>> bool(warned)
    True
    >>> warned.clear()
    >>> vrp('foo/f../bar.txt')
    >>> bool(warned)
    False

    Windows path separators are straight-up disallowed.
    >>> vrp(r'\\foo/bar.txt')
    Traceback (most recent call last):
    ...
    ValueError: Use of .. or absolute path in a resource path \
is not allowed.

    >>> vrp(r'C:\\foo/bar.txt')
    Traceback (most recent call last):
    ...
    ValueError: Use of .. or absolute path in a resource path \
is not allowed.

    Blank values are allowed

    >>> vrp('')
    >>> bool(warned)
    False

    Non-string values are not.

    >>> vrp(None)
    Traceback (most recent call last):
    ...
    AttributeError: ...
    """
    invalid = (
        os.path.pardir in path.split(posixpath.sep)
        or posixpath.isabs(path)
        or ntpath.isabs(path)
    )
    if not invalid:
        return

    msg = "Use of .. or absolute path in a resource path is not allowed."

    # Aggressively disallow Windows absolute paths
    if ntpath.isabs(path) and not posixpath.isabs(path):
        raise ValueError(msg)

    # for compatibility, warn; in future
    # raise ValueError(msg)
    issue_warning(
        msg[:-1] + " and will raise exceptions in a future release.",
        DeprecationWarning,
    )
def _get(self, path):
    """Read raw bytes via the loader's PEP 302 get_data(), if available."""
    if hasattr(self.loader, 'get_data'):
        return self.loader.get_data(path)
    raise NotImplementedError(
        "Can't perform this operation for loaders without 'get_data()'"
    )
# Fallback: any loader type not otherwise registered gets the NullProvider.
register_loader_type(object, NullProvider)
def _parents(path):
    """
    yield all parents of path including path
    """
    last = None
    while path != last:
        yield path
        last = path
        # os.path.split is a fixed point at the filesystem root.
        path, _ = os.path.split(path)
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        super().__init__(module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Assume that metadata may be nested inside a "basket"
        # of multiple eggs and use module_path instead of .archive.
        eggs = filter(_is_egg_path, _parents(self.module_path))
        egg = next(eggs, None)
        # Only record egg attributes when an enclosing egg was found.
        egg and self._set_egg(egg)

    def _set_egg(self, path):
        # Cache the egg's name, metadata dir and root for later lookups.
        self.egg_name = os.path.basename(path)
        self.egg_info = os.path.join(path, 'EGG-INFO')
        self.egg_root = path
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Stream directly from the filesystem; no BytesIO round-trip needed.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # Register for the standard importlib file loaders; missing names
        # resolve to type(None), which harmlessly maps NoneType -> cls.
        loader_names = (
            'SourceFileLoader',
            'SourcelessFileLoader',
        )
        for name in loader_names:
            loader_cls = getattr(importlib_machinery, name, type(None))
            register_loader_type(loader_cls, cls)


DefaultProvider._register()
1744 class EmptyProvider(NullProvider
):
1745 """Provider that returns nothing for all requests"""
1749 _isdir
= _has
= lambda self
, path
: False
1751 def _get(self
, path
):
1754 def _listdir(self
, path
):
1761 empty_provider
= EmptyProvider()
class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as zfile:
            items = (
                (
                    name.replace('/', os.sep),
                    zfile.getinfo(name),
                )
                for name in zfile.namelist()
            )
            return dict(items)

    load = build
class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """

    # (manifest dict, zipfile mtime) pairs stored per normalized path.
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        # Rebuild when unseen or when the zip file changed on disk.
        if path not in self or self[path].mtime != mtime:
            manifest = self.build(path)
            self[path] = self.manifest_mod(manifest, mtime)

        return self[path].manifest
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    eagers = None
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        super().__init__(module)
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        fspath = fspath.rstrip(os.sep)
        if fspath == self.loader.archive:
            return ''
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre))

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1:].split(os.sep)
        raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root))

    @property
    def zipinfo(self):
        return self._zip_manifests.load(self.loader.archive)

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    @staticmethod
    def _get_date_and_size(zip_stat):
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size

    # FIXME: 'ZipProvider._extract_resource' is too complex (12)
    def _extract_resource(self, manager, zip_path):  # noqa: C901
        if zip_path in self._index():
            # Directory entry: extract every child and return the dir name.
            for name in self._index()[zip_path]:
                last = self._extract_resource(manager, os.path.join(zip_path, name))
            # return the extracted directory name
            return os.path.dirname(last)

        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])

        if not WRITE_SUPPORT:
            raise IOError(
                '"os.rename" and "os.unlink" are not supported ' 'on this platform'
            )
        try:
            real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))

            if self._is_current(real_path, zip_path):
                return real_path

            # Write to a temp name, then atomically rename into place.
            outf, tmpnam = _mkstemp(
                ".$extract",
                dir=os.path.dirname(real_path),
            )
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)
            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name == 'nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise
        except os.error:
            # report a user-friendly error
            manager.extraction_error()

        return real_path

    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        if stat.st_size != size or stat.st_mtime != timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents

    def _get_eager_resources(self):
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Lazily build and cache a parent-dir -> children index of the zip.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))


register_loader_type(zipimport.zipimporter, ZipProvider)
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def _get_metadata_path(self, name):
        return self.path

    def has_metadata(self, name):
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        # errors="replace" so undecodable bytes become U+FFFD, detected below.
        with io.open(self.path, encoding='utf-8', errors="replace") as f:
            metadata = f.read()
        self._warn_on_replacement(metadata)
        return metadata

    def _warn_on_replacement(self, metadata):
        replacement_char = '�'
        if replacement_char in metadata:
            tmpl = "{self.path} could not be properly decoded in UTF-8"
            msg = tmpl.format(**locals())
            warnings.warn(msg)

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # Explicit paths; skips EggProvider's module-based prefix discovery.
        self.module_path = path
        self.egg_info = egg_info
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""

        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        if importer.prefix:
            # Nested location inside the archive.
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()


_declare_state('dict', _distribution_finders={})
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item.  See ``pkg_resources.find_on_path`` for an example."""
    _distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # Pick the registered finder matching this importer's type (MRO-based).
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for subitem in metadata.resource_listdir(''):
        if _is_egg_path(subitem):
            # Recurse into nested eggs inside the archive.
            subpath = os.path.join(path_item, subitem)
            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
            for dist in dists:
                yield dist
        elif subitem.lower().endswith(('.dist-info', '.egg-info')):
            subpath = os.path.join(path_item, subitem)
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)


register_finder(zipimport.zipimporter, find_eggs_in_zip)
def find_nothing(importer, path_item, only=False):
    """Finder that yields no distributions for any path item."""
    return ()
# Fallback: unknown importer types yield no distributions.
register_finder(object, find_nothing)
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        # The path item itself is an unpacked egg; yield it directly.
        yield Distribution.from_filename(
            path_item,
            metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')),
        )
        return

    entries = (os.path.join(path_item, child) for child in safe_listdir(path_item))

    # scan for .egg and .egg-info in directory
    for entry in sorted(entries):
        fullpath = os.path.join(path_item, entry)
        factory = dist_factory(path_item, entry, only)
        for dist in factory(fullpath):
            yield dist
def dist_factory(path_item, entry, only):
    """Return a dist_factory for the given entry."""
    lower = entry.lower()
    is_egg_info = lower.endswith('.egg-info')
    # .dist-info must be a directory to count as metadata.
    is_dist_info = lower.endswith('.dist-info') and os.path.isdir(
        os.path.join(path_item, entry)
    )
    is_meta = is_egg_info or is_dist_info
    return (
        distributions_from_metadata
        if is_meta
        else find_distributions
        if not only and _is_egg_path(entry)
        else resolve_egg_link
        if not only and lower.endswith('.egg-link')
        else NoDists()
    )
class NoDists:
    """
    >>> bool(NoDists())
    False

    >>> list(NoDists()('anything'))
    []
    """

    def __bool__(self):
        return False

    def __call__(self, fullpath):
        # Yield nothing for any path.
        return iter(())
def safe_listdir(path):
    """
    Attempt to list contents of path, but suppress some exceptions.
    """
    try:
        return os.listdir(path)
    except (PermissionError, NotADirectoryError):
        pass
    except OSError as e:
        # Ignore the directory if does not exist, not a directory or
        # permission denied
        if e.errno not in (errno.ENOTDIR, errno.EACCES, errno.ENOENT):
            raise
    return ()
def distributions_from_metadata(path):
    """Yield a Distribution for the metadata dir/file at `path`."""
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if len(os.listdir(path)) == 0:
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        metadata = FileMetadata(path)
    entry = os.path.basename(path)
    yield Distribution.from_location(
        root,
        entry,
        metadata,
        precedence=DEVELOP_DIST,
    )
def non_empty_lines(path):
    """
    Yield non-empty lines from file at path
    """
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line:
                yield line
def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.
    """
    referenced_paths = non_empty_lines(path)
    resolved_paths = (
        os.path.join(os.path.dirname(path), ref) for ref in referenced_paths
    )
    dist_groups = map(find_distributions, resolved_paths)
    # Only the first referenced path is considered.
    return next(dist_groups, ())
# ImpImporter only exists on older interpreters; register it when present.
if hasattr(pkgutil, 'ImpImporter'):
    register_finder(pkgutil.ImpImporter, find_on_path)

register_finder(importlib_machinery.FileFinder, find_on_path)

# Module-level registries, preserved across pkg_resources re-imports.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath.  For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    _namespace_handlers[importer_type] = namespace_handler
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""

    importer = get_importer(path_item)
    if importer is None:
        return None

    # use find_spec (PEP 451) and fall-back to find_module (PEP 302)
    try:
        spec = importer.find_spec(packageName)
    except AttributeError:
        # capture warnings due to #1111
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            loader = importer.find_module(packageName)
    else:
        loader = spec.loader if spec else None

    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Create a fresh namespace module and hook it into its parent.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        path = module.__path__
        path.append(subpath)
        importlib.import_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath
def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]

    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.
        """
        try:
            return sys_path.index(entry)
        except ValueError:
            # Entries not on sys.path sort after everything else.
            return float('inf')

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        # Strip the package's own trailing directories to find the sys.path
        # entry this __path__ component came from.
        path_parts = path.split(os.sep)
        module_parts = package_name.count('.') + 1
        parts = path_parts[:-module_parts]
        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))

    new_path = sorted(orig_path, key=position_in_sys_path)
    new_path = [_normalize_cached(p) for p in new_path]

    if isinstance(module.__path__, list):
        # Mutate in place so aliased references observe the reorder.
        module.__path__[:] = new_path
    else:
        module.__path__ = new_path
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""
    msg = (
        f"Deprecated call to `pkg_resources.declare_namespace({packageName!r})`.\n"
        "Implementing implicit namespace packages (as specified in PEP 420) "
        "is preferred to `pkg_resources.declare_namespace`. "
        "See https://setuptools.pypa.io/en/latest/references/"
        "keywords.html#keyword-namespace-packages"
    )
    warnings.warn(msg, DeprecationWarning, stacklevel=2)

    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return

        path = sys.path
        parent, _, _ = packageName.rpartition('.')

        if parent:
            # Ensure all ancestor packages are declared/imported first.
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError as e:
                raise TypeError("Not a package:", parent) from e

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent or None, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)
    finally:
        _imp.release_lock()
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    _imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent, ()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                # Recurse so grandchildren see the new path item too.
                fixup_namespace_packages(subpath, package)
    finally:
        _imp.release_lock()
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    for item in module.__path__:
        if _normalize_cached(item) == normalized:
            break
    else:
        # Only return the path if it's not already there
        return subpath
# Register the filesystem/zip namespace handler for all standard importers.
if hasattr(pkgutil, 'ImpImporter'):
    register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)

register_namespace_handler(zipimport.zipimporter, file_ns_handler)
register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
def null_ns_handler(importer, path_item, packageName, module):
    """Namespace handler that contributes no subpath for unknown importers."""
    return None
# Fallback: unknown importer types contribute no namespace subpaths.
register_namespace_handler(object, null_ns_handler)
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    # realpath resolves symlinks; normcase folds case on case-insensitive OSes.
    return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
2416 def _cygwin_patch(filename
): # pragma: nocover
2418 Contrary to POSIX 2008, on Cygwin, getcwd (3) contains
2419 symlink components. Using
2420 os.path.abspath() works around this limitation. A fix in os.getcwd()
2421 would probably better, in Cygwin even more so, except
2422 that this seems to be by design...
2424 return os
.path
.abspath(filename
) if sys
.platform
== 'cygwin' else filename
2427 def _normalize_cached(filename
, _cache
={}):
2429 return _cache
[filename
]
2431 _cache
[filename
] = result
= normalize_path(filename
)
2435 def _is_egg_path(path
):
2437 Determine if given path appears to be an egg.
2439 return _is_zip_egg(path
) or _is_unpacked_egg(path
)
2442 def _is_zip_egg(path
):
2444 path
.lower().endswith('.egg')
2445 and os
.path
.isfile(path
)
2446 and zipfile
.is_zipfile(path
)
2450 def _is_unpacked_egg(path
):
2452 Determine if given path appears to be an unpacked egg.
2454 return path
.lower().endswith('.egg') and os
.path
.isfile(
2455 os
.path
.join(path
, 'EGG-INFO', 'PKG-INFO')
2459 def _set_parent_ns(packageName
):
2460 parts
= packageName
.split('.')
2463 parent
= '.'.join(parts
)
2464 setattr(sys
.modules
[parent
], name
, sys
.modules
[packageName
])
# Predicate for valid dotted module/group names ("pkg.mod").
MODULE = re.compile(r"\w+(\.\w+)*$").match

# Parser for egg basenames: name[-version[-pyX.Y[-platform]]].
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match
2483 """Object representing an advertised importable object"""
def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
    # Validate the dotted module path before storing anything.
    if not MODULE(module_name):
        raise ValueError("Invalid module name", module_name)
    self.name = name
    self.module_name = module_name
    self.attrs = tuple(attrs)
    self.extras = tuple(extras)
    self.dist = dist

def __str__(self):
    # Render in entry-point syntax: "name = module[:attrs] [extras]".
    s = "%s = %s" % (self.name, self.module_name)
    if self.attrs:
        s += ':' + '.'.join(self.attrs)
    if self.extras:
        s += ' [%s]' % ','.join(self.extras)
    return s

def __repr__(self):
    return "EntryPoint.parse(%r)" % str(self)
def load(self, require=True, *args, **kwargs):
    """
    Require packages for this EntryPoint, then resolve it.
    """
    # Passing require=False or extra args is a deprecated calling convention.
    if not require or args or kwargs:
        warnings.warn(
            "Parameters to load are deprecated. Call .resolve and "
            ".require separately.",
            PkgResourcesDeprecationWarning,
            stacklevel=2,
        )
    if require:
        self.require(*args, **kwargs)
    return self.resolve()

def resolve(self):
    """
    Resolve the entry point from its module and attrs.
    """
    module = __import__(self.module_name, fromlist=['__name__'], level=0)
    try:
        # Walk the attribute chain (e.g. "Class.method") on the module.
        return functools.reduce(getattr, self.attrs, module)
    except AttributeError as exc:
        raise ImportError(str(exc)) from exc

def require(self, env=None, installer=None):
    if self.extras and not self.dist:
        raise UnknownExtra("Can't require() without a distribution", self)

    # Get the requirements for this entry point with all its extras and
    # then resolve them. We have to pass `extras` along when resolving so
    # that the working set knows what extras we want. Otherwise, for
    # dist-info distributions, the working set will assume that the
    # requirements for that extra are purely optional and skip over them.
    reqs = self.dist.requires(self.extras)
    items = working_set.resolve(reqs, env, installer, extras=self.extras)
    list(map(working_set.add, items))
# Regex for one entry-point line: "name = module[:attrs] [extras]".
pattern = re.compile(
    r'\s*'
    r'(?P<name>.+?)\s*'
    r'=\s*'
    r'(?P<module>[\w.]+)\s*'
    r'(:\s*(?P<attr>[\w.]+))?\s*'
    r'(?P<extras>\[.*\])?\s*$'
)

@classmethod
def parse(cls, src, dist=None):
    """Parse a single entry point from string `src`

    Entry point syntax follows the form::

        name = some.module:some.attr [extra1, extra2]

    The entry name and module name are required, but the ``:attrs`` and
    ``[extras]`` parts are optional
    """
    m = cls.pattern.match(src)
    if not m:
        msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
        raise ValueError(msg, src)
    res = m.groupdict()
    extras = cls._parse_extras(res['extras'])
    attrs = res['attr'].split('.') if res['attr'] else ()
    return cls(res['name'], res['module'], attrs, extras, dist)

@classmethod
def _parse_extras(cls, extras_spec):
    # Piggy-back on Requirement parsing by prefixing a dummy project name.
    if not extras_spec:
        return ()
    req = Requirement.parse('x' + extras_spec)
    if req.specs:
        # version specifiers are not allowed inside an extras bracket
        raise ValueError()
    return req.extras

@classmethod
def parse_group(cls, group, lines, dist=None):
    """Parse an entry point group"""
    if not MODULE(group):
        raise ValueError("Invalid group name", group)
    this = {}
    for line in yield_lines(lines):
        ep = cls.parse(line, dist)
        if ep.name in this:
            raise ValueError("Duplicate entry point", group, ep.name)
        this[ep.name] = ep
    return this

@classmethod
def parse_map(cls, data, dist=None):
    """Parse a map of entry point groups"""
    if isinstance(data, dict):
        data = data.items()
    else:
        data = split_sections(data)
    maps = {}
    for group, lines in data:
        if group is None:
            if not lines:
                continue
            raise ValueError("Entry points must be listed in groups")
        group = group.strip()
        if group in maps:
            raise ValueError("Duplicate group name", group)
        maps[group] = cls.parse_group(group, lines, dist)
    return maps
def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    version_line = next(
        (ln for ln in lines if ln.lower().startswith('version:')), ''
    )
    _, _, value = version_line.partition(':')
    return safe_version(value.strip()) or None
2630 """Wrap an actual or potential sys.path entry w/metadata"""
# Name of the metadata file this distribution type reads its info from.
PKG_INFO = 'PKG-INFO'

# NOTE(review): the full parameter list is reconstructed from upstream
# pkg_resources -- confirm defaults against the installed version.
def __init__(
    self,
    location=None,
    metadata=None,
    project_name=None,
    version=None,
    py_version=PY_MAJOR,
    platform=None,
    precedence=EGG_DIST,
):
    self.project_name = safe_name(project_name or 'Unknown')
    if version is not None:
        self._version = safe_version(version)
    self.py_version = py_version
    self.platform = platform
    self.location = location
    self.precedence = precedence
    # Fall back to a no-op metadata provider when none is supplied.
    self._provider = metadata or empty_provider
@classmethod
def from_location(cls, location, basename, metadata=None, **kw):
    project_name, version, py_version, platform = [None] * 4
    basename, ext = os.path.splitext(basename)
    if ext.lower() in _distributionImpl:
        # Dispatch to the subclass registered for this metadata suffix.
        cls = _distributionImpl[ext.lower()]

        # egg basenames encode name/version/pyver/platform
        match = EGG_NAME(basename)
        if match:
            project_name, version, py_version, platform = match.group(
                'name', 'ver', 'pyver', 'plat'
            )
    return cls(
        location,
        metadata,
        project_name=project_name,
        version=version,
        py_version=py_version,
        platform=platform,
        **kw,
    )._reload_version()
def _reload_version(self):
    # Hook for subclasses (e.g. EggInfoDistribution) to re-read the version.
    return self

@property
def hashcmp(self):
    # Tuple used for hashing and all rich comparisons below.
    return (
        self._forgiving_parsed_version,
        self.precedence,
        self.key,
        self.location,
        self.py_version or '',
        self.platform or '',
    )

def __hash__(self):
    return hash(self.hashcmp)

def __lt__(self, other):
    return self.hashcmp < other.hashcmp

def __le__(self, other):
    return self.hashcmp <= other.hashcmp

def __gt__(self, other):
    return self.hashcmp > other.hashcmp

def __ge__(self, other):
    return self.hashcmp >= other.hashcmp

def __eq__(self, other):
    if not isinstance(other, self.__class__):
        # It's not a Distribution, so they are not equal
        return False
    return self.hashcmp == other.hashcmp

def __ne__(self, other):
    return not self == other
# These properties have to be lazy so that we don't have to load any
# metadata until/unless it's actually needed. (i.e., some distributions
# may not know their name or version without loading PKG-INFO)

@property
def key(self):
    try:
        return self._key
    except AttributeError:
        self._key = key = self.project_name.lower()
        return key

@property
def parsed_version(self):
    if not hasattr(self, "_parsed_version"):
        try:
            self._parsed_version = parse_version(self.version)
        except packaging.version.InvalidVersion as ex:
            # Attach the offending project name to the error for context.
            info = f"(package: {self.project_name})"
            if hasattr(ex, "add_note"):
                ex.add_note(info)  # PEP 678
                raise
            raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None

    return self._parsed_version

@property
def _forgiving_parsed_version(self):
    # Like parsed_version, but coerces PEP 440-invalid versions into a
    # best-effort parseable form while warning loudly about the deprecation.
    try:
        return self.parsed_version
    except packaging.version.InvalidVersion as ex:
        self._parsed_version = parse_version(_forgiving_version(self.version))

        notes = "\n".join(getattr(ex, "__notes__", []))  # PEP 678
        msg = f"""!!\n\n
        *************************************************************************
        {str(ex)}\n{notes}

        This is a long overdue deprecation.
        For the time being, `pkg_resources` will use `{self._parsed_version}`
        as a replacement to avoid breaking existing environments,
        but no future compatibility is guaranteed.

        If you maintain package {self.project_name} you should implement
        the relevant changes to adequate the project to PEP 440 immediately.
        *************************************************************************
        \n\n!!
        """
        warnings.warn(msg, DeprecationWarning)

        return self._parsed_version

@property
def version(self):
    try:
        return self._version
    except AttributeError as e:
        version = self._get_version()
        if version is None:
            path = self._get_metadata_path_for_display(self.PKG_INFO)
            msg = ("Missing 'Version:' header and/or {} file at path: {}").format(
                self.PKG_INFO, path
            )
            raise ValueError(msg, self) from e

        return version
@property
def _dep_map(self):
    """
    A map of extra to its list of (direct) requirements
    for this distribution, including the null extra.
    """
    try:
        return self.__dep_map
    except AttributeError:
        self.__dep_map = self._filter_extras(self._build_dep_map())
    return self.__dep_map

@staticmethod
def _filter_extras(dm):
    """
    Given a mapping of extras to dependencies, strip off
    environment markers and filter out any dependencies
    not matching the markers.
    """
    for extra in list(filter(None, dm)):
        new_extra = extra
        reqs = dm.pop(extra)
        # Extras may carry a marker suffix: "extra:marker-expression".
        new_extra, _, marker = extra.partition(':')
        fails_marker = marker and (
            invalid_marker(marker) or not evaluate_marker(marker)
        )
        if fails_marker:
            reqs = []
        new_extra = safe_extra(new_extra) or None

        dm.setdefault(new_extra, []).extend(reqs)
    return dm

def _build_dep_map(self):
    dm = {}
    for name in 'requires.txt', 'depends.txt':
        for extra, reqs in split_sections(self._get_metadata(name)):
            dm.setdefault(extra, []).extend(parse_requirements(reqs))
    return dm

def requires(self, extras=()):
    """List of Requirements needed for this distro if `extras` are used"""
    dm = self._dep_map
    deps = []
    deps.extend(dm.get(None, ()))
    for ext in extras:
        try:
            deps.extend(dm[safe_extra(ext)])
        except KeyError as e:
            raise UnknownExtra(
                "%s has no such extra feature %r" % (self, ext)
            ) from e
    return deps
def _get_metadata_path_for_display(self, name):
    """
    Return the path to the given metadata file, if available.
    """
    try:
        # We need to access _get_metadata_path() on the provider object
        # directly rather than through this class's __getattr__()
        # since _get_metadata_path() is marked private.
        path = self._provider._get_metadata_path(name)

    # Handle exceptions e.g. in case the distribution's metadata
    # provider doesn't support _get_metadata_path().
    except Exception:
        return '[could not detect]'

    return path

def _get_metadata(self, name):
    # Generator over metadata lines; yields nothing if the file is absent.
    if self.has_metadata(name):
        for line in self.get_metadata_lines(name):
            yield line

def _get_version(self):
    lines = self._get_metadata(self.PKG_INFO)
    version = _version_from_file(lines)

    return version
def activate(self, path=None, replace=False):
    """Ensure distribution is importable on `path` (default=sys.path)"""
    if path is None:
        path = sys.path
    self.insert_on(path, replace=replace)
    if path is sys.path:
        # Only touch namespace-package state when mutating the real sys.path.
        fixup_namespace_packages(self.location)
        for pkg in self._get_metadata('namespace_packages.txt'):
            if pkg in sys.modules:
                declare_namespace(pkg)

def egg_name(self):
    """Return what this distribution's standard .egg filename should be"""
    filename = "%s-%s-py%s" % (
        to_filename(self.project_name),
        to_filename(self.version),
        self.py_version or PY_MAJOR,
    )

    if self.platform:
        filename += '-' + self.platform
    return filename

def __repr__(self):
    if self.location:
        return "%s (%s)" % (self, self.location)
    else:
        return str(self)

def __str__(self):
    try:
        # version may raise ValueError when metadata is missing (see the
        # ``version`` property); degrade to a placeholder instead.
        version = getattr(self, 'version', None)
    except ValueError:
        version = None
    version = version or "[unknown version]"
    return "%s %s" % (self.project_name, version)
def __getattr__(self, attr):
    """Delegate all unrecognized public attributes to .metadata provider"""
    if attr.startswith('_'):
        # never proxy private names; keeps pickling/copying sane
        raise AttributeError(attr)
    return getattr(self._provider, attr)

def __dir__(self):
    # Merge our own attributes with the provider's public ones so that
    # interactive introspection reflects the __getattr__ delegation above.
    return list(
        set(super(Distribution, self).__dir__())
        | set(attr for attr in self._provider.__dir__() if not attr.startswith('_'))
    )

@classmethod
def from_filename(cls, filename, metadata=None, **kw):
    return cls.from_location(
        _normalize_cached(filename), os.path.basename(filename), metadata, **kw
    )
def as_requirement(self):
    """Return a ``Requirement`` that matches this distribution exactly"""
    if isinstance(self.parsed_version, packaging.version.Version):
        spec = "%s==%s" % (self.project_name, self.parsed_version)
    else:
        # non-PEP-440 versions need the arbitrary-equality operator
        spec = "%s===%s" % (self.project_name, self.parsed_version)

    return Requirement.parse(spec)

def load_entry_point(self, group, name):
    """Return the `name` entry point of `group` or raise ImportError"""
    ep = self.get_entry_info(group, name)
    if ep is None:
        raise ImportError("Entry point %r not found" % ((group, name),))
    return ep.load()

def get_entry_map(self, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    try:
        ep_map = self._ep_map
    except AttributeError:
        # Parse and cache entry_points.txt on first access.
        ep_map = self._ep_map = EntryPoint.parse_map(
            self._get_metadata('entry_points.txt'), self
        )
    if group is not None:
        return ep_map.get(group, {})
    return ep_map

def get_entry_info(self, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    return self.get_entry_map(group).get(name)
# FIXME: 'Distribution.insert_on' is too complex (13)
def insert_on(self, path, loc=None, replace=False):  # noqa: C901
    """Ensure self.location is on path

    If replace=False (default):
        - If location is already in path anywhere, do nothing.
        - Else:
          - If it's an egg and its parent directory is on path,
            insert just ahead of the parent.
          - Else: add to the end of path.
    If replace=True:
        - If location is already on path anywhere (not eggs)
          or higher priority than its parent (eggs)
          do nothing.
        - Else:
          - If it's an egg and its parent directory is on path,
            insert just ahead of the parent,
            removing any lower-priority entries.
          - Else: add it to the front of path.
    """
    loc = loc or self.location
    if not loc:
        return

    nloc = _normalize_cached(loc)
    bdir = os.path.dirname(nloc)
    # normalized shadow of `path`, kept in sync with it below
    npath = [(p and _normalize_cached(p) or p) for p in path]

    for p, item in enumerate(npath):
        if item == nloc:
            if replace:
                break
            else:
                # don't modify path (even removing duplicates) if
                # found and not replace
                return
        elif item == bdir and self.precedence == EGG_DIST:
            # if it's an .egg, give it precedence over its directory
            # UNLESS it's already been added to sys.path and replace=False
            if (not replace) and nloc in npath[p:]:
                return
            if path is sys.path:
                self.check_version_conflict()
            path.insert(p, loc)
            npath.insert(p, nloc)
            break
    else:
        if path is sys.path:
            self.check_version_conflict()
        if replace:
            path.insert(0, loc)
        else:
            path.append(loc)
        return

    # p is the spot where we found or inserted loc; now remove duplicates
    while True:
        try:
            np = npath.index(nloc, p + 1)
        except ValueError:
            break
        else:
            del npath[np], path[np]
            # ha!
            p = np

    return
def check_version_conflict(self):
    if self.key == 'setuptools':
        # ignore the inevitable setuptools self-conflicts :(
        return

    nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
    loc = normalize_path(self.location)
    for modname in self._get_metadata('top_level.txt'):
        if (
            modname not in sys.modules
            or modname in nsp
            or modname in _namespace_packages
        ):
            continue
        if modname in ('pkg_resources', 'setuptools', 'site'):
            continue
        fn = getattr(sys.modules[modname], '__file__', None)
        if fn and (
            normalize_path(fn).startswith(loc) or fn.startswith(self.location)
        ):
            # already imported from inside this distribution -- fine
            continue
        issue_warning(
            "Module %s was already imported from %s, but %s is being added"
            " to sys.path" % (modname, fn, self.location),
        )

def has_version(self):
    try:
        self.version
    except ValueError:
        issue_warning("Unbuilt egg for " + repr(self))
        return False
    except SystemError:
        # TODO: remove this except clause when python/cpython#103632 is fixed.
        return False
    return True

def clone(self, **kw):
    """Copy this distribution, substituting in any changed keyword args"""
    names = 'project_name version py_version platform location precedence'
    for attr in names.split():
        kw.setdefault(attr, getattr(self, attr, None))
    kw.setdefault('metadata', self._provider)
    return self.__class__(**kw)

@property
def extras(self):
    # all non-null extras declared by this distribution
    return [dep for dep in self._dep_map if dep]
class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Packages installed by distutils (e.g. numpy or scipy),
        which uses an old safe_version, and so
        their version numbers can get mangled when
        converted to filenames (e.g., 1.11.0.dev0+2329eae to
        1.11.0.dev0_2329eae). These distributions will not be
        parsed properly
        downstream by Distribution and safe_version, so
        take an extra step and try to get the version number from
        the metadata file itself instead of the filename.
        """
        md_version = self._get_version()
        if md_version:
            self._version = md_version
        return self
class DistInfoDistribution(Distribution):
    """
    Wrap an actual or potential sys.path entry
    w/metadata, .dist-info style.
    """

    PKG_INFO = 'METADATA'
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info

    @property
    def _dep_map(self):
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}

        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))

        def reqs_for_extra(extra):
            # yield requirements whose marker matches the given extra
            for req in reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None)))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            dm[s_extra] = [r for r in reqs_for_extra(extra) if r not in common]

        return dm
# Map metadata suffix -> Distribution subclass that knows how to read it.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}
def issue_warning(*args, **kw):
    # Emit a warning attributed to the first caller outside this module.
    level = 1
    g = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        # ran out of frames; fall back to whatever level we reached
        pass
    warnings.warn(stacklevel=level + 1, *args, **kw)
def parse_requirements(strs):
    """
    Yield ``Requirement`` objects for each specification in `strs`.

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    return map(Requirement, join_continuation(map(drop_comment, yield_lines(strs))))


class RequirementParseError(packaging.requirements.InvalidRequirement):
    "Compatibility wrapper for InvalidRequirement"
class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        super(Requirement, self).__init__(requirement_string)
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name, self.key = project_name, project_name.lower()
        # legacy (operator, version) pairs derived from the specifier set
        self.specs = [(spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        # NOTE(review): hashCmp composition reconstructed from upstream;
        # confirm the middle fields against the installed version.
        self.hashCmp = (
            self.key,
            self.url,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        return isinstance(other, Requirement) and self.hashCmp == other.hashCmp

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False

            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        # exactly one requirement per string is expected here
        (req,) = parse_requirements(s)
        return req
3219 def _always_object(classes
):
3221 Ensure object appears in the mro even
3222 for old-style classes.
3224 if object not in classes
:
3225 return classes
+ (object,)
3229 def _find_adapter(registry
, ob
):
3230 """Return an adapter factory for `ob` from `registry`"""
3231 types
= _always_object(inspect
.getmro(getattr(ob
, '__class__', type(ob
))))
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    os.makedirs(parent, exist_ok=True)


def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if not (dirname and filename) or isdir(dirname):
        return
    # build ancestors first, then create this level
    _bypass_ensure_directory(dirname)
    try:
        mkdir(dirname, 0o755)
    except FileExistsError:
        pass
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if line.startswith("["):
            if line.endswith("]"):
                # flush the previous section before starting a new one
                if section or content:
                    yield section, content
                section = line[1:-1].strip()
                content = []
            else:
                raise ValueError("Invalid section heading", line)
        else:
            content.append(line)

    # wrap up last segment
    yield section, content
def _mkstemp(*args, **kw):
    # Create a temp file while the sandbox's os.open replacement is active.
    old_open = os.open
    try:
        # temporarily bypass sandboxing
        os.open = os_open
        return tempfile.mkstemp(*args, **kw)
    finally:
        # and then put it back
        os.open = old_open


# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
3300 # from jaraco.functools 1.3
3301 def _call_aside(f
, *args
, **kwargs
):
# NOTE(review): upstream decorates this with @_call_aside so it runs at
# import time; the decorator line is not visible here -- confirm.
@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # Re-export every public ResourceManager method as a module-level function.
    g.update(
        (name, getattr(manager, name))
        for name in dir(manager)
        if not name.startswith('_')
    )


class PkgResourcesDeprecationWarning(Warning):
    """
    Base class for warning about deprecations in ``pkg_resources``

    This class is not derived from ``DeprecationWarning``, and as such is
    visible by default.
    """
@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(dist.activate(replace=False) for dist in working_set)
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    # publish the locals defined above (require, run_script, ...) as module
    # globals, satisfying the placeholder declarations near the top of file
    globals().update(locals())