5
A resource is a logical file contained within a package, or a logical
6
subdirectory thereof. The package resource API expects resource names
7
to have their path parts separated with ``/``, *not* whatever the local
8
path separator is. Do not use os.path operations to manipulate resource
9
names being passed into the API.
11
The package resource API is designed to work with normal filesystem packages,
12
.egg files, and unpacked .egg files. It can also work in a limited way with
13
.zip files and with custom PEP 302 loaders that support the ``get_data()``
17
from __future__ import absolute_import
38
from pkgutil import get_importer
43
# Python 3.2 compatibility
46
from pkg_resources.extern import six
47
from pkg_resources.extern.six.moves import urllib, map, filter
49
# capture these to bypass sandboxing
52
from os import mkdir, rename, unlink
55
# no write support, probably under GAE
58
from os import open as os_open
59
from os.path import isdir, split
62
import importlib.machinery as importlib_machinery
63
# access attribute to force import under delayed import mechanisms.
64
importlib_machinery.__name__
66
importlib_machinery = None
68
from pkg_resources.extern import packaging
69
__import__('pkg_resources.extern.packaging.version')
70
__import__('pkg_resources.extern.packaging.specifiers')
71
__import__('pkg_resources.extern.packaging.requirements')
72
__import__('pkg_resources.extern.packaging.markers')
75
if (3, 0) < sys.version_info < (3, 3):
77
"Support for Python 3.0-3.2 has been dropped. Future versions "
82
# declare some globals that will be defined later to
83
# satisfy the linters.
88
class PEP440Warning(RuntimeWarning):
90
Used when there is an issue with a version or specifier not complying with
95
class _SetuptoolsVersionMixin(object):
98
return super(_SetuptoolsVersionMixin, self).__hash__()
100
def __lt__(self, other):
101
if isinstance(other, tuple):
102
return tuple(self) < other
104
return super(_SetuptoolsVersionMixin, self).__lt__(other)
106
def __le__(self, other):
107
if isinstance(other, tuple):
108
return tuple(self) <= other
110
return super(_SetuptoolsVersionMixin, self).__le__(other)
112
def __eq__(self, other):
113
if isinstance(other, tuple):
114
return tuple(self) == other
116
return super(_SetuptoolsVersionMixin, self).__eq__(other)
118
def __ge__(self, other):
119
if isinstance(other, tuple):
120
return tuple(self) >= other
122
return super(_SetuptoolsVersionMixin, self).__ge__(other)
124
def __gt__(self, other):
125
if isinstance(other, tuple):
126
return tuple(self) > other
128
return super(_SetuptoolsVersionMixin, self).__gt__(other)
130
def __ne__(self, other):
131
if isinstance(other, tuple):
132
return tuple(self) != other
134
return super(_SetuptoolsVersionMixin, self).__ne__(other)
136
def __getitem__(self, key):
137
return tuple(self)[key]
140
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
149
def _parse_version_parts(s):
150
for part in component_re.split(s):
151
part = replace(part, part)
152
if not part or part == '.':
154
if part[:1] in '0123456789':
155
# pad for numeric comparison
160
# ensure that alpha/beta/candidate are before final
163
def old_parse_version(s):
165
for part in _parse_version_parts(s.lower()):
166
if part.startswith('*'):
167
# remove '-' before a prerelease tag
169
while parts and parts[-1] == '*final-':
171
# remove trailing zeros from each series of numeric parts
172
while parts and parts[-1] == '00000000':
177
# Warn for use of this function
179
"You have iterated over the result of "
180
"pkg_resources.parse_version. This is a legacy behavior which is "
181
"inconsistent with the new version class introduced in setuptools "
182
"8.0. In most cases, conversion to a tuple is unnecessary. For "
183
"comparison of versions, sort the Version instances directly. If "
184
"you have another use case requiring the tuple, please file a "
185
"bug with the setuptools project describing that need.",
190
for part in old_parse_version(str(self)):
194
class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):
198
class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,
199
packaging.version.LegacyVersion):
203
def parse_version(v):
205
return SetuptoolsVersion(v)
206
except packaging.version.InvalidVersion:
207
return SetuptoolsLegacyVersion(v)
212
def _declare_state(vartype, **kw):
214
_state_vars.update(dict.fromkeys(kw, vartype))
219
for k, v in _state_vars.items():
220
state[k] = g['_sget_'+v](g[k])
223
def __setstate__(state):
225
for k, v in state.items():
226
g['_sset_'+_state_vars[k]](k, g[k], v)
232
def _sset_dict(key, ob, state):
236
def _sget_object(val):
237
return val.__getstate__()
239
def _sset_object(key, ob, state):
240
ob.__setstate__(state)
242
_sget_none = _sset_none = lambda *args: None
245
def get_supported_platform():
246
"""Return this platform's maximum compatible version.
248
distutils.util.get_platform() normally reports the minimum version
249
of Mac OS X that would be required to *use* extensions produced by
250
distutils. But what we want when checking compatibility is to know the
251
version of Mac OS X that we are *running*. To allow usage of packages that
252
explicitly require a newer version of Mac OS X, we must also know the
253
current version of the OS.
255
If this condition occurs for any other platform with a version in its
256
platform strings, this function should be extended accordingly.
258
plat = get_build_platform()
259
m = macosVersionString.match(plat)
260
if m is not None and sys.platform == "darwin":
262
plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
269
# Basic resource access and distribution/entry point discovery
270
'require', 'run_script', 'get_provider', 'get_distribution',
271
'load_entry_point', 'get_entry_map', 'get_entry_info',
273
'resource_string', 'resource_stream', 'resource_filename',
274
'resource_listdir', 'resource_exists', 'resource_isdir',
276
# Environmental control
277
'declare_namespace', 'working_set', 'add_activation_listener',
278
'find_distributions', 'set_extraction_path', 'cleanup_resources',
281
# Primary implementation classes
282
'Environment', 'WorkingSet', 'ResourceManager',
283
'Distribution', 'Requirement', 'EntryPoint',
286
'ResolutionError', 'VersionConflict', 'DistributionNotFound',
287
'UnknownExtra', 'ExtractionError',
292
# Parsing functions and string utilities
293
'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
294
'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
295
'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
297
# filesystem utilities
298
'ensure_directory', 'normalize_path',
300
# Distribution "precedence" constants
301
'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
303
# "Provider" interfaces, implementations, and registration/lookup APIs
304
'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
305
'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
306
'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
307
'register_finder', 'register_namespace_handler', 'register_loader_type',
308
'fixup_namespace_packages', 'get_importer',
310
# Deprecated/backward compatibility only
311
'run_main', 'AvailableDistributions',
314
class ResolutionError(Exception):
315
"""Abstract base for dependency resolution errors"""
317
return self.__class__.__name__+repr(self.args)
320
class VersionConflict(ResolutionError):
322
An already-installed version conflicts with the requested version.
324
Should be initialized with the installed Distribution and the requested
328
_template = "{self.dist} is installed but {self.req} is required"
339
return self._template.format(**locals())
341
def with_context(self, required_by):
343
If required_by is non-empty, return a version of self that is a
344
ContextualVersionConflict.
348
args = self.args + (required_by,)
349
return ContextualVersionConflict(*args)
352
class ContextualVersionConflict(VersionConflict):
354
A VersionConflict that accepts a third parameter, the set of the
355
requirements that required the installed Distribution.
358
_template = VersionConflict._template + ' by {self.required_by}'
361
def required_by(self):
365
class DistributionNotFound(ResolutionError):
366
"""A requested distribution was not found"""
368
_template = ("The '{self.req}' distribution was not found "
369
"and is required by {self.requirers_str}")
380
def requirers_str(self):
381
if not self.requirers:
382
return 'the application'
383
return ', '.join(self.requirers)
386
return self._template.format(**locals())
392
class UnknownExtra(ResolutionError):
393
"""Distribution doesn't have an "extra feature" of the given name"""
394
_provider_factories = {}
396
PY_MAJOR = sys.version[:3]
403
def register_loader_type(loader_type, provider_factory):
404
"""Register `provider_factory` to make providers for `loader_type`
406
`loader_type` is the type or class of a PEP 302 ``module.__loader__``,
407
and `provider_factory` is a function that, passed a *module* object,
408
returns an ``IResourceProvider`` for that module.
410
_provider_factories[loader_type] = provider_factory
412
def get_provider(moduleOrReq):
413
"""Return an IResourceProvider for the named module or requirement"""
414
if isinstance(moduleOrReq, Requirement):
415
return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
417
module = sys.modules[moduleOrReq]
419
__import__(moduleOrReq)
420
module = sys.modules[moduleOrReq]
421
loader = getattr(module, '__loader__', None)
422
return _find_adapter(_provider_factories, loader)(module)
424
def _macosx_vers(_cache=[]):
426
version = platform.mac_ver()[0]
427
# fallback for MacPorts
429
plist = '/System/Library/CoreServices/SystemVersion.plist'
430
if os.path.exists(plist):
431
if hasattr(plistlib, 'readPlist'):
432
plist_content = plistlib.readPlist(plist)
433
if 'ProductVersion' in plist_content:
434
version = plist_content['ProductVersion']
436
_cache.append(version.split('.'))
439
def _macosx_arch(machine):
440
return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
442
def get_build_platform():
443
"""Return this platform's string for platform-specific distributions
445
XXX Currently this is the same as ``distutils.util.get_platform()``, but it
446
needs some hacks for Linux and Mac OS X.
449
# Python 2.7 or >=3.2
450
from sysconfig import get_platform
452
from distutils.util import get_platform
454
plat = get_platform()
455
if sys.platform == "darwin" and not plat.startswith('macosx-'):
457
version = _macosx_vers()
458
machine = os.uname()[4].replace(" ", "_")
459
return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
460
_macosx_arch(machine))
462
# if someone is running a non-Mac darwin system, this will fall
463
# through to the default implementation
467
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
468
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
469
# XXX backward compat
470
get_platform = get_build_platform
473
def compatible_platforms(provided, required):
474
"""Can code for the `provided` platform run on the `required` platform?
476
Returns true if either platform is ``None``, or the platforms are equal.
478
XXX Needs compatibility checks for Linux and other unixy OSes.
480
if provided is None or required is None or provided==required:
484
# Mac OS X special cases
485
reqMac = macosVersionString.match(required)
487
provMac = macosVersionString.match(provided)
489
# is this a Mac package?
491
# this is backwards compatibility for packages built before
492
# setuptools 0.6. All packages built after this point will
493
# use the new macosx designation.
494
provDarwin = darwinVersionString.match(provided)
496
dversion = int(provDarwin.group(1))
497
macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
498
if dversion == 7 and macosversion >= "10.3" or \
499
dversion == 8 and macosversion >= "10.4":
501
# egg isn't macosx or legacy darwin
504
# are they the same major version and machine type?
505
if provMac.group(1) != reqMac.group(1) or \
506
provMac.group(3) != reqMac.group(3):
509
# is the required OS major update >= the provided one?
510
if int(provMac.group(2)) > int(reqMac.group(2)):
515
# XXX Linux and other platforms' special cases should go here
519
def run_script(dist_spec, script_name):
520
"""Locate distribution `dist_spec` and run its `script_name` script"""
521
ns = sys._getframe(1).f_globals
522
name = ns['__name__']
524
ns['__name__'] = name
525
require(dist_spec)[0].run_script(script_name, ns)
527
# backward compatibility
528
run_main = run_script
530
def get_distribution(dist):
531
"""Return a current distribution object for a Requirement or string"""
532
if isinstance(dist, six.string_types):
533
dist = Requirement.parse(dist)
534
if isinstance(dist, Requirement):
535
dist = get_provider(dist)
536
if not isinstance(dist, Distribution):
537
raise TypeError("Expected string, Requirement, or Distribution", dist)
540
def load_entry_point(dist, group, name):
541
"""Return `name` entry point of `group` for `dist` or raise ImportError"""
542
return get_distribution(dist).load_entry_point(group, name)
544
def get_entry_map(dist, group=None):
545
"""Return the entry point map for `group`, or the full entry map"""
546
return get_distribution(dist).get_entry_map(group)
548
def get_entry_info(dist, group, name):
549
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
550
return get_distribution(dist).get_entry_info(group, name)
553
class IMetadataProvider:
555
def has_metadata(name):
556
"""Does the package's distribution contain the named metadata?"""
558
def get_metadata(name):
559
"""The named metadata resource as a string"""
561
def get_metadata_lines(name):
562
"""Yield named metadata resource as list of non-blank non-comment lines
564
Leading and trailing whitespace is stripped from each line, and lines
565
with ``#`` as the first non-blank character are omitted."""
567
def metadata_isdir(name):
568
"""Is the named metadata a directory? (like ``os.path.isdir()``)"""
570
def metadata_listdir(name):
571
"""List of metadata names in the directory (like ``os.listdir()``)"""
573
def run_script(script_name, namespace):
574
"""Execute the named script in the supplied namespace dictionary"""
577
class IResourceProvider(IMetadataProvider):
578
"""An object that provides access to package resources"""
580
def get_resource_filename(manager, resource_name):
581
"""Return a true filesystem path for `resource_name`
583
`manager` must be an ``IResourceManager``"""
585
def get_resource_stream(manager, resource_name):
586
"""Return a readable file-like object for `resource_name`
588
`manager` must be an ``IResourceManager``"""
590
def get_resource_string(manager, resource_name):
591
"""Return a string containing the contents of `resource_name`
593
`manager` must be an ``IResourceManager``"""
595
def has_resource(resource_name):
596
"""Does the package contain the named resource?"""
598
def resource_isdir(resource_name):
599
"""Is the named resource a directory? (like ``os.path.isdir()``)"""
601
def resource_listdir(resource_name):
602
"""List of resource names in the directory (like ``os.listdir()``)"""
605
class WorkingSet(object):
606
"""A collection of active distributions on sys.path (or a similar list)"""
608
def __init__(self, entries=None):
609
"""Create working set from list of path entries (default=sys.path)"""
618
for entry in entries:
619
self.add_entry(entry)
622
def _build_master(cls):
624
Prepare the master working set.
628
from __main__ import __requires__
630
# The main program does not list any requirements
633
# ensure the requirements are met
635
ws.require(__requires__)
636
except VersionConflict:
637
return cls._build_from_requirements(__requires__)
642
def _build_from_requirements(cls, req_spec):
644
Build a working set from a requirement spec. Rewrites sys.path.
646
# try it without defaults already on sys.path
647
# by starting with an empty path
649
reqs = parse_requirements(req_spec)
650
dists = ws.resolve(reqs, Environment())
654
# add any missing entries from sys.path
655
for entry in sys.path:
656
if entry not in ws.entries:
659
# then copy back to sys.path
660
sys.path[:] = ws.entries
663
def add_entry(self, entry):
664
"""Add a path item to ``.entries``, finding any distributions on it
666
``find_distributions(entry, True)`` is used to find distributions
667
corresponding to the path entry, and they are added. `entry` is
668
always appended to ``.entries``, even if it is already present.
669
(This is because ``sys.path`` can contain the same value more than
670
once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
673
self.entry_keys.setdefault(entry, [])
674
self.entries.append(entry)
675
for dist in find_distributions(entry, True):
676
self.add(dist, entry, False)
678
def __contains__(self, dist):
679
"""True if `dist` is the active distribution for its project"""
680
return self.by_key.get(dist.key) == dist
683
"""Find a distribution matching requirement `req`
685
If there is an active distribution for the requested project, this
686
returns it as long as it meets the version requirement specified by
687
`req`. But, if there is an active distribution for the project and it
688
does *not* meet the `req` requirement, ``VersionConflict`` is raised.
689
If there is no active distribution for the requested project, ``None``
692
dist = self.by_key.get(req.key)
693
if dist is not None and dist not in req:
695
raise VersionConflict(dist, req)
698
def iter_entry_points(self, group, name=None):
699
"""Yield entry point objects from `group` matching `name`
701
If `name` is None, yields all entry points in `group` from all
702
distributions in the working set, otherwise only ones matching
703
both `group` and `name` are yielded (in distribution order).
706
entries = dist.get_entry_map(group)
708
for ep in entries.values():
710
elif name in entries:
713
def run_script(self, requires, script_name):
714
"""Locate distribution for `requires` and run `script_name` script"""
715
ns = sys._getframe(1).f_globals
716
name = ns['__name__']
718
ns['__name__'] = name
719
self.require(requires)[0].run_script(script_name, ns)
722
"""Yield distributions for non-duplicate projects in the working set
724
The yield order is the order in which the items' path entries were
725
added to the working set.
728
for item in self.entries:
729
if item not in self.entry_keys:
730
# workaround a cache issue
733
for key in self.entry_keys[item]:
736
yield self.by_key[key]
738
def add(self, dist, entry=None, insert=True, replace=False):
739
"""Add `dist` to working set, associated with `entry`
741
If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
742
On exit from this routine, `entry` is added to the end of the working
743
set's ``.entries`` (if it wasn't already present).
745
`dist` is only added to the working set if it's for a project that
746
doesn't already have a distribution in the set, unless `replace=True`.
747
If it's added, any callbacks registered with the ``subscribe()`` method
751
dist.insert_on(self.entries, entry, replace=replace)
754
entry = dist.location
755
keys = self.entry_keys.setdefault(entry,[])
756
keys2 = self.entry_keys.setdefault(dist.location,[])
757
if not replace and dist.key in self.by_key:
758
# ignore hidden distros
761
self.by_key[dist.key] = dist
762
if dist.key not in keys:
763
keys.append(dist.key)
764
if dist.key not in keys2:
765
keys2.append(dist.key)
766
self._added_new(dist)
768
def resolve(self, requirements, env=None, installer=None,
769
replace_conflicting=False):
770
"""List all distributions needed to (recursively) meet `requirements`
772
`requirements` must be a sequence of ``Requirement`` objects. `env`,
773
if supplied, should be an ``Environment`` instance. If
774
not supplied, it defaults to all distributions available within any
775
entry or distribution in the working set. `installer`, if supplied,
776
will be invoked with each requirement that cannot be met by an
777
already-installed distribution; it should return a ``Distribution`` or
780
Unless `replace_conflicting=True`, raises a VersionConflict exception if
781
any requirements are found on the path that have the correct name but
782
the wrong version. Otherwise, if an `installer` is supplied it will be
783
invoked to obtain the correct version of the requirement and activate
788
requirements = list(requirements)[::-1]
789
# set of processed requirements
795
req_extras = _ReqExtras()
797
# Mapping of requirement to set of distributions that required it;
798
# useful for reporting info about conflicts.
799
required_by = collections.defaultdict(set)
802
# process dependencies breadth-first
803
req = requirements.pop(0)
805
# Ignore cyclic or redundant dependencies
808
if not req_extras.markers_pass(req):
811
dist = best.get(req.key)
813
# Find the best distribution and add it to the map
814
dist = self.by_key.get(req.key)
815
if dist is None or (dist not in req and replace_conflicting):
819
env = Environment(self.entries)
821
# Use an empty environment and workingset to avoid
822
# any further conflicts with the conflicting
824
env = Environment([])
826
dist = best[req.key] = env.best_match(req, ws, installer)
828
requirers = required_by.get(req, None)
829
raise DistributionNotFound(req, requirers)
830
to_activate.append(dist)
832
# Oops, the "best" so far conflicts with a dependency
833
dependent_req = required_by[req]
834
raise VersionConflict(dist, req).with_context(dependent_req)
836
# push the new requirements onto the stack
837
new_requirements = dist.requires(req.extras)[::-1]
838
requirements.extend(new_requirements)
840
# Register the new requirements needed by req
841
for new_requirement in new_requirements:
842
required_by[new_requirement].add(req.project_name)
843
req_extras[new_requirement] = req.extras
845
processed[req] = True
847
# return list of distros to activate
850
def find_plugins(self, plugin_env, full_env=None, installer=None,
852
"""Find all activatable distributions in `plugin_env`
856
distributions, errors = working_set.find_plugins(
857
Environment(plugin_dirlist)
859
# add plugins+libs to sys.path
860
map(working_set.add, distributions)
862
print('Could not load', errors)
864
The `plugin_env` should be an ``Environment`` instance that contains
865
only distributions that are in the project's "plugin directory" or
866
directories. The `full_env`, if supplied, should be an ``Environment``
867
contains all currently-available distributions. If `full_env` is not
868
supplied, one is created automatically from the ``WorkingSet`` this
869
method is called on, which will typically mean that every directory on
870
``sys.path`` will be scanned for distributions.
872
`installer` is a standard installer callback as used by the
873
``resolve()`` method. The `fallback` flag indicates whether we should
874
attempt to resolve older versions of a plugin if the newest version
877
This method returns a 2-tuple: (`distributions`, `error_info`), where
878
`distributions` is a list of the distributions found in `plugin_env`
879
that were loadable, along with any other distributions that are needed
880
to resolve their dependencies. `error_info` is a dictionary mapping
881
unloadable plugin distributions to an exception instance describing the
882
error that occurred. Usually this will be a ``DistributionNotFound`` or
883
``VersionConflict`` instance.
886
plugin_projects = list(plugin_env)
887
# scan project names in alphabetic order
888
plugin_projects.sort()
894
env = Environment(self.entries)
897
env = full_env + plugin_env
899
shadow_set = self.__class__([])
900
# put all our entries in shadow_set
901
list(map(shadow_set.add, self))
903
for project_name in plugin_projects:
905
for dist in plugin_env[project_name]:
907
req = [dist.as_requirement()]
910
resolvees = shadow_set.resolve(req, env, installer)
912
except ResolutionError as v:
916
# try the next older version of project
919
# give up on this project, keep going
923
list(map(shadow_set.add, resolvees))
924
distributions.update(dict.fromkeys(resolvees))
926
# success, no need to try any more versions of this project
929
distributions = list(distributions)
932
return distributions, error_info
934
def require(self, *requirements):
935
"""Ensure that distributions matching `requirements` are activated
937
`requirements` must be a string or a (possibly-nested) sequence
938
thereof, specifying the distributions and versions required. The
939
return value is a sequence of the distributions that needed to be
940
activated to fulfill the requirements; all relevant distributions are
941
included, even if they were already activated in this working set.
943
needed = self.resolve(parse_requirements(requirements))
950
def subscribe(self, callback):
951
"""Invoke `callback` for all distributions (including existing ones)"""
952
if callback in self.callbacks:
954
self.callbacks.append(callback)
958
def _added_new(self, dist):
959
for callback in self.callbacks:
962
def __getstate__(self):
964
self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
968
def __setstate__(self, e_k_b_c):
969
entries, keys, by_key, callbacks = e_k_b_c
970
self.entries = entries[:]
971
self.entry_keys = keys.copy()
972
self.by_key = by_key.copy()
973
self.callbacks = callbacks[:]
976
class _ReqExtras(dict):
978
Map each requirement to the extras that demanded it.
981
def markers_pass(self, req):
983
Evaluate markers for req against each extra that
986
Return False if the req has a marker and fails
987
evaluation. Otherwise, return True.
990
req.marker.evaluate({'extra': extra})
991
for extra in self.get(req, ())
993
return not req.marker or any(extra_evals) or req.marker.evaluate()
996
class Environment(object):
997
"""Searchable snapshot of distributions on a search path"""
999
def __init__(self, search_path=None, platform=get_supported_platform(),
1001
"""Snapshot distributions available on a search path
1003
Any distributions found on `search_path` are added to the environment.
1004
`search_path` should be a sequence of ``sys.path`` items. If not
1005
supplied, ``sys.path`` is used.
1007
`platform` is an optional string specifying the name of the platform
1008
that platform-specific distributions must be compatible with. If
1009
unspecified, it defaults to the current platform. `python` is an
1010
optional string naming the desired version of Python (e.g. ``'3.3'``);
1011
it defaults to the current version.
1013
You may explicitly set `platform` (and/or `python`) to ``None`` if you
1014
wish to map *all* distributions, not just those compatible with the
1015
running platform or Python version.
1018
self.platform = platform
1019
self.python = python
1020
self.scan(search_path)
1022
def can_add(self, dist):
1023
"""Is distribution `dist` acceptable for this environment?
1025
The distribution must match the platform and python version
1026
requirements specified when this environment was created, or False
1029
return (self.python is None or dist.py_version is None
1030
or dist.py_version==self.python) \
1031
and compatible_platforms(dist.platform, self.platform)
1033
def remove(self, dist):
1034
"""Remove `dist` from the environment"""
1035
self._distmap[dist.key].remove(dist)
1037
def scan(self, search_path=None):
1038
"""Scan `search_path` for distributions usable in this environment
1040
Any distributions found are added to the environment.
1041
`search_path` should be a sequence of ``sys.path`` items. If not
1042
supplied, ``sys.path`` is used. Only distributions conforming to
1043
the platform/python version defined at initialization are added.
1045
if search_path is None:
1046
search_path = sys.path
1048
for item in search_path:
1049
for dist in find_distributions(item):
1052
def __getitem__(self, project_name):
1053
"""Return a newest-to-oldest list of distributions for `project_name`
1055
Uses case-insensitive `project_name` comparison, assuming all the
1056
project's distributions use their project's name converted to all
1057
lowercase as their key.
1060
distribution_key = project_name.lower()
1061
return self._distmap.get(distribution_key, [])
1063
def add(self, dist):
1064
"""Add `dist` if we ``can_add()`` it and it has not already been added
1066
if self.can_add(dist) and dist.has_version():
1067
dists = self._distmap.setdefault(dist.key, [])
1068
if dist not in dists:
1070
dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
1072
def best_match(self, req, working_set, installer=None):
1073
"""Find distribution best matching `req` and usable on `working_set`
1075
This calls the ``find(req)`` method of the `working_set` to see if a
1076
suitable distribution is already active. (This may raise
1077
``VersionConflict`` if an unsuitable version of the project is already
1078
active in the specified `working_set`.) If a suitable distribution
1079
isn't active, this method returns the newest distribution in the
1080
environment that meets the ``Requirement`` in `req`. If no suitable
1081
distribution is found, and `installer` is supplied, then the result of
1082
calling the environment's ``obtain(req, installer)`` method will be
1085
dist = working_set.find(req)
1086
if dist is not None:
1088
for dist in self[req.key]:
1091
# try to download/install
1092
return self.obtain(req, installer)
1094
def obtain(self, requirement, installer=None):
1095
"""Obtain a distribution matching `requirement` (e.g. via download)
1097
Obtain a distro that matches requirement (e.g. via download). In the
1098
base ``Environment`` class, this routine just returns
1099
``installer(requirement)``, unless `installer` is None, in which case
1100
None is returned instead. This method is a hook that allows subclasses
1101
to attempt other ways of obtaining a distribution before falling back
1102
to the `installer` argument."""
1103
if installer is not None:
1104
return installer(requirement)
1107
"""Yield the unique project names of the available distributions"""
1108
for key in self._distmap.keys():
1112
def __iadd__(self, other):
1113
"""In-place addition of a distribution or environment"""
1114
if isinstance(other, Distribution):
1116
elif isinstance(other, Environment):
1117
for project in other:
1118
for dist in other[project]:
1121
raise TypeError("Can't add %r to environment" % (other,))
1124
def __add__(self, other):
1125
"""Add an environment or distribution to an environment"""
1126
new = self.__class__([], platform=None, python=None)
1127
for env in self, other:
1132
# XXX backward compatibility
1133
AvailableDistributions = Environment
1136
class ExtractionError(RuntimeError):
1137
"""An error occurred extracting a resource
1139
The following attributes are available from instances of this exception:
1142
The resource manager that raised this exception
1145
The base directory for resource extraction
1148
The exception instance that caused extraction to fail
1152
class ResourceManager:
1153
"""Manage resource extraction and packages"""
1154
extraction_path = None
1157
self.cached_files = {}
1159
def resource_exists(self, package_or_requirement, resource_name):
1160
"""Does the named resource exist?"""
1161
return get_provider(package_or_requirement).has_resource(resource_name)
1163
def resource_isdir(self, package_or_requirement, resource_name):
1164
"""Is the named resource an existing directory?"""
1165
return get_provider(package_or_requirement).resource_isdir(
1169
def resource_filename(self, package_or_requirement, resource_name):
1170
"""Return a true filesystem path for specified resource"""
1171
return get_provider(package_or_requirement).get_resource_filename(
1175
def resource_stream(self, package_or_requirement, resource_name):
1176
"""Return a readable file-like object for specified resource"""
1177
return get_provider(package_or_requirement).get_resource_stream(
1181
def resource_string(self, package_or_requirement, resource_name):
1182
"""Return specified resource as a string"""
1183
return get_provider(package_or_requirement).get_resource_string(
1187
def resource_listdir(self, package_or_requirement, resource_name):
1188
"""List the contents of the named resource directory"""
1189
return get_provider(package_or_requirement).resource_listdir(
1193
def extraction_error(self):
1194
"""Give an error message for problems extracting file(s)"""
1196
old_exc = sys.exc_info()[1]
1197
cache_path = self.extraction_path or get_default_cache()
1199
tmpl = textwrap.dedent("""
1200
Can't extract file(s) to egg cache
1202
The following error occurred while trying to extract file(s) to the Python egg
1207
The Python egg cache directory is currently set to:
1211
Perhaps your account does not have write access to this directory? You can
1212
change the cache directory by setting the PYTHON_EGG_CACHE environment
1213
variable to point to an accessible directory.
1215
err = ExtractionError(tmpl.format(**locals()))
1217
err.cache_path = cache_path
1218
err.original_error = old_exc
1221
def get_cache_path(self, archive_name, names=()):
1222
"""Return absolute location in cache for `archive_name` and `names`
1224
The parent directory of the resulting path will be created if it does
1225
not already exist. `archive_name` should be the base filename of the
1226
enclosing egg (which may not be the name of the enclosing zipfile!),
1227
including its ".egg" extension. `names`, if provided, should be a
1228
sequence of path name parts "under" the egg's extraction location.
1230
This method should only be called by resource providers that need to
1231
obtain an extraction location, and only for names they intend to
1232
extract, as it tracks the generated names for possible cleanup later.
1234
extract_path = self.extraction_path or get_default_cache()
1235
target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
1237
_bypass_ensure_directory(target_path)
1239
self.extraction_error()
1241
self._warn_unsafe_extraction_path(extract_path)
1243
self.cached_files[target_path] = 1
1247
def _warn_unsafe_extraction_path(path):
1249
If the default extraction path is overridden and set to an insecure
1250
location, such as /tmp, it opens up an opportunity for an attacker to
1251
replace an extracted file with an unauthorized payload. Warn the user
1252
if a known insecure location is used.
1254
See Distribute #375 for more details.
1256
if os.name == 'nt' and not path.startswith(os.environ['windir']):
1257
# On Windows, permissions are generally restrictive by default
1258
# and temp directories are not writable by other users, so
1259
# bypass the warning.
1261
mode = os.stat(path).st_mode
1262
if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
1263
msg = ("%s is writable by group/others and vulnerable to attack "
1265
"used with get_resource_filename. Consider a more secure "
1266
"location (set with .set_extraction_path or the "
1267
"PYTHON_EGG_CACHE environment variable)." % path)
1268
warnings.warn(msg, UserWarning)
1270
def postprocess(self, tempname, filename):
    """Perform any platform-specific postprocessing of `tempname`.

    This is where Mac header rewrites should be done; other platforms don't
    have anything special they should do.

    Resource providers should call this method ONLY after successfully
    extracting a compressed resource. They must NOT call it on resources
    that are already in the filesystem.

    `tempname` is the current (temporary) name of the file; `filename` is
    the name the caller will rename it to afterwards.
    """
    if os.name != 'posix':
        return
    # Make the resource executable, masked down to valid permission bits.
    current_mode = os.stat(tempname).st_mode
    os.chmod(tempname, (current_mode | 0o555) & 0o7777)
1290
def set_extraction_path(self, path):
1291
"""Set the base path where resources will be extracted to, if needed.
1293
If you do not call this routine before any extractions take place, the
1294
path defaults to the return value of ``get_default_cache()``. (Which
1295
is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
1296
platform-specific fallbacks. See that routine's documentation for more
1299
Resources are extracted to subdirectories of this path based upon
1300
information given by the ``IResourceProvider``. You may set this to a
1301
temporary directory, but then you must call ``cleanup_resources()`` to
1302
delete the extracted files when done. There is no guarantee that
1303
``cleanup_resources()`` will be able to remove all extracted files.
1305
(Note: you may not change the extraction path for a given resource
1306
manager once resources have been extracted, unless you first call
1307
``cleanup_resources()``.)
1309
if self.cached_files:
1311
"Can't change extraction path, files already extracted"
1314
self.extraction_path = path
1316
def cleanup_resources(self, force=False):
1318
Delete all extracted resource files and directories, returning a list
1319
of the file and directory names that could not be successfully removed.
1320
This function does not have any concurrency protection, so it should
1321
generally only be called when the extraction path is a temporary
1322
directory exclusive to a single process. This method is not
1323
automatically called; you must call it explicitly or register it as an
1324
``atexit`` function if you wish to ensure cleanup of a temporary
1325
directory used for extractions.
1329
def get_default_cache():
1330
"""Determine the default cache location
1332
This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
1333
Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
1334
"Application Data" directory. On all other systems, it's "~/.python-eggs".
1337
return os.environ['PYTHON_EGG_CACHE']
1342
return os.path.expanduser('~/.python-eggs')
1344
# XXX this may be locale-specific!
1345
app_data = 'Application Data'
1347
# best option, should be locale-safe
1348
(('APPDATA',), None),
1349
(('USERPROFILE',), app_data),
1350
(('HOMEDRIVE','HOMEPATH'), app_data),
1351
(('HOMEPATH',), app_data),
1354
(('WINDIR',), app_data),
1357
for keys, subdir in app_homes:
1360
if key in os.environ:
1361
dirname = os.path.join(dirname, os.environ[key])
1366
dirname = os.path.join(dirname, subdir)
1367
return os.path.join(dirname, 'Python-Eggs')
1370
"Please set the PYTHON_EGG_CACHE enviroment variable"
1373
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name.

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    cleaned = re.sub('[^A-Za-z0-9.]+', '-', name)
    return cleaned
1381
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string.
    """
    try:
        # Prefer the canonical normalized form from the packaging library.
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        # Legacy fallback: collapse spaces and invalid runs for old-style versions.
        version = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', version)
1393
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name.

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    normalized = re.sub('[^A-Za-z0-9.]+', '_', extra)
    return normalized.lower()
1402
def to_filename(name):
    """Convert a project or version name to its filename-escaped form.

    Any '-' characters are currently replaced with '_'.
    """
    return name.replace('-', '_')
1410
def invalid_marker(text):
1412
Validate text as a PEP 508 environment marker; return an exception
1413
if invalid or False otherwise.
1416
evaluate_marker(text)
1417
except SyntaxError as e:
1424
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        marker = packaging.markers.Marker(text)
        return marker.evaluate()
    except packaging.markers.InvalidMarker as e:
        # Normalize the packaging-specific error to the documented SyntaxError.
        raise SyntaxError(e)
1440
"""Try to implement resources and metadata for arbitrary PEP 302 loaders"""
1446
def __init__(self, module):
1447
self.loader = getattr(module, '__loader__', None)
1448
self.module_path = os.path.dirname(getattr(module, '__file__', ''))
1450
def get_resource_filename(self, manager, resource_name):
    """Return a filesystem path for `resource_name` within this package."""
    resolved = self._fn(self.module_path, resource_name)
    return resolved
1453
def get_resource_stream(self, manager, resource_name):
    """Return a readable binary file-like object for the named resource."""
    data = self.get_resource_string(manager, resource_name)
    return io.BytesIO(data)
1456
def get_resource_string(self, manager, resource_name):
    """Return the raw bytes of the named resource."""
    path = self._fn(self.module_path, resource_name)
    return self._get(path)
1459
def has_resource(self, resource_name):
    """Return True if the named resource exists."""
    path = self._fn(self.module_path, resource_name)
    return self._has(path)
1462
def has_metadata(self, name):
    """Return whether metadata `name` exists.

    When there is no egg-info, the (falsy) egg_info value itself is
    returned, matching the original truthiness semantics.
    """
    if not self.egg_info:
        return self.egg_info
    return self._has(self._fn(self.egg_info, name))
1465
if sys.version_info <= (3,):
1466
def get_metadata(self, name):
1467
if not self.egg_info:
1469
return self._get(self._fn(self.egg_info, name))
1471
def get_metadata(self, name):
1472
if not self.egg_info:
1474
return self._get(self._fn(self.egg_info, name)).decode("utf-8")
1476
def get_metadata_lines(self, name):
    """Yield non-blank, non-comment lines of metadata `name`."""
    text = self.get_metadata(name)
    return yield_lines(text)
1479
def resource_isdir(self, resource_name):
    """Return True if the named resource is a directory."""
    path = self._fn(self.module_path, resource_name)
    return self._isdir(path)
1482
def metadata_isdir(self, name):
    """Return whether metadata `name` is a directory (falsy w/o egg-info)."""
    if not self.egg_info:
        return self.egg_info
    return self._isdir(self._fn(self.egg_info, name))
1485
def resource_listdir(self, resource_name):
    """List the contents of the named resource directory."""
    target = self._fn(self.module_path, resource_name)
    return self._listdir(target)
1488
def metadata_listdir(self, name):
1490
return self._listdir(self._fn(self.egg_info, name))
1493
def run_script(self, script_name, namespace):
1494
script = 'scripts/'+script_name
1495
if not self.has_metadata(script):
1496
raise ResolutionError("No script named %r" % script_name)
1497
script_text = self.get_metadata(script).replace('\r\n', '\n')
1498
script_text = script_text.replace('\r', '\n')
1499
script_filename = self._fn(self.egg_info, script)
1500
namespace['__file__'] = script_filename
1501
if os.path.exists(script_filename):
1502
source = open(script_filename).read()
1503
code = compile(source, script_filename, 'exec')
1504
exec(code, namespace, namespace)
1506
from linecache import cache
1507
cache[script_filename] = (
1508
len(script_text), 0, script_text.split('\n'), script_filename
1510
script_code = compile(script_text, script_filename,'exec')
1511
exec(script_code, namespace, namespace)
1513
def _has(self, path):
1514
raise NotImplementedError(
1515
"Can't perform this operation for unregistered loader type"
1518
def _isdir(self, path):
1519
raise NotImplementedError(
1520
"Can't perform this operation for unregistered loader type"
1523
def _listdir(self, path):
1524
raise NotImplementedError(
1525
"Can't perform this operation for unregistered loader type"
1528
def _fn(self, base, resource_name):
1530
return os.path.join(base, *resource_name.split('/'))
1533
def _get(self, path):
1534
if hasattr(self.loader, 'get_data'):
1535
return self.loader.get_data(path)
1536
raise NotImplementedError(
1537
"Can't perform this operation for loaders without 'get_data()'"
1540
register_loader_type(object, NullProvider)
1543
class EggProvider(NullProvider):
1544
"""Provider based on a virtual filesystem"""
1546
def __init__(self, module):
    """Initialize from `module`, then locate the enclosing egg (if any)."""
    NullProvider.__init__(self, module)
    self._setup_prefix()
1550
def _setup_prefix(self):
1551
# we assume here that our metadata may be nested inside a "basket"
1552
# of multiple eggs; that's why we use module_path instead of .archive
1553
path = self.module_path
1556
if _is_unpacked_egg(path):
1557
self.egg_name = os.path.basename(path)
1558
self.egg_info = os.path.join(path, 'EGG-INFO')
1559
self.egg_root = path
1562
path, base = os.path.split(path)
1564
class DefaultProvider(EggProvider):
1565
"""Provides access to package resources in the filesystem"""
1567
def _has(self, path):
    """Return True if `path` exists on the filesystem."""
    return os.path.exists(path)
1570
def _isdir(self, path):
    """Return True if `path` is a filesystem directory."""
    return os.path.isdir(path)
1573
def _listdir(self, path):
    """Return the directory entries at `path`."""
    return os.listdir(path)
1576
def get_resource_stream(self, manager, resource_name):
    """Open the named resource directly from the filesystem in binary mode."""
    resolved = self._fn(self.module_path, resource_name)
    return open(resolved, 'rb')
1579
def _get(self, path):
    """Read and return the full binary contents of `path`."""
    with open(path, 'rb') as handle:
        return handle.read()
1585
loader_cls = getattr(importlib_machinery, 'SourceFileLoader',
1587
register_loader_type(loader_cls, cls)
1589
DefaultProvider._register()
1592
class EmptyProvider(NullProvider):
1593
"""Provider that returns nothing for all requests"""
1595
_isdir = _has = lambda self, path: False
1596
_get = lambda self, path: ''
1597
_listdir = lambda self, path: []
1603
empty_provider = EmptyProvider()
1606
class ZipManifests(dict):
1608
zip manifest builder
1612
def build(cls, path):
1614
Build a dictionary similar to the zipimport directory
1615
caches, except instead of tuples, store ZipInfo objects.
1617
Use a platform-specific path separator (os.sep) for the path keys
1618
for compatibility with pypy on Windows.
1620
with ContextualZipFile(path) as zfile:
1623
name.replace('/', os.sep),
1624
zfile.getinfo(name),
1626
for name in zfile.namelist()
1633
class MemoizedZipManifests(ZipManifests):
1635
Memoized zipfile manifests.
1637
manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
1639
def load(self, path):
    """
    Load a manifest at path or return a suitable manifest already loaded.
    """
    path = os.path.normpath(path)
    mtime = os.stat(path).st_mtime

    # Rebuild only when the zipfile changed on disk (or was never seen).
    stale = path not in self or self[path].mtime != mtime
    if stale:
        self[path] = self.manifest_mod(self.build(path), mtime)

    return self[path].manifest
1653
class ContextualZipFile(zipfile.ZipFile):
1655
Supplement ZipFile class to support context manager for Python 2.6
1658
def __enter__(self):
1661
def __exit__(self, type, value, traceback):
1664
def __new__(cls, *args, **kwargs):
    """
    Construct a ZipFile or ContextualZipFile as appropriate.
    """
    # Modern ZipFile already implements the context-manager protocol;
    # only fall back to this compatibility subclass when it does not.
    if hasattr(zipfile.ZipFile, '__exit__'):
        return zipfile.ZipFile(*args, **kwargs)
    return super(ContextualZipFile, cls).__new__(cls)
1673
class ZipProvider(EggProvider):
1674
"""Resource support for zips and eggs"""
1677
_zip_manifests = MemoizedZipManifests()
1679
def __init__(self, module):
1680
EggProvider.__init__(self, module)
1681
self.zip_pre = self.loader.archive+os.sep
1683
def _zipinfo_name(self, fspath):
1684
# Convert a virtual filename (full path to file) into a zipfile subpath
1685
# usable with the zipimport directory cache for our target archive
1686
if fspath.startswith(self.zip_pre):
1687
return fspath[len(self.zip_pre):]
1688
raise AssertionError(
1689
"%s is not a subpath of %s" % (fspath, self.zip_pre)
1692
def _parts(self, zip_path):
1693
# Convert a zipfile subpath into an egg-relative path part list.
1695
fspath = self.zip_pre+zip_path
1696
if fspath.startswith(self.egg_root+os.sep):
1697
return fspath[len(self.egg_root)+1:].split(os.sep)
1698
raise AssertionError(
1699
"%s is not a subpath of %s" % (fspath, self.egg_root)
1704
return self._zip_manifests.load(self.loader.archive)
1706
def get_resource_filename(self, manager, resource_name):
1707
if not self.egg_name:
1708
raise NotImplementedError(
1709
"resource_filename() only supported for .egg, not .zip"
1711
# no need to lock for extraction, since we use temp names
1712
zip_path = self._resource_to_zip(resource_name)
1713
eagers = self._get_eager_resources()
1714
if '/'.join(self._parts(zip_path)) in eagers:
1716
self._extract_resource(manager, self._eager_to_zip(name))
1717
return self._extract_resource(manager, zip_path)
1720
@staticmethod
def _get_date_and_size(zip_stat):
    """Return a (timestamp, size) pair for a ZipInfo entry."""
    size = zip_stat.file_size
    # ymdhms+wday, yday, dst
    date_time = zip_stat.date_time + (0, 0, -1)
    # 1980 offset already done
    timestamp = time.mktime(date_time)
    return timestamp, size
1728
def _extract_resource(self, manager, zip_path):
1730
if zip_path in self._index():
1731
for name in self._index()[zip_path]:
1732
last = self._extract_resource(
1733
manager, os.path.join(zip_path, name)
1735
# return the extracted directory name
1736
return os.path.dirname(last)
1738
timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1740
if not WRITE_SUPPORT:
1741
raise IOError('"os.rename" and "os.unlink" are not supported '
1745
real_path = manager.get_cache_path(
1746
self.egg_name, self._parts(zip_path)
1749
if self._is_current(real_path, zip_path):
1752
outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
1753
os.write(outf, self.loader.get_data(zip_path))
1755
utime(tmpnam, (timestamp, timestamp))
1756
manager.postprocess(tmpnam, real_path)
1759
rename(tmpnam, real_path)
1762
if os.path.isfile(real_path):
1763
if self._is_current(real_path, zip_path):
1764
# the file became current since it was checked above,
1767
# Windows, del old file and retry
1770
rename(tmpnam, real_path)
1775
# report a user-friendly error
1776
manager.extraction_error()
1780
def _is_current(self, file_path, zip_path):
1782
Return True if the file_path is current for this zip_path
1784
timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1785
if not os.path.isfile(file_path):
1787
stat = os.stat(file_path)
1788
if stat.st_size!=size or stat.st_mtime!=timestamp:
1790
# check that the contents match
1791
zip_contents = self.loader.get_data(zip_path)
1792
with open(file_path, 'rb') as f:
1793
file_contents = f.read()
1794
return zip_contents == file_contents
1796
def _get_eager_resources(self):
1797
if self.eagers is None:
1799
for name in ('native_libs.txt', 'eager_resources.txt'):
1800
if self.has_metadata(name):
1801
eagers.extend(self.get_metadata_lines(name))
1802
self.eagers = eagers
1807
return self._dirindex
1808
except AttributeError:
1810
for path in self.zipinfo:
1811
parts = path.split(os.sep)
1813
parent = os.sep.join(parts[:-1])
1815
ind[parent].append(parts[-1])
1818
ind[parent] = [parts.pop()]
1819
self._dirindex = ind
1822
def _has(self, fspath):
    """True if `fspath` names a file or directory inside the archive."""
    zip_path = self._zipinfo_name(fspath)
    return zip_path in self.zipinfo or zip_path in self._index()
1826
def _isdir(self, fspath):
    """True if `fspath` corresponds to a directory entry in the archive."""
    subpath = self._zipinfo_name(fspath)
    return subpath in self._index()
1829
def _listdir(self, fspath):
    """Return the entry names of the archive directory at `fspath`."""
    entries = self._index().get(self._zipinfo_name(fspath), ())
    return list(entries)
1832
def _eager_to_zip(self, resource_name):
    """Map an egg-root-relative eager-resource name to its zipfile subpath."""
    fs_path = self._fn(self.egg_root, resource_name)
    return self._zipinfo_name(fs_path)
1835
def _resource_to_zip(self, resource_name):
    """Map a module-relative resource name to its zipfile subpath."""
    fs_path = self._fn(self.module_path, resource_name)
    return self._zipinfo_name(fs_path)
1838
register_loader_type(zipimport.zipimporter, ZipProvider)
1841
class FileMetadata(EmptyProvider):
1842
"""Metadata handler for standalone PKG-INFO files
1846
metadata = FileMetadata("/path/to/PKG-INFO")
1848
This provider rejects all data and metadata requests except for PKG-INFO,
1849
which is treated as existing, and will be the contents of the file at
1850
the provided location.
1853
def __init__(self, path):
1856
def has_metadata(self, name):
    """Only PKG-INFO is offered, and only if the backing file exists."""
    return name == 'PKG-INFO' and os.path.isfile(self.path)
1859
def get_metadata(self, name):
1860
if name=='PKG-INFO':
1861
with io.open(self.path, encoding='utf-8') as f:
1864
except UnicodeDecodeError as exc:
1865
# add path context to error message
1866
tmpl = " in {self.path}"
1867
exc.reason += tmpl.format(self=self)
1870
raise KeyError("No metadata except PKG-INFO is available")
1872
def get_metadata_lines(self, name):
    """Yield non-blank, non-comment lines of metadata `name`."""
    text = self.get_metadata(name)
    return yield_lines(text)
1876
class PathMetadata(DefaultProvider):
1877
"""Metadata provider for egg directories
1883
egg_info = "/path/to/PackageName.egg-info"
1884
base_dir = os.path.dirname(egg_info)
1885
metadata = PathMetadata(base_dir, egg_info)
1886
dist_name = os.path.splitext(os.path.basename(egg_info))[0]
1887
dist = Distribution(basedir, project_name=dist_name, metadata=metadata)
1889
# Unpacked egg directories:
1891
egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
1892
metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
1893
dist = Distribution.from_filename(egg_path, metadata=metadata)
1896
def __init__(self, path, egg_info):
    """Record the distribution base `path` and its `egg_info` directory."""
    self.module_path = path
    self.egg_info = egg_info
1901
class EggMetadata(ZipProvider):
1902
"""Metadata provider for .egg files"""
1904
def __init__(self, importer):
1905
"""Create a metadata provider from a zipimporter"""
1907
self.zip_pre = importer.archive+os.sep
1908
self.loader = importer
1910
self.module_path = os.path.join(importer.archive, importer.prefix)
1912
self.module_path = importer.archive
1913
self._setup_prefix()
1915
_declare_state('dict', _distribution_finders = {})
1917
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items.

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path
    item handler), and `distribution_finder` is a callable that, passed a
    path item and the importer instance, yields ``Distribution`` instances
    found on that path item. See ``pkg_resources.find_on_path`` for an
    example.
    """
    _distribution_finders[importer_type] = distribution_finder
1927
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # Dispatch to the finder registered for this importer's type.
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
1933
def find_eggs_in_zip(importer, path_item, only=False):
1935
Find eggs in zip files; possibly multiple nested eggs.
1937
if importer.archive.endswith('.whl'):
1938
# wheels are not supported with this finder
1939
# they don't have PKG-INFO metadata, and won't ever contain eggs
1941
metadata = EggMetadata(importer)
1942
if metadata.has_metadata('PKG-INFO'):
1943
yield Distribution.from_filename(path_item, metadata=metadata)
1945
# don't yield nested distros
1947
for subitem in metadata.resource_listdir('/'):
1948
if _is_unpacked_egg(subitem):
1949
subpath = os.path.join(path_item, subitem)
1950
for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
1953
register_finder(zipimport.zipimporter, find_eggs_in_zip)
1955
def find_nothing(importer, path_item, only=False):
1957
register_finder(object, find_nothing)
1959
def find_on_path(importer, path_item, only=False):
1960
"""Yield distributions accessible on a sys.path directory"""
1961
path_item = _normalize_cached(path_item)
1963
if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
1964
if _is_unpacked_egg(path_item):
1965
yield Distribution.from_filename(
1966
path_item, metadata=PathMetadata(
1967
path_item, os.path.join(path_item,'EGG-INFO')
1971
# scan for .egg and .egg-info in directory
1972
for entry in os.listdir(path_item):
1973
lower = entry.lower()
1974
if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
1975
fullpath = os.path.join(path_item, entry)
1976
if os.path.isdir(fullpath):
1977
# egg-info directory, allow getting metadata
1978
metadata = PathMetadata(path_item, fullpath)
1980
metadata = FileMetadata(fullpath)
1981
yield Distribution.from_location(
1982
path_item, entry, metadata, precedence=DEVELOP_DIST
1984
elif not only and _is_unpacked_egg(entry):
1985
dists = find_distributions(os.path.join(path_item, entry))
1988
elif not only and lower.endswith('.egg-link'):
1989
with open(os.path.join(path_item, entry)) as entry_file:
1990
entry_lines = entry_file.readlines()
1991
for line in entry_lines:
1992
if not line.strip():
1994
path = os.path.join(path_item, line.rstrip())
1995
dists = find_distributions(path)
1999
register_finder(pkgutil.ImpImporter, find_on_path)
2001
if hasattr(importlib_machinery, 'FileFinder'):
2002
register_finder(importlib_machinery.FileFinder, find_on_path)
2004
_declare_state('dict', _namespace_handlers={})
2005
_declare_state('dict', _namespace_packages={})
2008
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages.

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path
    item handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    _namespace_handlers[importer_type] = namespace_handler
2025
def _handle_ns(packageName, path_item):
2026
"""Ensure that named package includes a subpath of path_item (if needed)"""
2028
importer = get_importer(path_item)
2029
if importer is None:
2031
loader = importer.find_module(packageName)
2034
module = sys.modules.get(packageName)
2036
module = sys.modules[packageName] = types.ModuleType(packageName)
2037
module.__path__ = []
2038
_set_parent_ns(packageName)
2039
elif not hasattr(module,'__path__'):
2040
raise TypeError("Not a package:", packageName)
2041
handler = _find_adapter(_namespace_handlers, importer)
2042
subpath = handler(importer, path_item, packageName, module)
2043
if subpath is not None:
2044
path = module.__path__
2045
path.append(subpath)
2046
loader.load_module(packageName)
2047
_rebuild_mod_path(path, packageName, module)
2051
def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        parts = path.split(os.sep)
        # Strip the trailing package-name components to recover the
        # sys.path entry that this __path__ element came from.
        depth = package_name.count('.') + 1
        base = os.sep.join(parts[:-depth])
        return sys_path.index(_normalize_cached(base))

    orig_path.sort(key=position_in_sys_path)
    module.__path__[:] = [_normalize_cached(p) for p in orig_path]
2070
def declare_namespace(packageName):
2071
"""Declare that package 'packageName' is a namespace package"""
2075
if packageName in _namespace_packages:
2078
path, parent = sys.path, None
2079
if '.' in packageName:
2080
parent = '.'.join(packageName.split('.')[:-1])
2081
declare_namespace(parent)
2082
if parent not in _namespace_packages:
2085
path = sys.modules[parent].__path__
2086
except AttributeError:
2087
raise TypeError("Not a package:", parent)
2089
# Track what packages are namespaces, so when new path items are added,
2090
# they can be updated
2091
_namespace_packages.setdefault(parent,[]).append(packageName)
2092
_namespace_packages.setdefault(packageName,[])
2094
for path_item in path:
2095
# Ensure all the parent's path items are reflected in the child,
2097
_handle_ns(packageName, path_item)
2102
def fixup_namespace_packages(path_item, parent=None):
2103
"""Ensure that previously-declared namespace packages include path_item"""
2106
for package in _namespace_packages.get(parent,()):
2107
subpath = _handle_ns(package, path_item)
2109
fixup_namespace_packages(subpath, package)
2113
def file_ns_handler(importer, path_item, packageName, module):
2114
"""Compute an ns-package subpath for a filesystem or zipfile importer"""
2116
subpath = os.path.join(path_item, packageName.split('.')[-1])
2117
normalized = _normalize_cached(subpath)
2118
for item in module.__path__:
2119
if _normalize_cached(item)==normalized:
2122
# Only return the path if it's not already there
2125
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
2126
register_namespace_handler(zipimport.zipimporter, file_ns_handler)
2128
if hasattr(importlib_machinery, 'FileFinder'):
2129
register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
2132
def null_ns_handler(importer, path_item, packageName, module):
2135
register_namespace_handler(object, null_ns_handler)
2138
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    real = os.path.realpath(filename)
    return os.path.normcase(real)
2142
def _normalize_cached(filename, _cache={}):
2144
return _cache[filename]
2146
_cache[filename] = result = normalize_path(filename)
2149
def _is_unpacked_egg(path):
2151
Determine if given path appears to be an unpacked egg.
2154
path.lower().endswith('.egg')
2157
def _set_parent_ns(packageName):
2158
parts = packageName.split('.')
2161
parent = '.'.join(parts)
2162
setattr(sys.modules[parent], name, sys.modules[packageName])
2165
def yield_lines(strs):
2166
"""Yield non-empty/non-comment lines of a string or sequence"""
2167
if isinstance(strs, six.string_types):
2168
for s in strs.splitlines():
2170
# skip blank lines/comments
2171
if s and not s.startswith('#'):
2175
for s in yield_lines(ss):
2178
MODULE = re.compile(r"\w+(\.\w+)*$").match
2179
EGG_NAME = re.compile(
2183
-py(?P<pyver>[^-]+) (
2189
re.VERBOSE | re.IGNORECASE,
2193
class EntryPoint(object):
2194
"""Object representing an advertised importable object"""
2196
def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
2197
if not MODULE(module_name):
2198
raise ValueError("Invalid module name", module_name)
2200
self.module_name = module_name
2201
self.attrs = tuple(attrs)
2202
self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
2206
s = "%s = %s" % (self.name, self.module_name)
2208
s += ':' + '.'.join(self.attrs)
2210
s += ' [%s]' % ','.join(self.extras)
2214
return "EntryPoint.parse(%r)" % str(self)
2216
def load(self, require=True, *args, **kwargs):
2218
Require packages for this EntryPoint, then resolve it.
2220
if not require or args or kwargs:
2222
"Parameters to load are deprecated. Call .resolve and "
2223
".require separately.",
2228
self.require(*args, **kwargs)
2229
return self.resolve()
2233
Resolve the entry point from its module and attrs.
2235
module = __import__(self.module_name, fromlist=['__name__'], level=0)
2237
return functools.reduce(getattr, self.attrs, module)
2238
except AttributeError as exc:
2239
raise ImportError(str(exc))
2241
def require(self, env=None, installer=None):
    """Resolve this entry point's requirements into the global working set.

    Raises UnknownExtra when extras are requested without an owning dist.
    """
    if self.extras and not self.dist:
        raise UnknownExtra("Can't require() without a distribution", self)
    reqs = self.dist.requires(self.extras)
    resolved = working_set.resolve(reqs, env, installer)
    # Activate every resolved distribution in the global working set.
    for dist in resolved:
        working_set.add(dist)
2248
pattern = re.compile(
2252
r'(?P<module>[\w.]+)\s*'
2253
r'(:\s*(?P<attr>[\w.]+))?\s*'
2254
r'(?P<extras>\[.*\])?\s*$'
2258
def parse(cls, src, dist=None):
2259
"""Parse a single entry point from string `src`
2261
Entry point syntax follows the form::
2263
name = some.module:some.attr [extra1, extra2]
2265
The entry name and module name are required, but the ``:attrs`` and
2266
``[extras]`` parts are optional
2268
m = cls.pattern.match(src)
2270
msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
2271
raise ValueError(msg, src)
2273
extras = cls._parse_extras(res['extras'])
2274
attrs = res['attr'].split('.') if res['attr'] else ()
2275
return cls(res['name'], res['module'], attrs, extras, dist)
2278
def _parse_extras(cls, extras_spec):
2281
req = Requirement.parse('x' + extras_spec)
2287
def parse_group(cls, group, lines, dist=None):
2288
"""Parse an entry point group"""
2289
if not MODULE(group):
2290
raise ValueError("Invalid group name", group)
2292
for line in yield_lines(lines):
2293
ep = cls.parse(line, dist)
2295
raise ValueError("Duplicate entry point", group, ep.name)
2300
def parse_map(cls, data, dist=None):
2301
"""Parse a map of entry point groups"""
2302
if isinstance(data, dict):
2305
data = split_sections(data)
2307
for group, lines in data:
2311
raise ValueError("Entry points must be listed in groups")
2312
group = group.strip()
2314
raise ValueError("Duplicate group name", group)
2315
maps[group] = cls.parse_group(group, lines, dist)
2319
def _remove_md5_fragment(location):
2322
parsed = urllib.parse.urlparse(location)
2323
if parsed[-1].startswith('md5='):
2324
return urllib.parse.urlunparse(parsed[:-1] + ('',))
2328
def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    def is_version_line(line):
        return line.lower().startswith('version:')
    version_lines = filter(is_version_line, lines)
    # first matching line, or '' when no Version header exists
    line = next(iter(version_lines), '')
    _, _, value = line.partition(':')
    return safe_version(value.strip()) or None
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    # Name of the metadata file holding the Version header
    PKG_INFO = 'PKG-INFO'

    def __init__(self, location=None, metadata=None, project_name=None,
                 version=None, py_version=PY_MAJOR, platform=None,
                 precedence=EGG_DIST):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        # metadata provider; empty_provider when no metadata is available
        self._provider = metadata or empty_provider

    @classmethod
    def from_location(cls, location, basename, metadata=None, **kw):
        """Build a Distribution (or subclass) from a path entry basename.

        The basename's extension selects the concrete class via
        ``_distributionImpl``; egg-style names also yield project name,
        version, python version and platform.
        """
        project_name, version, py_version, platform = [None] * 4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in _distributionImpl:
            cls = _distributionImpl[ext.lower()]

            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name', 'ver', 'pyver', 'plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )
def _reload_version(self):
2379
self.parsed_version,
2382
_remove_md5_fragment(self.location),
2383
self.py_version or '',
2384
self.platform or '',
2388
return hash(self.hashcmp)
2390
def __lt__(self, other):
2391
return self.hashcmp < other.hashcmp
2393
def __le__(self, other):
2394
return self.hashcmp <= other.hashcmp
2396
def __gt__(self, other):
2397
return self.hashcmp > other.hashcmp
2399
def __ge__(self, other):
2400
return self.hashcmp >= other.hashcmp
2402
def __eq__(self, other):
2403
if not isinstance(other, self.__class__):
2404
# It's not a Distribution, so they are not equal
2406
return self.hashcmp == other.hashcmp
2408
def __ne__(self, other):
2409
return not self == other
2411
# These properties have to be lazy so that we don't have to load any
2412
# metadata until/unless it's actually needed. (i.e., some distributions
2413
# may not know their name or version without loading PKG-INFO)
2419
except AttributeError:
2420
self._key = key = self.project_name.lower()
2424
def parsed_version(self):
2425
if not hasattr(self, "_parsed_version"):
2426
self._parsed_version = parse_version(self.version)
2428
return self._parsed_version
2430
def _warn_legacy_version(self):
2431
LV = packaging.version.LegacyVersion
2432
is_legacy = isinstance(self._parsed_version, LV)
2436
# While an empty version is technically a legacy version and
2437
# is not a valid PEP 440 version, it's also unlikely to
2438
# actually come from someone and instead it is more likely that
2439
# it comes from setuptools attempting to parse a filename and
2440
# including it in the list. So for that we'll gate this warning
2441
# on if the version is anything at all or not.
2442
if not self.version:
2445
tmpl = textwrap.dedent("""
2446
'{project_name} ({version})' is being parsed as a legacy,
2448
version. You may find odd behavior and sort order.
2449
In particular it will be sorted as less than 0.0. It
2450
is recommended to migrate to PEP 440 compatible
2452
""").strip().replace('\n', ' ')
2454
warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
2459
return self._version
2460
except AttributeError:
2461
version = _version_from_file(self._get_metadata(self.PKG_INFO))
2463
tmpl = "Missing 'Version:' header and/or %s file"
2464
raise ValueError(tmpl % self.PKG_INFO, self)
2470
return self.__dep_map
2471
except AttributeError:
2472
dm = self.__dep_map = {None: []}
2473
for name in 'requires.txt', 'depends.txt':
2474
for extra, reqs in split_sections(self._get_metadata(name)):
2477
extra, marker = extra.split(':', 1)
2478
if invalid_marker(marker):
2481
elif not evaluate_marker(marker):
2483
extra = safe_extra(extra) or None
2484
dm.setdefault(extra,[]).extend(parse_requirements(reqs))
2487
def requires(self, extras=()):
2488
"""List of Requirements needed for this distro if `extras` are used"""
2491
deps.extend(dm.get(None, ()))
2494
deps.extend(dm[safe_extra(ext)])
2497
"%s has no such extra feature %r" % (self, ext)
2501
def _get_metadata(self, name):
2502
if self.has_metadata(name):
2503
for line in self.get_metadata_lines(name):
2506
def activate(self, path=None):
2507
"""Ensure distribution is importable on `path` (default=sys.path)"""
2510
self.insert_on(path, replace=True)
2511
if path is sys.path:
2512
fixup_namespace_packages(self.location)
2513
for pkg in self._get_metadata('namespace_packages.txt'):
2514
if pkg in sys.modules:
2515
declare_namespace(pkg)
2518
"""Return what this distribution's standard .egg filename should be"""
2519
filename = "%s-%s-py%s" % (
2520
to_filename(self.project_name), to_filename(self.version),
2521
self.py_version or PY_MAJOR
2525
filename += '-' + self.platform
2530
return "%s (%s)" % (self, self.location)
2536
version = getattr(self, 'version', None)
2539
version = version or "[unknown version]"
2540
return "%s %s" % (self.project_name, version)
2542
def __getattr__(self, attr):
2543
"""Delegate all unrecognized public attributes to .metadata provider"""
2544
if attr.startswith('_'):
2545
raise AttributeError(attr)
2546
return getattr(self._provider, attr)
2549
def from_filename(cls, filename, metadata=None, **kw):
2550
return cls.from_location(
2551
_normalize_cached(filename), os.path.basename(filename), metadata,
2555
def as_requirement(self):
2556
"""Return a ``Requirement`` that matches this distribution exactly"""
2557
if isinstance(self.parsed_version, packaging.version.Version):
2558
spec = "%s==%s" % (self.project_name, self.parsed_version)
2560
spec = "%s===%s" % (self.project_name, self.parsed_version)
2562
return Requirement.parse(spec)
2564
def load_entry_point(self, group, name):
2565
"""Return the `name` entry point of `group` or raise ImportError"""
2566
ep = self.get_entry_info(group, name)
2568
raise ImportError("Entry point %r not found" % ((group, name),))
2571
def get_entry_map(self, group=None):
2572
"""Return the entry point map for `group`, or the full entry map"""
2574
ep_map = self._ep_map
2575
except AttributeError:
2576
ep_map = self._ep_map = EntryPoint.parse_map(
2577
self._get_metadata('entry_points.txt'), self
2579
if group is not None:
2580
return ep_map.get(group,{})
2583
def get_entry_info(self, group, name):
2584
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
2585
return self.get_entry_map(group).get(name)
2587
def insert_on(self, path, loc=None, replace=False):
2588
"""Insert self.location in path before its nearest parent directory"""
2590
loc = loc or self.location
2594
nloc = _normalize_cached(loc)
2595
bdir = os.path.dirname(nloc)
2596
npath= [(p and _normalize_cached(p) or p) for p in path]
2598
for p, item in enumerate(npath):
2601
elif item == bdir and self.precedence == EGG_DIST:
2602
# if it's an .egg, give it precedence over its directory
2603
if path is sys.path:
2604
self.check_version_conflict()
2606
npath.insert(p, nloc)
2609
if path is sys.path:
2610
self.check_version_conflict()
2617
# p is the spot where we found or inserted loc; now remove duplicates
2620
np = npath.index(nloc, p+1)
2624
del npath[np], path[np]
2630
def check_version_conflict(self):
2631
if self.key == 'setuptools':
2632
# ignore the inevitable setuptools self-conflicts :(
2635
nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
2636
loc = normalize_path(self.location)
2637
for modname in self._get_metadata('top_level.txt'):
2638
if (modname not in sys.modules or modname in nsp
2639
or modname in _namespace_packages):
2641
if modname in ('pkg_resources', 'setuptools', 'site'):
2643
fn = getattr(sys.modules[modname], '__file__', None)
2644
if fn and (normalize_path(fn).startswith(loc) or
2645
fn.startswith(self.location)):
2648
"Module %s was already imported from %s, but %s is being added"
2649
" to sys.path" % (modname, fn, self.location),
2652
def has_version(self):
2656
issue_warning("Unbuilt egg for " + repr(self))
2660
def clone(self,**kw):
2661
"""Copy this distribution, substituting in any changed keyword args"""
2662
names = 'project_name version py_version platform location precedence'
2663
for attr in names.split():
2664
kw.setdefault(attr, getattr(self, attr, None))
2665
kw.setdefault('metadata', self._provider)
2666
return self.__class__(**kw)
2670
return [dep for dep in self._dep_map if dep]
2673
class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Packages installed by distutils (e.g. numpy or scipy),
        which uses an old safe_version, and so
        their version numbers can get mangled when
        converted to filenames (e.g., 1.11.0.dev0+2329eae to
        1.11.0.dev0_2329eae). These distributions will not be
        parsed properly
        downstream by Distribution and safe_version, so
        take an extra step and try to get the version number from
        the metadata file itself instead of the filename.
        """
        md_version = _version_from_file(self._get_metadata(self.PKG_INFO))
        if md_version:
            self._version = md_version
        return self
class DistInfoDistribution(Distribution):
    """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
    # .dist-info metadata lives in METADATA, not PKG-INFO
    PKG_INFO = 'METADATA'
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info

    @property
    def _dep_map(self):
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}

        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))

        def reqs_for_extra(extra):
            # yield the requirements active when `extra` is requested
            for req in reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            extra = safe_extra(extra.strip())
            # only the requirements unique to this extra
            dm[extra] = list(frozenset(reqs_for_extra(extra)) - common)

        return dm
# Map path-entry extension to the Distribution subclass that handles it.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}
def issue_warning(*args, **kw):
    """Issue a warning attributed to the first caller outside this module."""
    level = 1
    g = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        # walked off the top of the stack; use the outermost frame
        pass
    warnings.warn(stacklevel=level + 1, *args, **kw)
class RequirementParseError(ValueError):
    """Raised when a requirement string cannot be parsed."""

    def __str__(self):
        return ' '.join(self.args)
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    for line in lines:
        # Drop comments -- a hash without a space may be in a URL.
        if ' #' in line:
            line = line[:line.find(' #')]
        # If there is a line continuation, drop it, and append the next line.
        if line.endswith('\\'):
            line = line[:-2].strip()
            try:
                line += next(lines)
            except StopIteration:
                return
        yield Requirement(line)
class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        try:
            super(Requirement, self).__init__(requirement_string)
        except packaging.requirements.InvalidRequirement as e:
            raise RequirementParseError(str(e))
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name, self.key = project_name, project_name.lower()
        # (operator, version) pairs, for backward compatibility
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        self.hashCmp = (
            self.key,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        return (
            isinstance(other, Requirement) and
            self.hashCmp == other.hashCmp
        )

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False

            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        # exactly one requirement is expected in `s`
        req, = parse_requirements(s)
        return req
"""Get an mro for a type or classic class"""
2839
if not isinstance(cls, type):
2840
class cls(cls, object): pass
2841
return cls.__mro__[1:]
2844
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    for t in _get_mro(getattr(ob, '__class__', type(ob))):
        if t in registry:
            return registry[t]
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except OSError:
            # tolerate a concurrent creator; re-raise any real failure
            if not os.path.isdir(dirname):
                raise
def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if dirname and filename and not isdir(dirname):
        # recurse first so ancestors exist, then create this level
        _bypass_ensure_directory(dirname)
        mkdir(dirname, 0o755)
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if line.startswith("["):
            if line.endswith("]"):
                # flush the previous section before starting a new one
                if section or content:
                    yield section, content
                section = line[1:-1].strip()
                content = []
            else:
                raise ValueError("Invalid section heading", line)
        else:
            content.append(line)

    # wrap up last segment
    yield section, content
def _mkstemp(*args,**kw):
2896
# temporarily bypass sandboxing
2898
return tempfile.mkstemp(*args,**kw)
2900
# and then put it back
2904
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
# from jaraco.functools 1.3
2912
def _call_aside(f, *args, **kwargs):
2918
@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # re-export the manager's public methods at module level
    for name in dir(manager):
        if not name.startswith('_'):
            g[name] = getattr(manager, name)
@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path, and ensure that
    # all distributions added to the working set in the future (e.g. by
    # calling ``require()``) will get activated as well.
    add_activation_listener(lambda dist: dist.activate())
    working_set.entries = []
    list(map(working_set.add_entry, sys.path))
    # publish the locals defined above as module-level names
    globals().update(locals())