1
"""Package resource API
4
A resource is a logical file contained within a package, or a logical
5
subdirectory thereof. The package resource API expects resource names
6
to have their path parts separated with ``/``, *not* whatever the local
7
path separator is. Do not use os.path operations to manipulate resource
8
names being passed into the API.
10
The package resource API is designed to work with normal filesystem packages,
11
.egg files, and unpacked .egg files. It can also work in a limited way with
12
.zip files and with custom PEP 302 loaders that support the ``get_data()``
16
import sys, os, zipimport, time, re, imp, new
18
# Fix for Python 2.6 deprecation warning
20
ImmutableSet = frozenset
22
from sets import ImmutableSet
24
from os import utime, rename, unlink # capture these to bypass sandboxing
25
from os import open as os_open
27
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*. To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        try:
            # Report the *running* OS version rather than the build minimum.
            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
        except ValueError:
            # not Mac OS X; fall back to the build platform string
            pass
    return plat
49
# Public API of this module.  NOTE(review): 'get_default_cache' and
# 'ExtractionError' restored from the upstream list — confirm against the
# pristine file.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
89
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. ResolutionError('why', dist) -> "ResolutionError('why', dist)"
        return self.__class__.__name__ + repr(self.args)
93
class VersionConflict(ResolutionError):
    """An already-installed version conflicts with the requested version"""
96
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""
99
class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""
101
# Registry mapping PEP 302 loader types to IResourceProvider factories;
# populated via register_loader_type() and consulted by get_provider().
_provider_factories = {}

# Major Python version string, e.g. "2.6", used to match eggs to the running
# interpreter.  NOTE(review): sys.version[:3] is the historical idiom; it
# mis-slices two-digit minor versions ("3.1" for 3.10) — kept for
# compatibility with existing egg naming.
PY_MAJOR = sys.version[:3]

# Distribution "precedence" constants (restored from the upstream source;
# higher value = preferred format).
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1

def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory
118
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # A Requirement resolves to its active (or newly-activated) dist
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    try:
        module = sys.modules[moduleOrReq]
    except KeyError:
        # Not imported yet; import it so we can inspect its __loader__
        __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
130
def _macosx_vers(_cache=[]):
    """Return the Mac OS X version as a list of string parts, e.g. ['10','4'].

    The mutable default `_cache` is deliberate: the sw_vers subprocess is run
    at most once per process and its result memoized.
    """
    if not _cache:
        info = os.popen('/usr/bin/sw_vers').read().splitlines()
        for line in info:
            key, value = line.split(None, 1)
            if key == 'ProductVersion:':
                _cache.append(value.strip().split("."))
                break
        else:
            raise ValueError("What?!")
    return _cache[0]
142
def _macosx_arch(machine):
    """Map Mac machine names from ``uname`` to distutils architecture names."""
    return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
145
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    from distutils.util import get_platform
    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        try:
            version = _macosx_vers()
            machine = os.uname()[4].replace(" ", "_")
            return "macosx-%d.%d-%s" % (
                int(version[0]), int(version[1]), _macosx_arch(machine)
            )
        except ValueError:
            # if someone is running a non-Mac darwin system, this will fall
            # through to the default implementation
            pass
    return plat
165
# "macosx-<major>.<minor>-<arch>" as used by modern eggs
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
# "darwin-<maj>.<min>.<patch>-<arch>" as used by pre-0.6 eggs
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")

get_platform = get_build_platform  # XXX backward compat
171
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided == required:
        return True  # easy case

    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    if reqMac:
        provMac = macosVersionString.match(provided)

        # is this a Mac package?
        if not provMac:
            # this is backwards compatibility for packages built before
            # setuptools 0.6. All packages built after this point will
            # use the new macosx designation.
            provDarwin = darwinVersionString.match(provided)
            if provDarwin:
                dversion = int(provDarwin.group(1))
                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
                # darwin 7 ~ OS X 10.3, darwin 8 ~ OS X 10.4
                if dversion == 7 and macosversion >= "10.3" or \
                   dversion == 8 and macosversion >= "10.4":
                    return True
            return False  # egg isn't macosx or legacy darwin

        # are they the same major version and machine type?
        if provMac.group(1) != reqMac.group(1) or \
           provMac.group(3) != reqMac.group(3):
            return False

        # is the required OS major update >= the provided one?
        if int(provMac.group(2)) > int(reqMac.group(2)):
            return False

        return True

    # XXX Linux and other platforms' special cases should go here
    return False
222
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    # Run the script in a pristine copy of the caller's namespace, keeping
    # only __name__ so the script behaves as if run as that module.
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)

run_main = run_script  # backward compatibility
232
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Normalize progressively: string -> Requirement -> Distribution
    if isinstance(dist, basestring):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if not isinstance(dist, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return dist
240
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    return get_distribution(dist).load_entry_point(group, name)
244
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    return get_distribution(dist).get_entry_map(group)
248
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    return get_distribution(dist).get_entry_info(group, name)
253
class IMetadataProvider:
    """Interface for objects that supply distribution metadata.

    Methods are declared without ``self`` in the historical interface style
    of this module; they document the contract only and are never called on
    this class directly.
    """

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
294
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory?  (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
335
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        self.entries = []      # path entries, parallel to sys.path
        self.entry_keys = {}   # entry -> list of project keys found there
        self.by_key = {}       # project key -> active Distribution
        self.callbacks = []    # activation listeners from subscribe()

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            raise VersionConflict(dist, req)  # XXX add more info
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        # reset caller's namespace so the script runs in a clean module dict
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set. If it's added, any
        callbacks registered with the ``subscribe()`` method will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry)

        if entry is None:
            entry = dist.location
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if dist.key in self.by_key:
            return  # ignore hidden distros

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.
        """
        requirements = list(requirements)[::-1]  # set up the stack
        processed = {}  # set of processed requirements
        best = {}       # key -> dist
        to_activate = []

        while requirements:
            req = requirements.pop(0)  # process dependencies breadth-first
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue
            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None:
                    if env is None:
                        env = Environment(self.entries)
                    dist = best[req.key] = env.best_match(req, self, installer)
                    if dist is None:
                        raise DistributionNotFound(req)  # XXX put more info here
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                raise VersionConflict(dist, req)  # XXX put more info here
            requirements.extend(dist.requires(req.extras)[::-1])
            processed[req] = True

        return to_activate  # return list of distros to activate

    def find_plugins(self,
        plugin_env, full_env=None, installer=None, fallback=True
    ):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            map(working_set.add, distributions)  # add plugins+libs to sys.path
            print "Couldn't load", errors        # display errors

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """
        plugin_projects = list(plugin_env)
        plugin_projects.sort()  # scan project names in alphabetic order

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        shadow_set = self.__class__([])
        map(shadow_set.add, self)  # put all our entries in shadow_set

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)
                except ResolutionError as v:
                    error_info[dist] = v  # save error info
                    if fallback:
                        continue  # try the next older version of project
                    else:
                        break     # give up on this project, keep going
                else:
                    map(shadow_set.add, resolvees)
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # notify all activation listeners of a newly-added distribution
        for callback in self.callbacks:
            callback(dist)
622
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with. If
        unspecified, it defaults to the current platform. `python` is an
        optional string naming the desired version of Python (e.g. ``'2.4'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        self._distmap = {}  # key -> list of distributions
        self._cache = {}    # key -> sorted list (newest first)
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        return (self.python is None or dist.py_version is None
                or dist.py_version == self.python) \
            and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used. Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`
        """
        try:
            return self._cache[project_name]
        except KeyError:
            project_name = project_name.lower()
            if project_name not in self._distmap:
                return []

        if project_name not in self._cache:
            dists = self._cache[project_name] = self._distmap[project_name]
            _sort_dists(dists)

        return self._cache[project_name]

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it isn't already added"""
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                # keep any cached sorted list up to date
                if dist.key in self._cache:
                    _sort_dists(self._cache[dist.key])

    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.) If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`. If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        dist = working_set.find(req)
        if dist is not None:
            return dist
        for dist in self[req.key]:
            if dist in req:
                return dist
        return self.obtain(req, installer)  # try and download/install

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download). In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead. This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # new environment accepts everything; contents were already filtered
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new
765
AvailableDistributions = Environment  # XXX backward compatibility
768
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
786
class ResourceManager:
    """Manage resource extraction and packages"""

    # base path for extraction; None means use get_default_cache()
    extraction_path = None

    def __init__(self):
        # target_path -> 1 for every path handed out by get_cache_path()
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        err = ExtractionError("""Can't extract file(s) to egg cache

The following error occurred while trying to extract file(s) to the Python egg
cache:

  %s

The Python egg cache directory is currently set to:

  %s

Perhaps your account does not have write access to this directory?  You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
        )
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            ensure_directory(target_path)
        except:
            # NOTE(review): deliberately broad in the original — any failure
            # is converted into a descriptive ExtractionError.
            self.extraction_error()

        self.cached_files[target_path] = 1
        return target_path

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """
        # NOTE(review): no postprocessing is performed in this version of the
        # module — confirm against the pristine source.

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX: not implemented in this version of the module
950
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory. On all other systems, it's "~/.python-eggs".
    """
    try:
        return os.environ['PYTHON_EGG_CACHE']
    except KeyError:
        pass

    if os.name != 'nt':
        return os.path.expanduser('~/.python-eggs')

    app_data = 'Application Data'  # XXX this may be locale-specific!
    app_homes = [
        (('APPDATA',), None),  # best option, should be locale-safe
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE', 'HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        # NOTE(review): entry restored from upstream — confirm
        (('HOME',), None),
        (('WINDIR',), app_data),  # 95/98/ME
    ]

    for keys, subdir in app_homes:
        dirname = ''
        for key in keys:
            if key in os.environ:
                dirname = os.path.join(dirname, os.environ[key])
            else:
                break
        else:
            if subdir:
                dirname = os.path.join(dirname, subdir)
            return os.path.join(dirname, 'Python-Eggs')
    else:
        raise RuntimeError(
            "Please set the PYTHON_EGG_CACHE environment variable"
        )
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    return re.sub('[^A-Za-z0-9.]+', '-', name)
999
def safe_version(version):
    """Convert an arbitrary string to a standard version string

    Spaces become dots, and all other non-alphanumeric characters become
    dashes, with runs of multiple dashes condensed to a single dash.
    """
    version = version.replace(' ', '.')
    return re.sub('[^A-Za-z0-9.]+', '-', version)
1009
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
1018
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return name.replace('-', '_')
1033
"""Try to implement resources and metadata for arbitrary PEP 302 loaders"""
1039
def __init__(self, module):
1040
self.loader = getattr(module, '__loader__', None)
1041
self.module_path = os.path.dirname(getattr(module, '__file__', ''))
1043
def get_resource_filename(self, manager, resource_name):
1044
return self._fn(self.module_path, resource_name)
1046
def get_resource_stream(self, manager, resource_name):
1047
return StringIO(self.get_resource_string(manager, resource_name))
1049
def get_resource_string(self, manager, resource_name):
1050
return self._get(self._fn(self.module_path, resource_name))
1052
def has_resource(self, resource_name):
1053
return self._has(self._fn(self.module_path, resource_name))
1055
def has_metadata(self, name):
1056
return self.egg_info and self._has(self._fn(self.egg_info,name))
1058
def get_metadata(self, name):
1059
if not self.egg_info:
1061
return self._get(self._fn(self.egg_info,name))
1063
def get_metadata_lines(self, name):
1064
return yield_lines(self.get_metadata(name))
1066
def resource_isdir(self,resource_name):
1067
return self._isdir(self._fn(self.module_path, resource_name))
1069
def metadata_isdir(self,name):
1070
return self.egg_info and self._isdir(self._fn(self.egg_info,name))
1073
def resource_listdir(self,resource_name):
1074
return self._listdir(self._fn(self.module_path,resource_name))
1076
def metadata_listdir(self,name):
1078
return self._listdir(self._fn(self.egg_info,name))
1081
def run_script(self,script_name,namespace):
1082
script = 'scripts/'+script_name
1083
if not self.has_metadata(script):
1084
raise ResolutionError("No script named %r" % script_name)
1085
script_text = self.get_metadata(script).replace('\r\n','\n')
1086
script_text = script_text.replace('\r','\n')
1087
script_filename = self._fn(self.egg_info,script)
1088
namespace['__file__'] = script_filename
1089
if os.path.exists(script_filename):
1090
execfile(script_filename, namespace, namespace)
1092
from linecache import cache
1093
cache[script_filename] = (
1094
len(script_text), 0, script_text.split('\n'), script_filename
1096
script_code = compile(script_text,script_filename,'exec')
1097
exec script_code in namespace, namespace
1099
def _has(self, path):
1100
raise NotImplementedError(
1101
"Can't perform this operation for unregistered loader type"
1104
def _isdir(self, path):
1105
raise NotImplementedError(
1106
"Can't perform this operation for unregistered loader type"
1109
def _listdir(self, path):
1110
raise NotImplementedError(
1111
"Can't perform this operation for unregistered loader type"
1114
def _fn(self, base, resource_name):
1115
return os.path.join(base, *resource_name.split('/'))
1117
def _get(self, path):
1118
if hasattr(self.loader, 'get_data'):
1119
return self.loader.get_data(path)
1120
raise NotImplementedError(
1121
"Can't perform this operation for loaders without 'get_data()'"
1124
# Fallback provider for any loader type not registered more specifically.
register_loader_type(object, NullProvider)
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self,module):
        NullProvider.__init__(self,module)
        self._setup_prefix()

    def _setup_prefix(self):
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        # NOTE(review): the walk-upwards loop was dropped by the file
        # corruption and restored from upstream setuptools; verify vs VCS.
        path = self.module_path
        old = None
        while path!=old:
            if path.lower().endswith('.egg'):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            old = path
            path, base = os.path.split(path)
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self,path):
        return os.path.isdir(path)

    def _listdir(self,path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        # read the whole file, making sure the handle is closed even on error
        stream = open(path, 'rb')
        try:
            return stream.read()
        finally:
            stream.close()
# Modules loaded by the normal import machinery have __loader__ == None.
register_loader_type(type(None), DefaultProvider)
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    _isdir = _has = lambda self,path: False
    _get = lambda self,path: ''
    _listdir = lambda self,path: []
    # NOTE(review): restored from upstream setuptools — verify vs VCS.
    module_path = None

    def __init__(self):
        pass
# Shared singleton used as the default metadata provider for Distributions.
empty_provider = EmptyProvider()
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # NOTE(review): several dropped lines in this class (the `eagers`
    # default, the eager-extraction loop, the extraction try/except
    # scaffolding, and the _index() body) were restored from upstream
    # setuptools; verify against version control.
    eagers = None

    def __init__(self, module):
        EggProvider.__init__(self,module)
        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
        self.zip_pre = self.loader.archive+os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.zip_pre)
        )

    def _parts(self,zip_path):
        # Convert a zipfile subpath into an egg-relative path part list
        fspath = self.zip_pre+zip_path # pseudo-fs path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath,self.egg_root)
        )

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    def _extract_resource(self, manager, zip_path):
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            return os.path.dirname(last) # return the extracted directory name

        zip_stat = self.zipinfo[zip_path]
        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
        date_time = (
            (d>>9)+1980, (d>>5)&0xF, d&0x1F, # ymd
            (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1 # hms, etc.
        )
        timestamp = time.mktime(date_time)

        try:
            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            if os.path.isfile(real_path):
                stat = os.stat(real_path)
                if stat.st_size==size and stat.st_mtime==timestamp:
                    # size and stamp match, don't bother extracting
                    return real_path

            # extract to a temp name, then atomically rename into place
            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp,timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)
            except os.error:
                if os.path.isfile(real_path):
                    stat = os.stat(real_path)
                    if stat.st_size==size and stat.st_mtime==timestamp:
                        # size and stamp match, somebody did it just ahead of
                        # us, so we're done
                        return real_path
                    elif os.name=='nt': # Windows, del old file and retry
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            manager.extraction_error() # report a user-friendly error

        return real_path

    def _get_eager_resources(self):
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        try:
            return self._dirindex
        except AttributeError:
            # build a directory index: parent subpath -> list of child names
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self,fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self,fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.egg_root,resource_name))

    def _resource_to_zip(self,resource_name):
        return self._zipinfo_name(self._fn(self.module_path,resource_name))
register_loader_type(zipimport.zipimporter, ZipProvider)
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self,path):
        # NOTE(review): attribute assignment restored — verify vs VCS.
        self.path = path

    def has_metadata(self,name):
        return name=='PKG-INFO'

    def get_metadata(self,name):
        if name=='PKG-INFO':
            return open(self.path,'rU').read()
        raise KeyError("No metadata except PKG-INFO is available")

    def get_metadata_lines(self,name):
        return yield_lines(self.get_metadata(name))
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir,project_name=dist_name,metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        self.module_path = path
        self.egg_info = egg_info
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""

        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
        self.zip_pre = importer.archive+os.sep
        self.loader = importer
        # NOTE(review): the prefix check was dropped by the file corruption
        # and restored from upstream setuptools — verify vs VCS.
        if importer.prefix:
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()
"""PEP 302 Importer that wraps Python's "normal" import algorithm"""
1445
def __init__(self, path=None):
1448
def find_module(self, fullname, path=None):
1449
subname = fullname.split(".")[-1]
1450
if subname != fullname and self.path is None:
1452
if self.path is None:
1457
file, filename, etc = imp.find_module(subname, path)
1460
return ImpLoader(file, filename, etc)
1464
"""PEP 302 Loader that wraps Python's "normal" import algorithm"""
1466
def __init__(self, file, filename, etc):
1468
self.filename = filename
1471
def load_module(self, fullname):
1473
mod = imp.load_module(fullname, self.file, self.filename, self.etc)
1475
if self.file: self.file.close()
1476
# Note: we don't set __loader__ because we want the module to look
1477
# normal; i.e. this is just a wrapper for standard import machinery
1483
def get_importer(path_item):
    """Retrieve a PEP 302 "importer" for the given path item

    If there is no importer, this returns a wrapper around the builtin import
    machinery.  The returned importer is only cached if it was created by a
    path hook.
    """
    # NOTE(review): the cache lookup / hook loop scaffolding was dropped by
    # the file corruption and restored from upstream setuptools/pkgutil;
    # verify against version control.
    try:
        importer = sys.path_importer_cache[path_item]
    except KeyError:
        for hook in sys.path_hooks:
            try:
                importer = hook(path_item)
            except ImportError:
                pass
            else:
                break
        else:
            importer = None
        sys.path_importer_cache.setdefault(path_item,importer)

    if importer is None:
        try:
            importer = ImpWrapper(path_item)
        except ImportError:
            pass
    return importer
# Prefer pkgutil's implementations when available (Python 2.5+).
try:
    from pkgutil import get_importer, ImpImporter
except ImportError:
    pass # Python 2.3 or 2.4, use our own implementation
else:
    ImpWrapper = ImpImporter # Python 2.5, use pkgutil's implementation
    del ImpLoader, ImpImporter
# Registry mapping importer types to distribution-finder callables.
_distribution_finders = {}
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item.  See ``pkg_resources.find_on_path`` for an example."""
    _distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
def find_in_zip(importer, path_item, only=False):
    """Yield distributions found in a zip file / egg archive"""
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    # NOTE(review): the `if only:` guard and `yield dist` were dropped by
    # the file corruption and restored from upstream setuptools.
    if only:
        return # don't yield nested distros
    for subitem in metadata.resource_listdir('/'):
        if subitem.endswith('.egg'):
            subpath = os.path.join(path_item, subitem)
            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
                yield dist
register_finder(zipimport.zipimporter, find_in_zip)
def StringIO(*args, **kw):
    """Thunk to load the real StringIO on demand"""
    # Rebind the module-level name on first use so later calls go straight
    # to the real implementation.
    global StringIO
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    return StringIO(*args,**kw)
def find_nothing(importer, path_item, only=False):
    """Finder for importer types that can never contain distributions."""
    return ()
register_finder(object,find_nothing)
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    # NOTE(review): the `else:` branches, `yield` statements and metadata
    # fallback were dropped by the file corruption and restored from
    # upstream setuptools — verify against version control.
    path_item = _normalize_cached(path_item)

    if os.path.isdir(path_item):
        if path_item.lower().endswith('.egg'):
            # unpacked egg
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            for entry in os.listdir(path_item):
                lower = entry.lower()
                if lower.endswith('.egg-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item,entry,metadata,precedence=DEVELOP_DIST
                    )
                elif not only and lower.endswith('.egg'):
                    for dist in find_distributions(os.path.join(path_item, entry)):
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    for line in file(os.path.join(path_item, entry)):
                        if not line.strip(): continue
                        for item in find_distributions(line.rstrip()):
                            yield item
register_finder(ImpWrapper,find_on_path)
# Registries for namespace-package support: importer type -> handler, and
# package name -> list of child namespace packages.
_namespace_handlers = {}
_namespace_packages = {}
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer,path_entry,moduleName,module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath.  For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    _namespace_handlers[importer_type] = namespace_handler
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""
    # NOTE(review): the early-return guards and final `return subpath` were
    # dropped by the file corruption and restored from upstream setuptools.
    importer = get_importer(path_item)
    if importer is None:
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        module = sys.modules[packageName] = new.module(packageName)
        module.__path__ = []; _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer,path_item,packageName,module)
    if subpath is not None:
        path = module.__path__; path.append(subpath)
        loader.load_module(packageName); module.__path__ = path
    return subpath
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""
    # NOTE(review): the import-lock bracketing and parent-import lines were
    # dropped by the file corruption and restored from upstream setuptools;
    # verify against version control.
    imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return

        path, parent = sys.path, None
        if '.' in packageName:
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent,[]).append(packageName)
        _namespace_packages.setdefault(packageName,[])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if needed
            _handle_ns(packageName, path_item)
    finally:
        imp.release_lock()
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    # NOTE(review): lock bracketing restored from upstream setuptools.
    imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent,()):
            subpath = _handle_ns(package, path_item)
            if subpath: fixup_namespace_packages(subpath,package)
    finally:
        imp.release_lock()
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    for item in module.__path__:
        if _normalize_cached(item)==normalized:
            break
    else:
        # Only return the path if it's not already there
        return subpath
register_namespace_handler(ImpWrapper,file_ns_handler)
register_namespace_handler(zipimport.zipimporter,file_ns_handler)
def null_ns_handler(importer, path_item, packageName, module):
    """Namespace handler for importer types with no usable subpaths."""
    return None
register_namespace_handler(object,null_ns_handler)
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    return os.path.normcase(os.path.realpath(filename))
def _normalize_cached(filename,_cache={}):
    # Memoized normalize_path; the mutable default is the (intentional)
    # shared cache.
    try:
        return _cache[filename]
    except KeyError:
        _cache[filename] = result = normalize_path(filename)
        return result
def _set_parent_ns(packageName):
    # Bind `packageName` as an attribute of its parent package, if any.
    parts = packageName.split('.')
    name = parts.pop()
    if parts:
        parent = '.'.join(parts)
        setattr(sys.modules[parent], name, sys.modules[packageName])
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
    if isinstance(strs,basestring):
        for s in strs.splitlines():
            s = s.strip()
            if s and not s.startswith('#'): # skip blank lines/comments
                yield s
    else:
        # nested iterable of strings: recurse
        for ss in strs:
            for s in yield_lines(ss):
                yield s
# Pre-bound regex matchers used by the requirement / entry-point parsers.
LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation
DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra
VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info
COMMA = re.compile(r"\s*,").match # comma between items
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
# NOTE(review): the name-group line and closing ).match were dropped by the
# file corruption and restored from upstream setuptools.
EGG_NAME = re.compile(
    r"(?P<name>[^-]+)"
    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
    re.VERBOSE | re.IGNORECASE
).match
# Splits a version string into numeric / alpha / separator components, and
# maps pre-release spellings onto canonical tags for sorting.
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c'}.get
def _parse_version_parts(s):
    # Yield comparable sub-parts of version string `s`; numeric parts are
    # zero-padded and alpha parts are '*'-prefixed so they sort before final.
    for part in component_re.split(s):
        part = replace(part,part)
        if not part or part=='.':
            continue
        if part[:1] in '0123456789':
            yield part.zfill(8) # pad for numeric comparison
        else:
            yield '*'+part

    yield '*final' # ensure that alpha/beta/candidate are before final
def parse_version(s):
    """Convert a version string to a chronologically-sortable key

    This is a rough cross between distutils' StrictVersion and LooseVersion;
    if you give it versions that would work with StrictVersion, then it behaves
    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
    *possible* to create pathological version coding schemes that will fool
    this parser, but they should be very rare in practice.

    The returned value will be a tuple of strings.  Numeric portions of the
    version are padded to 8 digits so they will compare numerically, but
    without relying on how numbers compare relative to strings.  Dots are
    dropped, but dashes are retained.  Trailing zeros between alpha segments
    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
    "2.4". Alphanumeric parts are lower-cased.

    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final" represents a "patch level".  So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".

    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
    come before "final" alphabetically) are assumed to be pre-release versions,
    so that the version "2.4" is considered newer than "2.4a1".

    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
    "rc" are treated as if they were "c", i.e. as though they were release
    candidates, and therefore are not as new as a version string that does not
    contain them.
    """
    parts = []
    for part in _parse_version_parts(s.lower()):
        if part.startswith('*'):
            if part<'*final': # remove '-' before a prerelease tag
                while parts and parts[-1]=='*final-': parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1]=='00000000':
                parts.pop()
        parts.append(part)
    return tuple(parts)
class EntryPoint(object):
    """Object representing an advertised importable object"""

    # NOTE(review): several dropped lines in this class (attribute
    # assignments, __str__/__repr__ headers, the parse() try/except
    # scaffolding and return statements) were restored from upstream
    # setuptools — verify against version control.

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        self.dist = dist

    def __str__(self):
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, env=None, installer=None):
        if require: self.require(env, installer)
        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
        for attr in self.attrs:
            try:
                entry = getattr(entry,attr)
            except AttributeError:
                raise ImportError("%r has no %r attribute" % (entry,attr))
        return entry

    def require(self, env=None, installer=None):
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        map(working_set.add,
            working_set.resolve(self.dist.requires(self.extras),env,installer))

    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1,extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        try:
            attrs = extras = ()
            name,value = src.split('=',1)
            if '[' in value:
                value,extras = value.split('[',1)
                req = Requirement.parse("x["+extras)
                if req.specs: raise ValueError
                extras = req.extras
            if ':' in value:
                value,attrs = value.split(':',1)
                if not MODULE(attrs.rstrip()):
                    raise ValueError
                attrs = attrs.rstrip().split('.')
        except ValueError:
            raise ValueError(
                "EntryPoint must be in 'name=module:attrs [extras]' format",
                src
            )
        else:
            return cls(name.strip(), value.strip(), attrs, extras, dist)

    parse = classmethod(parse)

    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name]=ep
        return this

    parse_group = classmethod(parse_group)

    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data,dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps

    parse_map = classmethod(parse_map)
class Distribution(object):
1935
"""Wrap an actual or potential sys.path entry w/metadata"""
1937
location=None, metadata=None, project_name=None, version=None,
1938
py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
1940
self.project_name = safe_name(project_name or 'Unknown')
1941
if version is not None:
1942
self._version = safe_version(version)
1943
self.py_version = py_version
1944
self.platform = platform
1945
self.location = location
1946
self.precedence = precedence
1947
self._provider = metadata or empty_provider
1950
def from_location(cls,location,basename,metadata=None,**kw):
1951
project_name, version, py_version, platform = [None]*4
1952
basename, ext = os.path.splitext(basename)
1953
if ext.lower() in (".egg",".egg-info"):
1954
match = EGG_NAME(basename)
1956
project_name, version, py_version, platform = match.group(
1957
'name','ver','pyver','plat'
1960
location, metadata, project_name=project_name, version=version,
1961
py_version=py_version, platform=platform, **kw
1963
from_location = classmethod(from_location)
1967
getattr(self,'parsed_version',()), self.precedence, self.key,
1968
-len(self.location or ''), self.location, self.py_version,
1972
def __cmp__(self, other): return cmp(self.hashcmp, other)
1973
def __hash__(self): return hash(self.hashcmp)
1975
# These properties have to be lazy so that we don't have to load any
1976
# metadata until/unless it's actually needed. (i.e., some distributions
1977
# may not know their name or version without loading PKG-INFO)
1983
except AttributeError:
1984
self._key = key = self.project_name.lower()
1989
def parsed_version(self):
1991
return self._parsed_version
1992
except AttributeError:
1993
self._parsed_version = pv = parse_version(self.version)
1996
parsed_version = property(parsed_version)
2001
return self._version
2002
except AttributeError:
2003
for line in self._get_metadata('PKG-INFO'):
2004
if line.lower().startswith('version:'):
2005
self._version = safe_version(line.split(':',1)[1].strip())
2006
return self._version
2009
"Missing 'Version:' header and/or PKG-INFO file", self
2011
version = property(version)
2019
return self.__dep_map
2020
except AttributeError:
2021
dm = self.__dep_map = {None: []}
2022
for name in 'requires.txt', 'depends.txt':
2023
for extra,reqs in split_sections(self._get_metadata(name)):
2024
if extra: extra = safe_extra(extra)
2025
dm.setdefault(extra,[]).extend(parse_requirements(reqs))
2027
_dep_map = property(_dep_map)
2029
def requires(self,extras=()):
2030
"""List of Requirements needed for this distro if `extras` are used"""
2033
deps.extend(dm.get(None,()))
2036
deps.extend(dm[safe_extra(ext)])
2039
"%s has no such extra feature %r" % (self, ext)
2043
def _get_metadata(self,name):
2044
if self.has_metadata(name):
2045
for line in self.get_metadata_lines(name):
2048
def activate(self,path=None):
2049
"""Ensure distribution is importable on `path` (default=sys.path)"""
2050
if path is None: path = sys.path
2051
self.insert_on(path)
2052
if path is sys.path:
2053
fixup_namespace_packages(self.location)
2054
map(declare_namespace, self._get_metadata('namespace_packages.txt'))
2058
"""Return what this distribution's standard .egg filename should be"""
2059
filename = "%s-%s-py%s" % (
2060
to_filename(self.project_name), to_filename(self.version),
2061
self.py_version or PY_MAJOR
2065
filename += '-'+self.platform
2070
return "%s (%s)" % (self,self.location)
2075
try: version = getattr(self,'version',None)
2076
except ValueError: version = None
2077
version = version or "[unknown version]"
2078
return "%s %s" % (self.project_name,version)
2080
def __getattr__(self,attr):
2081
"""Delegate all unrecognized public attributes to .metadata provider"""
2082
if attr.startswith('_'):
2083
raise AttributeError,attr
2084
return getattr(self._provider, attr)
2087
def from_filename(cls,filename,metadata=None, **kw):
2088
return cls.from_location(
2089
_normalize_cached(filename), os.path.basename(filename), metadata,
2092
from_filename = classmethod(from_filename)
2094
def as_requirement(self):
2095
"""Return a ``Requirement`` that matches this distribution exactly"""
2096
return Requirement.parse('%s==%s' % (self.project_name, self.version))
2098
def load_entry_point(self, group, name):
2099
"""Return the `name` entry point of `group` or raise ImportError"""
2100
ep = self.get_entry_info(group,name)
2102
raise ImportError("Entry point %r not found" % ((group,name),))
2105
def get_entry_map(self, group=None):
2106
"""Return the entry point map for `group`, or the full entry map"""
2108
ep_map = self._ep_map
2109
except AttributeError:
2110
ep_map = self._ep_map = EntryPoint.parse_map(
2111
self._get_metadata('entry_points.txt'), self
2113
if group is not None:
2114
return ep_map.get(group,{})
2117
def get_entry_info(self, group, name):
2118
"""Return the EntryPoint object for `group`+`name`, or ``None``"""
2119
return self.get_entry_map(group).get(name)
2139
def insert_on(self, path, loc = None):
2140
"""Insert self.location in path before its nearest parent directory"""
2142
loc = loc or self.location
2146
if path is sys.path:
2147
self.check_version_conflict()
2149
nloc = _normalize_cached(loc)
2150
bdir = os.path.dirname(nloc)
2151
npath= map(_normalize_cached, path)
2154
for p, item in enumerate(npath):
2159
npath.insert(p, nloc)
2165
# p is the spot where we found or inserted loc; now remove duplicates
2168
np = npath.index(nloc, p+1)
2172
del npath[np], path[np]
2180
def check_version_conflict(self):
2181
if self.key=='setuptools':
2182
return # ignore the inevitable setuptools self-conflicts :(
2184
nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
2185
loc = normalize_path(self.location)
2186
for modname in self._get_metadata('top_level.txt'):
2187
if (modname not in sys.modules or modname in nsp
2188
or modname in _namespace_packages
2192
fn = getattr(sys.modules[modname], '__file__', None)
2193
if fn and normalize_path(fn).startswith(loc):
2196
"Module %s was already imported from %s, but %s is being added"
2197
" to sys.path" % (modname, fn, self.location),
2200
def has_version(self):
2204
issue_warning("Unbuilt egg for "+repr(self))
2208
def clone(self,**kw):
2209
"""Copy this distribution, substituting in any changed keyword args"""
2211
'project_name', 'version', 'py_version', 'platform', 'location',
2214
kw.setdefault(attr, getattr(self,attr,None))
2215
kw.setdefault('metadata', self._provider)
2216
return self.__class__(**kw)
2223
return [dep for dep in self._dep_map if dep]
2224
extras = property(extras)
2227
def issue_warning(*args,**kw):
    """Issue a warning attributed to the first caller outside this module."""
    level = 1
    g = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        pass
    from warnings import warn
    warn(stacklevel = level+1, *args, **kw)
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be an instance of ``basestring``, or a (possibly-nested)
    iterable thereof.
    """
    # NOTE(review): the scan_list scaffolding and the outer parse loop were
    # partly dropped by the file corruption and restored from upstream
    # setuptools — verify against version control.

    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
        # Parse a comma-separated list (extras or version specs), following
        # backslash continuations onto subsequent lines.
        items = []

        while not TERMINATOR(line,p):
            if CONTINUE(line,p):
                try:
                    line = lines.next(); p = 0
                except StopIteration:
                    raise ValueError(
                        "\\ must not appear on the last nonblank line"
                    )

            match = ITEM(line,p)
            if not match:
                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])

            items.append(match.group(*groups))
            p = match.end()

            match = COMMA(line,p)
            if match:
                p = match.end() # skip the comma
            elif not TERMINATOR(line,p):
                raise ValueError(
                    "Expected ',' or end-of-list in",line,"at",line[p:]
                )

        match = TERMINATOR(line,p)
        if match: p = match.end() # skip the terminator, if any
        return line, p, items

    for line in lines:
        match = DISTRO(line)
        if not match:
            raise ValueError("Missing distribution spec", line)
        project_name = match.group(1)
        p = match.end()
        extras = []

        match = OBRACKET(line,p)
        if match:
            p = match.end()
            line, p, extras = scan_list(
                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
            )

        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
        specs = [(op,safe_version(val)) for op,val in specs]
        yield Requirement(project_name, specs, extras)
def _sort_dists(dists):
    # Sort `dists` in place, newest/highest-precedence first.
    tmp = [(dist.hashcmp,dist) for dist in dists]
    tmp.sort()
    dists[::-1] = [d for hc,d in tmp]
def __init__(self, project_name, specs, extras):
2346
"""DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
2347
self.unsafe_name, project_name = project_name, safe_name(project_name)
2348
self.project_name, self.key = project_name, project_name.lower()
2349
index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
2351
self.specs = [(op,ver) for parsed,trans,op,ver in index]
2352
self.index, self.extras = index, tuple(map(safe_extra,extras))
2354
self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
2355
ImmutableSet(self.extras)
2357
self.__hash = hash(self.hashCmp)
2360
specs = ','.join([''.join(s) for s in self.specs])
2361
extras = ','.join(self.extras)
2362
if extras: extras = '[%s]' % extras
2363
return '%s%s%s' % (self.project_name, extras, specs)
2365
def __eq__(self,other):
2366
return isinstance(other,Requirement) and self.hashCmp==other.hashCmp
2368
def __contains__(self, item):
    """True if `item` (a Distribution, version string, or parsed version)
    satisfies this requirement's version specs.
    """
    if isinstance(item, Distribution):
        if item.key != self.key:
            return False
        if self.index:
            item = item.parsed_version  # only get if we need it
    elif isinstance(item, basestring):
        item = parse_version(item)
    # Walk the sorted spec table, driving a tiny state machine: each row's
    # `trans` is indexed by cmp(item, parsed) in {-1, 0, 1}.
    last = None
    for parsed, trans, op, ver in self.index:
        action = trans[cmp(item, parsed)]
        if action == 'F':
            return False
        elif action == 'T':
            return True
        elif action == '+':
            last = True
        elif action == '-' or last is None:
            last = False
    if last is None:
        last = True  # no rules encountered
    return last
2388
def __repr__(self): return "Requirement.parse(%r)" % str(self)
2392
#@staticmethod
def parse(s):
    """Parse exactly one requirement from string `s` and return it.

    Raises ValueError if `s` contains zero or more than one requirement.
    """
    reqs = list(parse_requirements(s))
    if reqs:
        if len(reqs) == 1:
            return reqs[0]
        raise ValueError("Expected only one requirement", s)
    raise ValueError("No requirements found", s)
parse = staticmethod(parse)
2413
"""Get an mro for a type or classic class"""
2414
if not isinstance(cls,type):
2415
class cls(cls,object): pass
2416
return cls.__mro__[1:]
2419
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`

    Walks `ob`'s MRO from most to least specific and returns the first
    registered factory; returns None implicitly if nothing matches.
    """
    for t in _get_mro(getattr(ob, '__class__', type(ob))):
        if t in registry:
            return registry[t]
2426
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except OSError:
            # Another process may have created it between the check and
            # makedirs; only re-raise if the directory truly doesn't exist.
            if not os.path.isdir(dirname):
                raise
2432
def split_sections(s):
    """Split a string or iterable thereof into (section,content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if line.startswith("["):
            if line.endswith("]"):
                if section or content:
                    yield section, content
                section = line[1:-1].strip()
                content = []
            else:
                raise ValueError("Invalid section heading", line)
        else:
            content.append(line)

    # wrap up last segment
    yield section, content
2457
def _mkstemp(*args,**kw):
2458
from tempfile import mkstemp
2461
os.open = os_open # temporarily bypass sandboxing
2462
return mkstemp(*args,**kw)
2464
os.open = old_open # and then put it back
2467
# Set up global resource manager
_manager = ResourceManager()

def _initialize(g):
    """Export the ResourceManager's public methods into namespace dict `g`."""
    for name in dir(_manager):
        if not name.startswith('_'):
            g[name] = getattr(_manager, name)

_initialize(globals())
2475
# Prepare the master working set and make the ``require()`` API available
working_set = WorkingSet()
try:
    # Does the main program list any requirements?
    from __main__ import __requires__
except ImportError:
    pass  # No: just use the default working set based on sys.path
else:
    # Yes: ensure the requirements are met, by prefixing sys.path if necessary
    try:
        working_set.require(__requires__)
    except VersionConflict:  # try it without defaults already on sys.path
        working_set = WorkingSet([])  # by starting with an empty path
        for dist in working_set.resolve(
            parse_requirements(__requires__), Environment()
        ):
            working_set.add(dist)
        for entry in sys.path:  # add any missing entries from sys.path
            if entry not in working_set.entries:
                working_set.add_entry(entry)
        sys.path[:] = working_set.entries  # then copy back to sys.path
2497
# Expose the master working set's methods as module-level API functions.
require = working_set.require
iter_entry_points = working_set.iter_entry_points
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
run_main = run_script  # old name, kept for backward compatibility

# Activate all distributions already on sys.path, and ensure that
# all distributions added to the working set in the future (e.g. by
# calling ``require()``) will get activated as well.
add_activation_listener(lambda dist: dist.activate())

# Rebuild the entry list from sys.path so activation order matches it.
working_set.entries = []
for entry in sys.path:
    working_set.add_entry(entry)