Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(348)

Side by Side Diff: recipe_engine/third_party/pkg_resources.py

Issue 1344583003: Recipe package system. (Closed) Base URL: git@github.com:luci/recipes-py.git@master
Patch Set: Recompiled proto Created 5 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 """
2 Package resource API
3 --------------------
4
5 A resource is a logical file contained within a package, or a logical
6 subdirectory thereof. The package resource API expects resource names
7 to have their path parts separated with ``/``, *not* whatever the local
8 path separator is. Do not use os.path operations to manipulate resource
9 names being passed into the API.
10
11 The package resource API is designed to work with normal filesystem packages,
12 .egg files, and unpacked .egg files. It can also work in a limited way with
13 .zip files and with custom PEP 302 loaders that support the ``get_data()``
14 method.
15 """
16
17 from __future__ import absolute_import
18
19 import sys
20 import os
21 import io
22 import time
23 import re
24 import imp
25 import zipfile
26 import zipimport
27 import warnings
28 import stat
29 import functools
30 import pkgutil
31 import token
32 import symbol
33 import operator
34 import platform
35 import collections
36 import plistlib
37 import email.parser
38 import tempfile
39 from pkgutil import get_importer
40
# Feature flags for straddling Python 2 and 3 with a single code base.
PY3 = sys.version_info > (3,)
PY2 = not PY3

if PY3:
    from urllib.parse import urlparse, urlunparse

if PY2:
    from urlparse import urlparse, urlunparse

if PY3:
    string_types = str,
else:
    # 'unicode' does not exist on Python 3; look it up via eval() so this
    # module still parses there.
    string_types = str, eval('unicode')

# capture these to bypass sandboxing
from os import utime
try:
    from os import mkdir, rename, unlink
    WRITE_SUPPORT = True
except ImportError:
    # no write support, probably under GAE
    WRITE_SUPPORT = False

from os import open as os_open
from os.path import isdir, split

# Avoid try/except due to potential problems with delayed import mechanisms.
if sys.version_info >= (3, 3) and sys.implementation.name == "cpython":
    import importlib._bootstrap as importlib_bootstrap
else:
    importlib_bootstrap = None

try:
    import parser
except ImportError:
    pass

# Vendored copy of the 'packaging' library: PEP 440 versions and specifiers.
import setuptools._vendor.packaging.version
import setuptools._vendor.packaging.specifiers
packaging = setuptools._vendor.packaging
81
82
class PEP440Warning(RuntimeWarning):
    """Warn about a version or specifier that does not comply with PEP 440."""
88
89
class _SetuptoolsVersionMixin(object):
    """Mixin layered over a packaging version class that keeps the legacy
    setuptools behaviors working: comparison against plain tuples, indexing,
    and iteration over the old-style parsed-version tuple (with a warning)."""

    def __hash__(self):
        return super(_SetuptoolsVersionMixin, self).__hash__()

    def __lt__(self, other):
        return (tuple(self) < other if isinstance(other, tuple)
                else super(_SetuptoolsVersionMixin, self).__lt__(other))

    def __le__(self, other):
        return (tuple(self) <= other if isinstance(other, tuple)
                else super(_SetuptoolsVersionMixin, self).__le__(other))

    def __eq__(self, other):
        return (tuple(self) == other if isinstance(other, tuple)
                else super(_SetuptoolsVersionMixin, self).__eq__(other))

    def __ge__(self, other):
        return (tuple(self) >= other if isinstance(other, tuple)
                else super(_SetuptoolsVersionMixin, self).__ge__(other))

    def __gt__(self, other):
        return (tuple(self) > other if isinstance(other, tuple)
                else super(_SetuptoolsVersionMixin, self).__gt__(other))

    def __ne__(self, other):
        return (tuple(self) != other if isinstance(other, tuple)
                else super(_SetuptoolsVersionMixin, self).__ne__(other))

    def __getitem__(self, key):
        return tuple(self)[key]

    def __iter__(self):
        part_pattern = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
        canonical = {
            'pre': 'c',
            'preview': 'c',
            '-': 'final-',
            'rc': 'c',
            'dev': '@',
        }.get

        def _split(text):
            # Normalize each token: numbers are zero-padded so they compare
            # numerically as strings; everything else gets a '*' prefix.
            for token in part_pattern.split(text):
                token = canonical(token, token)
                if not token or token == '.':
                    continue
                if token[:1] in '0123456789':
                    # pad for numeric comparison
                    yield token.zfill(8)
                else:
                    yield '*' + token
            # ensure that alpha/beta/candidate are before final
            yield '*final'

        def _legacy_tuple(text):
            collected = []
            for token in _split(text.lower()):
                if token.startswith('*'):
                    # remove '-' before a prerelease tag
                    if token < '*final':
                        while collected and collected[-1] == '*final-':
                            collected.pop()
                    # remove trailing zeros from each series of numeric parts
                    while collected and collected[-1] == '00000000':
                        collected.pop()
                collected.append(token)
            return tuple(collected)

        # Warn for use of this function (fires lazily, on first next()).
        warnings.warn(
            "You have iterated over the result of "
            "pkg_resources.parse_version. This is a legacy behavior which is "
            "inconsistent with the new version class introduced in setuptools "
            "8.0. That class should be used directly instead of attempting to "
            "iterate over the result.",
            RuntimeWarning,
            stacklevel=1,
        )

        for token in _legacy_tuple(str(self)):
            yield token
185
186
class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):
    # PEP 440-compliant version that also supports the legacy tuple behaviors.
    pass
189
190
class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,
                              packaging.version.LegacyVersion):
    # Fallback for version strings that do not conform to PEP 440.
    pass
194
195
def parse_version(v):
    """Convert version string `v` into a comparable version object.

    Returns a PEP 440 ``SetuptoolsVersion`` when `v` conforms to PEP 440,
    otherwise a ``SetuptoolsLegacyVersion``.
    """
    try:
        return SetuptoolsVersion(v)
    except packaging.version.InvalidVersion:
        return SetuptoolsLegacyVersion(v)
201
202
# Registry of module-level state variables: name -> state-type tag, used by
# __getstate__/__setstate__ to pick the matching _sget_*/_sset_* helper.
_state_vars = {}

def _declare_state(vartype, **kw):
    """Bind each keyword as a module global and record its state type."""
    globals().update(kw)
    for name in kw:
        _state_vars[name] = vartype
208
def __getstate__():
    """Snapshot every registered module-level state variable.

    Each variable is captured via the ``_sget_<type>`` helper matching its
    declared state type in ``_state_vars``.
    """
    g = globals()
    return {name: g['_sget_' + kind](g[name])
            for name, kind in _state_vars.items()}
215
def __setstate__(state):
    """Restore module-level state captured by ``__getstate__``.

    Each entry is restored in place via the ``_sset_<type>`` helper matching
    the variable's declared state type.  Returns `state` unchanged.
    """
    g = globals()
    for name, value in state.items():
        g['_sset_' + _state_vars[name]](name, g[name], value)
    return state
221
def _sget_dict(mapping):
    """State getter for dicts: capture a shallow copy."""
    return mapping.copy()
224
def _sset_dict(name, target, snapshot):
    """State setter for dicts: refill the existing dict object in place."""
    target.clear()
    target.update(snapshot)
228
def _sget_object(obj):
    """State getter for objects exposing their own ``__getstate__`` hook."""
    return obj.__getstate__()
231
def _sset_object(name, target, snapshot):
    """State setter for objects exposing their own ``__setstate__`` hook."""
    target.__setstate__(snapshot)
234
235 _sget_none = _sset_none = lambda *args: None
236
237
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*. To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    match = macosVersionString.match(plat)
    if match is None or sys.platform != "darwin":
        return plat
    try:
        # Substitute the *running* OS X version for the build-time minimum.
        running = '.'.join(_macosx_vers()[:2])
        plat = 'macosx-%s-%s' % (running, match.group(3))
    except ValueError:
        # not Mac OS X
        pass
    return plat
260
# Names exported by ``from pkg_resources import *``.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info',
    'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Warnings
    'PEP440Warning',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
306
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. ``DistributionNotFound(Requirement.parse('foo'),)``
        return '%s%r' % (self.__class__.__name__, self.args)
311
class VersionConflict(ResolutionError):
    """An already-installed version conflicts with the requested version"""
    # Raised e.g. by WorkingSet.find()/resolve() when the active distribution
    # for a project does not satisfy the requested requirement.
314
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""
    # Raised by WorkingSet.resolve() when no installed or obtainable
    # distribution satisfies a requirement.
317
class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""
    # NOTE(review): raising site is outside this chunk (presumably
    # Distribution.requires) — confirm against the full module.
# Registry mapping PEP 302 loader types to IResourceProvider factories;
# populated via register_loader_type().
_provider_factories = {}

# Major Python version string, e.g. '3.11'.  Built from sys.version_info
# because ``sys.version[:3]`` truncates '3.10' and later to '3.1'.
PY_MAJOR = '%s.%s' % sys.version_info[:2]

# Distribution "precedence" constants: higher values are preferred when
# several distributions for the same project are available.
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1
328
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # A later registration for the same loader type replaces the earlier one.
    _provider_factories[loader_type] = provider_factory
337
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # For a Requirement, hand back the matching (possibly newly
        # activated) distribution rather than a module-based provider.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    if moduleOrReq not in sys.modules:
        __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
349
def _macosx_vers(_cache=[]):
    """Return the Mac OS X version as a list of string components.

    The mutable default argument deliberately serves as a per-process cache:
    the version is computed once and returned from the cache afterwards.
    """
    if _cache:
        return _cache[0]
    version = platform.mac_ver()[0]
    if not version:
        # fallback for MacPorts, where mac_ver() can come up empty
        plist = '/System/Library/CoreServices/SystemVersion.plist'
        if os.path.exists(plist) and hasattr(plistlib, 'readPlist'):
            plist_content = plistlib.readPlist(plist)
            if 'ProductVersion' in plist_content:
                version = plist_content['ProductVersion']
    _cache.append(version.split('.'))
    return _cache[0]
364
def _macosx_arch(machine):
    """Normalize a uname machine name: both PowerPC spellings become 'ppc'."""
    if machine in ('PowerPC', 'Power_Macintosh'):
        return 'ppc'
    return machine
367
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    try:
        # Python 2.7 or >=3.2
        from sysconfig import get_platform
    except ImportError:
        from distutils.util import get_platform

    plat = get_platform()
    if sys.platform != "darwin" or plat.startswith('macosx-'):
        return plat
    try:
        # Rebuild the tag from the running OS X version and machine type.
        version = _macosx_vers()
        machine = os.uname()[4].replace(" ", "_")
        return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
                                    _macosx_arch(machine))
    except ValueError:
        # if someone is running a non-Mac darwin system, this will fall
        # through to the default implementation
        pass
    return plat
392
# Platform tags like 'macosx-10.6-intel' and the pre-setuptools-0.6 legacy
# form 'darwin-8.11.1-ppc'; groups capture the version parts and machine.
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform
397
398
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided == required:
        # easy case
        return True

    req_mac = macosVersionString.match(required)
    if not req_mac:
        # XXX Linux and other platforms' special cases should go here
        return False

    # Mac OS X special cases
    prov_mac = macosVersionString.match(provided)
    if not prov_mac:
        # Backwards compatibility for packages built before setuptools 0.6,
        # which used the old 'darwin-...' designation.
        prov_darwin = darwinVersionString.match(provided)
        if prov_darwin:
            dversion = int(prov_darwin.group(1))
            macosversion = "%s.%s" % (req_mac.group(1), req_mac.group(2))
            if (dversion == 7 and macosversion >= "10.3") or \
                    (dversion == 8 and macosversion >= "10.4"):
                return True
        # egg isn't macosx or legacy darwin
        return False

    # Must be the same major version and machine type...
    if prov_mac.group(1) != req_mac.group(1) or \
            prov_mac.group(3) != req_mac.group(3):
        return False

    # ...and the required OS minor update must be >= the provided one.
    return int(prov_mac.group(2)) <= int(req_mac.group(2))
443
444
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Run the script in the *caller's* global namespace, wiping it first so
    # the script executes as if it were a fresh main module; only __name__
    # is preserved.  This is intentionally destructive to the caller's
    # module globals.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)

# backward compatibility
run_main = run_script
455
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    spec = dist
    # Coerce step by step: string -> Requirement -> Distribution.
    if isinstance(spec, string_types):
        spec = Requirement.parse(spec)
    if isinstance(spec, Requirement):
        spec = get_provider(spec)
    if isinstance(spec, Distribution):
        return spec
    raise TypeError("Expected string, Requirement, or Distribution", spec)
465
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    # Thin wrapper: locate the distribution, then delegate to it.
    return get_distribution(dist).load_entry_point(group, name)
469
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    # Thin wrapper: locate the distribution, then delegate to it.
    return get_distribution(dist).get_entry_map(group)
473
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    # Thin wrapper: locate the distribution, then delegate to it.
    return get_distribution(dist).get_entry_info(group, name)
477
478
class IMetadataProvider:
    """Interface for access to a distribution's metadata resources.

    This is an informal (PEP 302-era) interface: the methods below are
    declared without ``self`` purely as a specification; concrete providers
    implement them.
    """

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
501
502
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""
    # NOTE: resource_name values use '/'-separated paths, never the local OS
    # separator (see the module docstring).

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
529
530
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # Ordered path entries; mirrors sys.path for the default working set.
        self.entries = []
        # Map: path entry -> list of distribution keys found on that entry.
        self.entry_keys = {}
        # Map: distribution key -> the active Distribution for that project.
        self.by_key = {}
        # Callables registered via subscribe(), invoked on each activation.
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # sys.path conflicts with __requires__; rebuild from requirements.
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        else:
            return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Like the module-level run_script(): clears the caller's globals
        # (keeping only __name__) before executing the script there.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key]=1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry)

        if entry is None:
            entry = dist.location
        # Record the dist's key under both the given entry and its location.
        keys = self.entry_keys.setdefault(entry,[])
        keys2 = self.entry_keys.setdefault(dist.location,[])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None,
            replace_conflicting=False):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception if
        any requirements are found on the path that have the correct name but
        the wrong version. Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue
            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(req, ws, installer)
                    if dist is None:
                        #msg = ("The '%s' distribution was not found on this "
                        #       "system, and is required by this application.")
                        #raise DistributionNotFound(msg % req)

                        # unfortunately, zc.buildout uses a str(err)
                        # to get the name of the distribution here..
                        raise DistributionNotFound(req)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                tmpl = "%s is installed but %s is required by %s"
                args = dist, req, list(required_by.get(req, []))
                raise VersionConflict(tmpl % args)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def find_plugins(self, plugin_env, full_env=None, installer=None,
            fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError:
                    v = sys.exc_info()[1]
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # Notify every subscriber that `dist` was just activated.
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # Pickle support: snapshot the four state attributes as copies.
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )

    def __setstate__(self, e_k_b_c):
        # Pickle support: restore from the tuple built by __getstate__.
        entries, keys, by_key, callbacks = e_k_b_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
901
902
903 class Environment(object):
904 """Searchable snapshot of distributions on a search path"""
905
906 def __init__(self, search_path=None, platform=get_supported_platform(),
907 python=PY_MAJOR):
908 """Snapshot distributions available on a search path
909
910 Any distributions found on `search_path` are added to the environment.
911 `search_path` should be a sequence of ``sys.path`` items. If not
912 supplied, ``sys.path`` is used.
913
914 `platform` is an optional string specifying the name of the platform
915 that platform-specific distributions must be compatible with. If
916 unspecified, it defaults to the current platform. `python` is an
917 optional string naming the desired version of Python (e.g. ``'3.3'``);
918 it defaults to the current version.
919
920 You may explicitly set `platform` (and/or `python`) to ``None`` if you
921 wish to map *all* distributions, not just those compatible with the
922 running platform or Python version.
923 """
924 self._distmap = {}
925 self.platform = platform
926 self.python = python
927 self.scan(search_path)
928
929 def can_add(self, dist):
930 """Is distribution `dist` acceptable for this environment?
931
932 The distribution must match the platform and python version
933 requirements specified when this environment was created, or False
934 is returned.
935 """
936 return (self.python is None or dist.py_version is None
937 or dist.py_version==self.python) \
938 and compatible_platforms(dist.platform, self.platform)
939
940 def remove(self, dist):
941 """Remove `dist` from the environment"""
942 self._distmap[dist.key].remove(dist)
943
944 def scan(self, search_path=None):
945 """Scan `search_path` for distributions usable in this environment
946
947 Any distributions found are added to the environment.
948 `search_path` should be a sequence of ``sys.path`` items. If not
949 supplied, ``sys.path`` is used. Only distributions conforming to
950 the platform/python version defined at initialization are added.
951 """
952 if search_path is None:
953 search_path = sys.path
954
955 for item in search_path:
956 for dist in find_distributions(item):
957 self.add(dist)
958
959 def __getitem__(self, project_name):
960 """Return a newest-to-oldest list of distributions for `project_name`
961
962 Uses case-insensitive `project_name` comparison, assuming all the
963 project's distributions use their project's name converted to all
964 lowercase as their key.
965
966 """
967 distribution_key = project_name.lower()
968 return self._distmap.get(distribution_key, [])
969
970 def add(self, dist):
971 """Add `dist` if we ``can_add()`` it and it has not already been added
972 """
973 if self.can_add(dist) and dist.has_version():
974 dists = self._distmap.setdefault(dist.key, [])
975 if dist not in dists:
976 dists.append(dist)
977 dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
978
979 def best_match(self, req, working_set, installer=None):
980 """Find distribution best matching `req` and usable on `working_set`
981
982 This calls the ``find(req)`` method of the `working_set` to see if a
983 suitable distribution is already active. (This may raise
984 ``VersionConflict`` if an unsuitable version of the project is already
985 active in the specified `working_set`.) If a suitable distribution
986 isn't active, this method returns the newest distribution in the
987 environment that meets the ``Requirement`` in `req`. If no suitable
988 distribution is found, and `installer` is supplied, then the result of
989 calling the environment's ``obtain(req, installer)`` method will be
990 returned.
991 """
992 dist = working_set.find(req)
993 if dist is not None:
994 return dist
995 for dist in self[req.key]:
996 if dist in req:
997 return dist
998 # try to download/install
999 return self.obtain(req, installer)
1000
1001 def obtain(self, requirement, installer=None):
1002 """Obtain a distribution matching `requirement` (e.g. via download)
1003
1004 Obtain a distro that matches requirement (e.g. via download). In the
1005 base ``Environment`` class, this routine just returns
1006 ``installer(requirement)``, unless `installer` is None, in which case
1007 None is returned instead. This method is a hook that allows subclasses
1008 to attempt other ways of obtaining a distribution before falling back
1009 to the `installer` argument."""
1010 if installer is not None:
1011 return installer(requirement)
1012
1013 def __iter__(self):
1014 """Yield the unique project names of the available distributions"""
1015 for key in self._distmap.keys():
1016 if self[key]:
1017 yield key
1018
1019 def __iadd__(self, other):
1020 """In-place addition of a distribution or environment"""
1021 if isinstance(other, Distribution):
1022 self.add(other)
1023 elif isinstance(other, Environment):
1024 for project in other:
1025 for dist in other[project]:
1026 self.add(dist)
1027 else:
1028 raise TypeError("Can't add %r to environment" % (other,))
1029 return self
1030
1031 def __add__(self, other):
1032 """Add an environment or distribution to an environment"""
1033 new = self.__class__([], platform=None, python=None)
1034 for env in self, other:
1035 new += env
1036 return new
1037
1038
# XXX backward compatibility
# Historical public name for Environment; kept so old imports keep working.
AvailableDistributions = Environment
1041
1042
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    Instances are created and raised by
    ``ResourceManager.extraction_error()``, which fills in the attributes
    below before raising.

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
1057
1058
class ResourceManager:
    """Manage resource extraction and packages"""

    # Base directory for extraction; None means "use get_default_cache()".
    extraction_path = None

    def __init__(self):
        # Maps every extraction target path we handed out to 1, so
        # cleanup_resources()/set_extraction_path() know what exists.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)

        Wraps the currently-handled exception in an ``ExtractionError``
        (setting its ``manager``, ``cache_path`` and ``original_error``
        attributes) and raises it.
        """

        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        err = ExtractionError("""Can't extract file(s) to egg cache

The following error occurred while trying to extract file(s) to the Python egg
cache:

  %s

The Python egg cache directory is currently set to:

  %s

Perhaps your account does not have write access to this directory?  You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
        )
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        # FIX: previously a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit and converted them into
        # ExtractionError; only genuine errors should be translated.
        except Exception:
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = ("%s is writable by group/others and vulnerable to attack "
                "when "
                "used with get_resource_filename. Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)." % path)
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX
        # NOTE(review): intentionally unimplemented upstream; returns None
        # despite the docstring's promise of a list.
1234
def get_default_cache():
    """Determine the default cache location

    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory. On all other systems, it's "~/.python-eggs".
    """
    try:
        return os.environ['PYTHON_EGG_CACHE']
    except KeyError:
        pass

    if os.name != 'nt':
        return os.path.expanduser('~/.python-eggs')

    # XXX this may be locale-specific!
    app_data = 'Application Data'
    # Candidate (env-var tuple, subdir) pairs, tried in order of preference.
    app_homes = [
        # best option, should be locale-safe
        (('APPDATA',), None),
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE', 'HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('HOME',), None),
        # 95/98/ME
        (('WINDIR',), app_data),
    ]

    for keys, subdir in app_homes:
        dirname = ''
        for key in keys:
            if key in os.environ:
                dirname = os.path.join(dirname, os.environ[key])
            else:
                # A required variable is missing; try the next candidate.
                break
        else:
            # All variables for this candidate were present.
            if subdir:
                dirname = os.path.join(dirname, subdir)
            return os.path.join(dirname, 'Python-Eggs')
    else:
        # FIX: corrected "enviroment" typo in the user-facing error message.
        raise RuntimeError(
            "Please set the PYTHON_EGG_CACHE environment variable"
        )
1278
def safe_name(name):
    """Normalize an arbitrary string into a standard distribution name.

    Every maximal run of characters outside ``[A-Za-z0-9.]`` collapses to
    a single ``'-'``.
    """
    illegal_run = re.compile('[^A-Za-z0-9.]+')
    return illegal_run.sub('-', name)
1285
1286
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # Let the packaging library normalize anything it recognizes.
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        # Legacy fallback: spaces become dots, other illegal runs dashes.
        return re.sub('[^A-Za-z0-9.]+', '-', version.replace(' ', '.'))
1297
1298
def safe_extra(extra):
    """Normalize an arbitrary string into a standard 'extra' name.

    Runs of characters outside ``[A-Za-z0-9.]`` become a single ``'_'``
    and the whole result is lowercased.
    """
    cleaned = re.sub('[^A-Za-z0-9.]+', '_', extra)
    return cleaned.lower()
1306
1307
def to_filename(name):
    """Convert a project or version name to its filename-escaped form.

    Every ``'-'`` character is replaced with ``'_'``.
    """
    return '_'.join(name.split('-'))
1314
1315
class MarkerEvaluation(object):
    """Evaluator for PEP 426 environment markers.

    Walks the tuple tree produced by the ``parser`` module (or falls back
    to ``_markerlib`` when ``parser`` is unavailable) and evaluates the
    marker expression against the running interpreter/platform.
    """

    # Marker variable names mapped to zero-argument callables producing
    # their value for the current interpreter/platform.
    values = {
        'os_name': lambda: os.name,
        'sys_platform': lambda: sys.platform,
        'python_full_version': platform.python_version,
        'python_version': lambda: platform.python_version()[:3],
        'platform_version': platform.version,
        'platform_machine': platform.machine,
        'python_implementation': platform.python_implementation,
    }

    @classmethod
    def is_invalid_marker(cls, text):
        """
        Validate text as a PEP 426 environment marker; return an exception
        if invalid or False otherwise.
        """
        try:
            cls.evaluate_marker(text)
        except SyntaxError:
            return cls.normalize_exception(sys.exc_info()[1])
        return False

    @staticmethod
    def normalize_exception(exc):
        """
        Given a SyntaxError from a marker evaluation, normalize the error
        message:
        - Remove indications of filename and line number.
        - Replace platform-specific error messages with standard error
          messages.
        """
        subs = {
            'unexpected EOF while parsing': 'invalid syntax',
            'parenthesis is never closed': 'invalid syntax',
        }
        exc.filename = None
        exc.lineno = None
        exc.msg = subs.get(exc.msg, exc.msg)
        return exc

    @classmethod
    def and_test(cls, nodelist):
        """Evaluate an 'and' node by and-ing every operand's value."""
        # MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
        items = [
            cls.interpret(nodelist[i])
            for i in range(1, len(nodelist), 2)
        ]
        return functools.reduce(operator.and_, items)

    @classmethod
    def test(cls, nodelist):
        """Evaluate an 'or' node by or-ing every operand's value."""
        # MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
        items = [
            cls.interpret(nodelist[i])
            for i in range(1, len(nodelist), 2)
        ]
        return functools.reduce(operator.or_, items)

    @classmethod
    def atom(cls, nodelist):
        """Evaluate a parenthesized subexpression; reject any other atom."""
        t = nodelist[1][0]
        if t == token.LPAR:
            if nodelist[2][0] == token.RPAR:
                raise SyntaxError("Empty parentheses")
            return cls.interpret(nodelist[2])
        msg = "Language feature not supported in environment markers"
        raise SyntaxError(msg)

    @classmethod
    def comparison(cls, nodelist):
        """Evaluate one (non-chained) comparison between two terminals."""
        if len(nodelist) > 4:
            msg = "Chained comparison not allowed in environment markers"
            raise SyntaxError(msg)
        comp = nodelist[2][1]
        cop = comp[1]
        if comp[0] == token.NAME:
            # Two-token operators arrive as NAME nodes: 'not in' / 'is not'.
            if len(nodelist[2]) == 3:
                if cop == 'not':
                    cop = 'not in'
                else:
                    cop = 'is not'
        try:
            cop = cls.get_op(cop)
        except KeyError:
            msg = repr(cop) + " operator not allowed in environment markers"
            raise SyntaxError(msg)
        return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3]))

    @classmethod
    def get_op(cls, op):
        """Map a grammar symbol or operator string to its handler callable."""
        ops = {
            symbol.test: cls.test,
            symbol.and_test: cls.and_test,
            symbol.atom: cls.atom,
            symbol.comparison: cls.comparison,
            'not in': lambda x, y: x not in y,
            'in': lambda x, y: x in y,
            '==': operator.eq,
            '!=': operator.ne,
        }
        # 'or_test' only exists in some grammar versions.
        if hasattr(symbol, 'or_test'):
            ops[symbol.or_test] = cls.test
        return ops[op]

    @classmethod
    def evaluate_marker(cls, text, extra=None):
        """
        Evaluate a PEP 426 environment marker on CPython 2.4+.
        Return a boolean indicating the marker result in this environment.
        Raise SyntaxError if marker is invalid.

        This implementation uses the 'parser' module, which is not
        implemented on Jython and has been superseded by the 'ast' module
        in Python 2.6 and later.
        """
        return cls.interpret(parser.expr(text).totuple(1)[1])

    @classmethod
    def _markerlib_evaluate(cls, text):
        """
        Evaluate a PEP 426 environment marker using markerlib.
        Return a boolean indicating the marker result in this environment.
        Raise SyntaxError if marker is invalid.
        """
        import _markerlib
        # markerlib implements Metadata 1.2 (PEP 345) environment markers.
        # Translate the variables to Metadata 2.0 (PEP 426).
        env = _markerlib.default_environment()
        for key in env.keys():
            new_key = key.replace('.', '_')
            env[new_key] = env.pop(key)
        try:
            result = _markerlib.interpret(text, env)
        except NameError:
            e = sys.exc_info()[1]
            raise SyntaxError(e.args[0])
        return result

    if 'parser' not in globals():
        # Fall back to the less-complete _markerlib implementation if the
        # 'parser' module is not available (e.g. on Jython).
        evaluate_marker = _markerlib_evaluate

    @classmethod
    def interpret(cls, nodelist):
        """Recursively evaluate a non-terminal parse node to a boolean."""
        # Collapse single-child wrapper nodes before dispatching.
        while len(nodelist)==2: nodelist = nodelist[1]
        try:
            op = cls.get_op(nodelist[0])
        except KeyError:
            raise SyntaxError("Comparison or logical expression expected")
        return op(nodelist)

    @classmethod
    def evaluate(cls, nodelist):
        """Evaluate a terminal: a known variable NAME or a plain string."""
        while len(nodelist)==2: nodelist = nodelist[1]
        kind = nodelist[0]
        name = nodelist[1]
        if kind==token.NAME:
            try:
                op = cls.values[name]
            except KeyError:
                raise SyntaxError("Unknown name %r" % name)
            return op()
        if kind==token.STRING:
            s = nodelist[1]
            if not cls._safe_string(s):
                raise SyntaxError(
                    "Only plain strings allowed in environment markers")
            # Strip the surrounding quote characters.
            return s[1:-1]
        msg = "Language feature not supported in environment markers"
        raise SyntaxError(msg)

    @staticmethod
    def _safe_string(cand):
        # Accept only simple single- or double-quoted literals: no triple
        # quotes and no backslash escapes.
        return (
            cand[:1] in "'\"" and
            not cand.startswith('"""') and
            not cand.startswith("'''") and
            '\\' not in cand
        )
1498
# Module-level convenience aliases for the MarkerEvaluation classmethods.
invalid_marker = MarkerEvaluation.is_invalid_marker
evaluate_marker = MarkerEvaluation.evaluate_marker
1501
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Set by EggProvider subclasses when metadata lives inside an egg.
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        # Resources are resolved relative to the module's containing
        # directory, using its PEP 302 loader (if any) for data access.
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        return self.egg_info and self._has(self._fn(self.egg_info, name))

    # On Python 2, metadata is returned as raw bytes/str; on Python 3 it
    # is decoded as UTF-8 text.
    if sys.version_info <= (3,):
        def get_metadata(self, name):
            if not self.egg_info:
                return ""
            return self._get(self._fn(self.egg_info, name))
    else:
        def get_metadata(self, name):
            if not self.egg_info:
                return ""
            return self._get(self._fn(self.egg_info, name)).decode("utf-8")

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        # Scripts are stored under the egg metadata 'scripts/' prefix.
        script = 'scripts/'+script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # Normalize all newline conventions to '\n' before compiling.
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Real file on disk: execute its current contents.
            source = open(script_filename).read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # No real file (e.g. zipped egg): seed linecache so tracebacks
            # can still display the script's source lines.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename,'exec')
            exec(script_code, namespace, namespace)

    # The filesystem primitives below must be supplied by a registered
    # provider for the concrete loader type.
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Join a '/'-separated resource name onto `base` using the local
        # path separator.
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
1602
1603 register_loader_type(object, NullProvider)
1604
1605
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Metadata may live inside a "basket" of nested eggs, so walk up
        # from module_path (rather than .archive) until a '.egg' path
        # component is found or the filesystem root is reached.
        path = self.module_path
        previous = None
        while path != previous:
            if path.lower().endswith('.egg'):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            previous = path
            path, _ = os.path.split(path)
1626
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    # Filesystem primitives map straight onto the os module.
    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def _get(self, path):
        handle = open(path, 'rb')
        try:
            return handle.read()
        finally:
            handle.close()

    def get_resource_stream(self, manager, resource_name):
        # Hand back the raw binary file object; the caller closes it.
        return open(self._fn(self.module_path, resource_name), 'rb')
1645
# Plain filesystem imports (modules whose __loader__ is None) use
# DefaultProvider; so do importlib's source-file loaders when present.
register_loader_type(type(None), DefaultProvider)

if importlib_bootstrap is not None:
    register_loader_type(importlib_bootstrap.SourceFileLoader, DefaultProvider)
1650
1651
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _listdir(self, path):
        return []

    def _get(self, path):
        return ''
1662
1663 empty_provider = EmptyProvider()
1664
1665
class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with ContextualZipFile(path) as zfile:
            entries = []
            for name in zfile.namelist():
                key = name.replace('/', os.sep)
                entries.append((key, zfile.getinfo(name)))
        return dict(entries)

    load = build
1691
1692
class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """
    # Cache entry: the parsed manifest plus the file mtime it was built from.
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime
        cached = self.get(path)
        # Rebuild when never seen, or when the archive changed on disk.
        if cached is None or cached.mtime != mtime:
            cached = self.manifest_mod(self.build(path), mtime)
            self[path] = cached
        return cached.manifest
1711
1712
class ContextualZipFile(zipfile.ZipFile):
    """
    Supplement ZipFile class to support context manager for Python 2.6
    """

    def __new__(cls, *args, **kwargs):
        """
        Construct a ZipFile or ContextualZipFile as appropriate
        """
        # Modern ZipFile already supports the context-manager protocol;
        # hand back a plain ZipFile then, so this shim only activates on
        # Python 2.6.
        if hasattr(zipfile.ZipFile, '__exit__'):
            return zipfile.ZipFile(*args, **kwargs)
        return super(ContextualZipFile, cls).__new__(cls)

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()
1731
1732
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # Lazily-built eager-resource list; shared manifest cache per class.
    eagers = None
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        EggProvider.__init__(self, module)
        # Prefix every virtual path with "<archive>/" for subpath checks.
        self.zip_pre = self.loader.archive+os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre+zip_path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )

    @property
    def zipinfo(self):
        # ZipInfo manifest for the archive, via the memoized class cache.
        return self._zip_manifests.load(self.loader.archive)

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # Eager resources must all be extracted together whenever any
            # one of them is requested.
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    @staticmethod
    def _get_date_and_size(zip_stat):
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size

    def _extract_resource(self, manager, zip_path):
        # Directories are extracted recursively, file by file.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            # return the extracted directory name
            return os.path.dirname(last)

        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])

        if not WRITE_SUPPORT:
            raise IOError('"os.rename" and "os.unlink" are not supported '
                          'on this platform')
        try:

            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            if self._is_current(real_path, zip_path):
                return real_path

            # Write to a temp name first, then rename into place, so a
            # concurrent reader never sees a partially-written file.
            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            # Stamp the zip entry's timestamp so _is_current() can compare.
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name=='nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            # report a user-friendly error
            manager.extraction_error()

        return real_path

    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        if stat.st_size!=size or stat.st_mtime!=timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents

    def _get_eager_resources(self):
        # Eager-resource names come from two optional metadata files;
        # the combined list is cached on the instance.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Directory index: maps each parent path to its child names.
        # Built once from the zip manifest and cached as _dirindex.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        # Ancestors already indexed; just add the child.
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))
1897
1898 register_loader_type(zipimport.zipimporter, ZipProvider)
1899
1900
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    Every data/metadata request other than PKG-INFO is rejected; PKG-INFO
    is reported as existing and resolves to the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def has_metadata(self, name):
        return name == 'PKG-INFO'

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")
        with open(self.path, 'rU') as f:
            return f.read()

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))
1928
1929
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(
            base_dir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `path` holds the importable code; `egg_info` holds the metadata.
        self.egg_info = egg_info
        self.module_path = path
1953
1954
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        # Everything inside the archive is addressed relative to this prefix.
        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        prefix = importer.prefix
        if prefix:
            self.module_path = os.path.join(importer.archive, prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()
1968
1969 _declare_state('dict', _distribution_finders = {})
1970
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    # Later registrations for the same importer type replace earlier ones.
    _distribution_finders[importer_type] = distribution_finder
1979
1980
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    # Pick the finder registered for this path item's importer type
    # (walking the importer's MRO) and delegate the scan to it.
    importer = get_importer(path_item)
    distribution_finder = _find_adapter(_distribution_finders, importer)
    return distribution_finder(importer, path_item, only)
1986
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    # Recurse into any .egg archives stored inside this zip.
    for subitem in metadata.resource_listdir('/'):
        if not subitem.endswith('.egg'):
            continue
        subpath = os.path.join(path_item, subitem)
        sub_importer = zipimport.zipimporter(subpath)
        for dist in find_eggs_in_zip(sub_importer, subpath):
            yield dist
2006
2007 register_finder(zipimport.zipimporter, find_eggs_in_zip)
2008
def find_nothing(importer, path_item, only=False):
    """Fallback finder: yields no distributions for unknown importer types."""
    return ()
2011 register_finder(object, find_nothing)
2012
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            # unpacked egg
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            for entry in os.listdir(path_item):
                lower = entry.lower()
                if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        # standalone PKG-INFO-style file
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item, entry, metadata, precedence=DEVELOP_DIST
                    )
                elif not only and lower.endswith('.egg'):
                    # nested egg (file or directory): delegate to the
                    # registered finder for its importer type
                    dists = find_distributions(os.path.join(path_item, entry))
                    for dist in dists:
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    with open(os.path.join(path_item, entry)) as entry_file:
                        entry_lines = entry_file.readlines()
                    # an .egg-link names a single target path on its first
                    # non-blank line; stop after processing it
                    for line in entry_lines:
                        if not line.strip():
                            continue
                        path = os.path.join(path_item, line.rstrip())
                        dists = find_distributions(path)
                        for item in dists:
                            yield item
                        break
# Directory-based sys.path entries are scanned by find_on_path.
register_finder(pkgutil.ImpImporter, find_on_path)

if importlib_bootstrap is not None:
    # Python 3.3+ uses importlib's FileFinder for directory entries.
    register_finder(importlib_bootstrap.FileFinder, find_on_path)

# State for the namespace-package machinery below.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
2061
2062
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # Later registrations for the same importer type replace earlier ones.
    _namespace_handlers[importer_type] = namespace_handler
2079
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""

    importer = get_importer(path_item)
    if importer is None:
        # this path item cannot be imported from at all
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        # the package does not exist under this path item
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # create an empty namespace module; its __path__ is filled in below
        module = sys.modules[packageName] = imp.new_module(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    # ask the importer-type-specific handler for the subpath to add
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        # loading may replace __path__, so re-merge any entries that
        # were present before load_module ran
        path = module.__path__
        path.append(subpath)
        loader.load_module(packageName)
        for path_item in path:
            if path_item not in module.__path__:
                module.__path__.append(path_item)
    return subpath
2106
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # The global import lock guards _namespace_packages and sys.modules.
    imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            # already declared; nothing to do
            return

        path, parent = sys.path, None
        if '.' in packageName:
            # recursively declare (and import) the parent namespace first,
            # then search the parent's __path__ rather than sys.path
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent,[]).append(packageName)
        _namespace_packages.setdefault(packageName,[])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        imp.release_lock()
2138
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent,()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                # recurse so sub-namespace-packages pick up the new entry too
                fixup_namespace_packages(subpath, package)
    finally:
        imp.release_lock()
2149
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""
    candidate = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(candidate)
    for existing in module.__path__:
        if _normalize_cached(existing) == normalized:
            # an equivalent path is already present; contribute nothing
            return None
    # Only return the path if it's not already there
    return candidate
2161
# Filesystem and zipfile importers both use the filesystem-style handler.
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)

if importlib_bootstrap is not None:
    # Python 3.3+ uses importlib's FileFinder for directory entries.
    register_namespace_handler(importlib_bootstrap.FileFinder, file_ns_handler)
2167
2168
def null_ns_handler(importer, path_item, packageName, module):
    """Fallback namespace handler: contributes no subpath for any importer."""
    return None
2171
2172 register_namespace_handler(object, null_ns_handler)
2173
2174
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    # Resolve symlinks and relative components, then apply the platform's
    # case normalization so equivalent paths compare equal.
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
2178
def _normalize_cached(filename, _cache={}):
    """Memoized normalize_path().

    NOTE: the mutable default argument is deliberate -- it is the
    process-lifetime memo table shared by all callers.
    """
    if filename in _cache:
        return _cache[filename]
    result = _cache[filename] = normalize_path(filename)
    return result
2185
2186 def _set_parent_ns(packageName):
2187 parts = packageName.split('.')
2188 name = parts.pop()
2189 if parts:
2190 parent = '.'.join(parts)
2191 setattr(sys.modules[parent], name, sys.modules[packageName])
2192
2193
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, string_types):
        for raw in strs.splitlines():
            stripped = raw.strip()
            # skip blank lines/comments
            if stripped and not stripped.startswith('#'):
                yield stripped
    else:
        # a (possibly nested) iterable of strings: flatten recursively
        for element in strs:
            for stripped in yield_lines(element):
                yield stripped
2206
# Tokenizer building blocks for parse_requirements() / EntryPoint.parse().
# whitespace and comment
LINE_END = re.compile(r"\s*(#.*)?$").match
# line continuation
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match
# Distribution or extra
DISTRO = re.compile(r"\s*((\w|[-.])+)").match
# ver. info
VERSION = re.compile(r"\s*(<=?|>=?|===?|!=|~=)\s*((\w|[-.*_!+])+)").match
# comma between items
COMMA = re.compile(r"\s*,").match
# brackets delimiting an [extras] list
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
# dotted module path, e.g. "pkg.mod"
MODULE = re.compile(r"\w+(\.\w+)*$").match
# egg filename parts: name[-version[-pyX.Y[-platform]]]
EGG_NAME = re.compile(
    r"(?P<name>[^-]+)"
    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
    re.VERBOSE | re.IGNORECASE
).match
2225
2226
class EntryPoint(object):
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # Validate/normalize extras by round-tripping through a dummy
        # requirement "x[extra1,extra2]".
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        self.dist = dist

    def __str__(self):
        # Render in the same "name = module:attrs [extras]" syntax that
        # parse() accepts.
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, env=None, installer=None):
        # Optionally resolve and activate this entry point's requirements
        # before importing it.
        if require:
            self.require(env, installer)
        entry = __import__(self.module_name, globals(), globals(),
            ['__name__'])
        # walk the dotted attribute chain, e.g. "Class.method"
        for attr in self.attrs:
            try:
                entry = getattr(entry, attr)
            except AttributeError:
                raise ImportError("%r has no %r attribute" % (entry, attr))
        return entry

    def require(self, env=None, installer=None):
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer)
        # add every resolved distribution to the global working set
        list(map(working_set.add, items))

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        try:
            attrs = extras = ()
            name, value = src.split('=', 1)
            if '[' in value:
                value, extras = value.split('[', 1)
                # reuse requirement parsing to validate the extras list;
                # version specs are not allowed inside the brackets
                req = Requirement.parse("x[" + extras)
                if req.specs:
                    raise ValueError
                extras = req.extras
            if ':' in value:
                value, attrs = value.split(':', 1)
                if not MODULE(attrs.rstrip()):
                    raise ValueError
                attrs = attrs.rstrip().split('.')
        except ValueError:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        else:
            return cls(name.strip(), value.strip(), attrs, extras, dist)

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name]=ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        # `data` may be a dict of group -> lines, or ini-style text with
        # [group] section headers.
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps
2331
2332
2333 def _remove_md5_fragment(location):
2334 if not location:
2335 return ''
2336 parsed = urlparse(location)
2337 if parsed[-1].startswith('md5='):
2338 return urlunparse(parsed[:-1] + ('',))
2339 return location
2340
2341
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    # Name of the metadata file holding the Version: header.
    PKG_INFO = 'PKG-INFO'

    def __init__(self, location=None, metadata=None, project_name=None,
            version=None, py_version=PY_MAJOR, platform=None,
            precedence=EGG_DIST):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        # Metadata access is delegated to this provider (see __getattr__).
        self._provider = metadata or empty_provider

    @classmethod
    def from_location(cls, location, basename, metadata=None,**kw):
        # Parse name/version/pyver/platform out of an egg-style basename and
        # dispatch to the Distribution subclass matching the extension.
        project_name, version, py_version, platform = [None]*4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in _distributionImpl:
            # .dist-info gets much metadata differently
            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name','ver','pyver','plat'
                )
            cls = _distributionImpl[ext.lower()]
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )

    @property
    def hashcmp(self):
        # Tuple used for hashing and the ordering comparisons below.
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version,
            self.platform,
        )

    def __hash__(self):
        return hash(self.hashcmp)

    def __lt__(self, other):
        return self.hashcmp < other.hashcmp

    def __le__(self, other):
        return self.hashcmp <= other.hashcmp

    def __gt__(self, other):
        return self.hashcmp > other.hashcmp

    def __ge__(self, other):
        return self.hashcmp >= other.hashcmp

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp

    def __ne__(self, other):
        return not self == other

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed. (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    @property
    def key(self):
        try:
            return self._key
        except AttributeError:
            # lowercased project name, cached after first access
            self._key = key = self.project_name.lower()
            return key

    @property
    def parsed_version(self):
        if not hasattr(self, "_parsed_version"):
            self._parsed_version = parse_version(self.version)
            if isinstance(
                    self._parsed_version, packaging.version.LegacyVersion):
                # While an empty version is technically a legacy version and
                # is not a valid PEP 440 version, it's also unlikely to
                # actually come from someone and instead it is more likely that
                # it comes from setuptools attempting to parse a filename and
                # including it in the list. So for that we'll gate this warning
                # on if the version is anything at all or not.
                if self.version:
                    warnings.warn(
                        "'%s (%s)' is being parsed as a legacy, non PEP 440, "
                        "version. You may find odd behavior and sort order. "
                        "In particular it will be sorted as less than 0.0. It "
                        "is recommend to migrate to PEP 440 compatible "
                        "versions." % (
                            self.project_name, self.version,
                        ),
                        PEP440Warning,
                    )

        return self._parsed_version

    @property
    def version(self):
        try:
            return self._version
        except AttributeError:
            # not given to __init__: fall back to the Version: header in
            # PKG-INFO/METADATA
            for line in self._get_metadata(self.PKG_INFO):
                if line.lower().startswith('version:'):
                    self._version = safe_version(line.split(':',1)[1].strip())
                    return self._version
            else:
                tmpl = "Missing 'Version:' header and/or %s file"
                raise ValueError(tmpl % self.PKG_INFO, self)

    @property
    def _dep_map(self):
        # Lazily-built map of extra-name (or None) -> list of Requirements,
        # read from requires.txt / depends.txt.
        try:
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra, reqs in split_sections(self._get_metadata(name)):
                    if extra:
                        if ':' in extra:
                            # section header of the form [extra:marker]
                            extra, marker = extra.split(':', 1)
                            if invalid_marker(marker):
                                # XXX warn
                                reqs=[]
                            elif not evaluate_marker(marker):
                                reqs=[]
                        extra = safe_extra(extra) or None
                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
            return dm

    def requires(self, extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None, ()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

    def _get_metadata(self, name):
        # Yields nothing when the metadata file is absent.
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

    def activate(self, path=None):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path)
        if path is sys.path:
            # also refresh namespace-package state for the real import path
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    declare_namespace(pkg)

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-' + self.platform
        return filename

    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self, self.location)
        else:
            return str(self)

    def __str__(self):
        try:
            version = getattr(self, 'version', None)
        except ValueError:
            version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name, version)

    def __getattr__(self, attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError(attr)
        return getattr(self._provider, attr)

    @classmethod
    def from_filename(cls, filename, metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        # "===" pins non-PEP-440 (legacy) versions exactly
        if isinstance(self.parsed_version, packaging.version.Version):
            spec = "%s==%s" % (self.project_name, self.parsed_version)
        else:
            spec = "%s===%s" % (self.project_name, self.parsed_version)

        return Requirement.parse(spec)

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group, name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group, name),))
        return ep.load()

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            # parsed lazily from entry_points.txt and cached
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group,{})
        return ep_map

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc = None):
        """Insert self.location in path before its nearest parent directory"""

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        npath= [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                # already present at position p
                break
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # neither loc nor its parent dir is on the path: append
            if path is sys.path:
                self.check_version_conflict()
            path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p+1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np

        return

    def check_version_conflict(self):
        # Warn if any of this distribution's top-level modules were already
        # imported from somewhere else.
        if self.key == 'setuptools':
            # ignore the inevitable setuptools self-conflicts :(
            return

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                    or modname in _namespace_packages):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                    fn.startswith(self.location)):
                # imported from this very distribution: no conflict
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

    def has_version(self):
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for " + repr(self))
            return False
        return True

    def clone(self,**kw):
        """Copy this distribution, substituting in any changed keyword args"""
        names = 'project_name version py_version platform location precedence'
        for attr in names.split():
            kw.setdefault(attr, getattr(self, attr, None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    @property
    def extras(self):
        # all declared extra names (the None key holds unconditional deps)
        return [dep for dep in self._dep_map if dep]
2662
2663
class DistInfoDistribution(Distribution):
    """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
    PKG_INFO = 'METADATA'
    # Matches "(1.0" / ", 2.0)" style version numbers so "==" can be prefixed.
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            # METADATA is RFC 822-style; parse it with the email parser
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info

    @property
    def _dep_map(self):
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _preparse_requirement(self, requires_dist):
        """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz')
        Split environment marker, add == prefix to version specifiers as
        necessary, and remove parenthesis.
        """
        parts = requires_dist.split(';', 1) + ['']
        distvers = parts[0].strip()
        mark = parts[1].strip()
        distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers)
        distvers = distvers.replace('(', '').replace(')', '')
        return (distvers, mark)

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        from _markerlib import compile as compile_marker
        dm = self.__dep_map = {None: []}

        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            distvers, mark = self._preparse_requirement(req)
            parsed = next(parse_requirements(distvers))
            parsed.marker_fn = compile_marker(mark)
            reqs.append(parsed)

        def reqs_for_extra(extra):
            # requirements whose marker passes when `extra` is active
            for req in reqs:
                if req.marker_fn(override={'extra':extra}):
                    yield req

        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            extra = safe_extra(extra.strip())
            # per-extra deps exclude what is already required unconditionally
            dm[extra] = list(frozenset(reqs_for_extra(extra)) - common)

        return dm
2725
2726
# Maps metadata extension to the Distribution subclass that reads it
# (used by Distribution.from_location).
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': Distribution,
    '.dist-info': DistInfoDistribution,
}
2732
2733
def issue_warning(*args,**kw):
    """Emit a warning attributed to the first caller outside this module."""
    level = 1
    g = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        # walked off the top of the stack; warn from the outermost frame
        pass
    warnings.warn(stacklevel=level + 1, *args, **kw)
2745
2746
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    def scan_list(ITEM, TERMINATOR, line, p, groups, item_name):
        # Scan a comma-separated list of ITEM tokens starting at position
        # `p` of `line`, stopping at TERMINATOR; consumes continuation
        # lines from the enclosing `lines` iterator as needed.  Returns
        # the (possibly new) current line, the new position, and the
        # captured items.

        items = []

        while not TERMINATOR(line, p):
            if CONTINUE(line, p):
                try:
                    line = next(lines)
                    p = 0
                except StopIteration:
                    raise ValueError(
                        "\\ must not appear on the last nonblank line"
                    )

            match = ITEM(line, p)
            if not match:
                msg = "Expected " + item_name + " in"
                raise ValueError(msg, line, "at", line[p:])

            items.append(match.group(*groups))
            p = match.end()

            match = COMMA(line, p)
            if match:
                # skip the comma
                p = match.end()
            elif not TERMINATOR(line, p):
                msg = "Expected ',' or end-of-list in"
                raise ValueError(msg, line, "at", line[p:])

        match = TERMINATOR(line, p)
        # skip the terminator, if any
        if match:
            p = match.end()
        return line, p, items

    for line in lines:
        match = DISTRO(line)
        if not match:
            raise ValueError("Missing distribution spec", line)
        project_name = match.group(1)
        p = match.end()
        extras = []

        # optional "[extra1,extra2]" list after the project name
        match = OBRACKET(line, p)
        if match:
            p = match.end()
            line, p, extras = scan_list(
                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
            )

        # remainder of the line is a comma-separated list of version specs
        line, p, specs = scan_list(VERSION, LINE_END, line, p, (1, 2),
            "version spec")
        specs = [(op, val) for op, val in specs]
        yield Requirement(project_name, specs, extras)
2810
2811
class Requirement:
    # A parsed requirement: project name, version specifiers, and extras.

    def __init__(self, project_name, specs, extras):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        self.unsafe_name, project_name = project_name, safe_name(project_name)
        self.project_name, self.key = project_name, project_name.lower()
        # Join ("op", "version") pairs into a PEP 440 SpecifierSet,
        # e.g. [(">=", "1.0"), ("<", "2.0")] -> ">=1.0,<2.0".
        self.specifier = packaging.specifiers.SpecifierSet(
            ",".join(["".join([x, y]) for x, y in specs])
        )
        self.specs = specs
        self.extras = tuple(map(safe_extra, extras))
        # Precomputed identity tuple used by __eq__ and __hash__.
        self.hashCmp = (
            self.key,
            self.specifier,
            frozenset(self.extras),
        )
        self.__hash = hash(self.hashCmp)

    def __str__(self):
        extras = ','.join(self.extras)
        if extras:
            extras = '[%s]' % extras
        return '%s%s%s' % (self.project_name, extras, self.specifier)

    def __eq__(self, other):
        return (
            isinstance(other, Requirement) and
            self.hashCmp == other.hashCmp
        )

    def __contains__(self, item):
        # Accepts either a Distribution or a version string.
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False

            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self): return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        # Parse exactly one requirement from `s`; raises ValueError for
        # zero or more than one.
        reqs = list(parse_requirements(s))
        if reqs:
            if len(reqs) == 1:
                return reqs[0]
            raise ValueError("Expected only one requirement", s)
        raise ValueError("No requirements found", s)
2866
2867
2868 def _get_mro(cls):
2869 """Get an mro for a type or classic class"""
2870 if not isinstance(cls, type):
2871 class cls(cls, object): pass
2872 return cls.__mro__[1:]
2873 return cls.__mro__
2874
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # Walk the object's MRO and return the first registered adapter;
    # falls through (returning None) when nothing matches.
    mro = _get_mro(getattr(ob, '__class__', type(ob)))
    for candidate in mro:
        if candidate in registry:
            return registry[candidate]
2880
2881
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    if os.path.isdir(parent):
        return
    # create the parent and any missing ancestors
    os.makedirs(parent)
2887
2888
def _bypass_ensure_directory(path, mode=0o777):
    """Sandbox-bypassing version of ensure_directory()

    Creates the parent directory of `path` (and any missing ancestors)
    using the raw captured ``mkdir`` so setuptools' sandbox hooks are
    bypassed.

    `mode` is applied to every directory created; previously the
    recursive call dropped it, so ancestor directories were always
    created with the default 0o777 even when a stricter mode was
    requested.
    """
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if dirname and filename and not isdir(dirname):
        # create missing ancestors first, propagating the requested mode
        _bypass_ensure_directory(dirname, mode)
        mkdir(dirname, mode)
2897
2898
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # flush the previous segment before starting a new section
        if section or content:
            yield section, content
        section = line[1:-1].strip()
        content = []

    # wrap up last segment
    yield section, content
2923
def _mkstemp(*args,**kw):
    """tempfile.mkstemp() with the sandbox's os.open hook bypassed."""
    saved_open = os.open
    try:
        # temporarily bypass sandboxing
        os.open = os_open
        return tempfile.mkstemp(*args,**kw)
    finally:
        # and then put it back
        os.open = saved_open
2933
2934
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
# (Applications can still re-enable it explicitly via the warnings module.)
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
2940
2941
# Set up global resource manager (deliberately not state-saved)
_manager = ResourceManager()
def _initialize(g):
    # Re-export every public ResourceManager method (resource_string,
    # resource_filename, ...) as a module-level function in namespace `g`.
    for name in dir(_manager):
        if not name.startswith('_'):
            g[name] = getattr(_manager, name)
_initialize(globals())
2949
# Prepare the master working set and make the ``require()`` API available
working_set = WorkingSet._build_master()
_declare_state('object', working_set=working_set)

# Module-level convenience aliases bound to the master working set.
require = working_set.require
iter_entry_points = working_set.iter_entry_points
add_activation_listener = working_set.subscribe
run_script = working_set.run_script
# backward compatibility
run_main = run_script
# Activate all distributions already on sys.path, and ensure that
# all distributions added to the working set in the future (e.g. by
# calling ``require()``) will get activated as well.
add_activation_listener(lambda dist: dist.activate())
working_set.entries=[]
# match order
list(map(working_set.add_entry, sys.path))
OLDNEW
« no previous file with comments | « recipe_engine/third_party/mock-1.0.1/mock.py ('k') | recipe_engine/third_party/setuptools/__init__.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698