Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(27)

Side by Side Diff: third_party/google-endpoints/pkg_resources/__init__.py

Issue 2666783008: Add google-endpoints to third_party/. (Closed)
Patch Set: Created 3 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 # coding: utf-8
2 """
3 Package resource API
4 --------------------
5
6 A resource is a logical file contained within a package, or a logical
7 subdirectory thereof. The package resource API expects resource names
8 to have their path parts separated with ``/``, *not* whatever the local
9 path separator is. Do not use os.path operations to manipulate resource
10 names being passed into the API.
11
12 The package resource API is designed to work with normal filesystem packages,
13 .egg files, and unpacked .egg files. It can also work in a limited way with
14 .zip files and with custom PEP 302 loaders that support the ``get_data()``
15 method.
16 """
17
18 from __future__ import absolute_import
19
20 import sys
21 import os
22 import io
23 import time
24 import re
25 import types
26 import zipfile
27 import zipimport
28 import warnings
29 import stat
30 import functools
31 import pkgutil
32 import operator
33 import platform
34 import collections
35 import plistlib
36 import email.parser
37 import tempfile
38 import textwrap
39 import itertools
40 from pkgutil import get_importer
41
42 try:
43 import _imp
44 except ImportError:
45 # Python 3.2 compatibility
46 import imp as _imp
47
48 import six
49 from six.moves import urllib, map, filter
50
51 # capture these to bypass sandboxing
52 from os import utime
53 try:
54 from os import mkdir, rename, unlink
55 WRITE_SUPPORT = True
56 except ImportError:
57 # no write support, probably under GAE
58 WRITE_SUPPORT = False
59
60 from os import open as os_open
61 from os.path import isdir, split
62
63 try:
64 import importlib.machinery as importlib_machinery
65 # access attribute to force import under delayed import mechanisms.
66 importlib_machinery.__name__
67 except ImportError:
68 importlib_machinery = None
69
70 import packaging.version
71 import packaging.specifiers
72 import packaging.requirements
73 import packaging.markers
74 import appdirs
75
# pkg_resources supports Python 2 and Python 3.3+; the 3.0-3.2 range is
# explicitly rejected at import time.
if (3, 0) < sys.version_info < (3, 3):
    raise RuntimeError("Python 3.3 or later is required")

# declare some globals that will be defined later to
# satisfy the linters.
# (Both are rebound when the module-level working set is initialized.)
require = None
working_set = None
83
84
class PEP440Warning(RuntimeWarning):
    """
    Used when there is an issue with a version or specifier not complying with
    PEP 440.
    """
    # Inherits RuntimeWarning; carries no extra state of its own.
91
class _SetuptoolsVersionMixin(object):
    """Mixin adding legacy tuple-comparison and iteration behavior to
    ``packaging`` version classes.

    Pre-setuptools-8.0 code compared ``parse_version()`` results against
    plain tuples and iterated over them; this mixin keeps that working
    (with a deprecation warning on iteration) on top of the modern
    PEP 440 version objects.
    """

    def __hash__(self):
        # Defining __eq__ below would otherwise disable hashing; delegate
        # explicitly to the underlying version class.
        return super(_SetuptoolsVersionMixin, self).__hash__()

    def __lt__(self, other):
        # Tuples are compared against the legacy tuple form of this
        # version; anything else uses the PEP 440 ordering.
        if isinstance(other, tuple):
            return tuple(self) < other
        else:
            return super(_SetuptoolsVersionMixin, self).__lt__(other)

    def __le__(self, other):
        if isinstance(other, tuple):
            return tuple(self) <= other
        else:
            return super(_SetuptoolsVersionMixin, self).__le__(other)

    def __eq__(self, other):
        if isinstance(other, tuple):
            return tuple(self) == other
        else:
            return super(_SetuptoolsVersionMixin, self).__eq__(other)

    def __ge__(self, other):
        if isinstance(other, tuple):
            return tuple(self) >= other
        else:
            return super(_SetuptoolsVersionMixin, self).__ge__(other)

    def __gt__(self, other):
        if isinstance(other, tuple):
            return tuple(self) > other
        else:
            return super(_SetuptoolsVersionMixin, self).__gt__(other)

    def __ne__(self, other):
        if isinstance(other, tuple):
            return tuple(self) != other
        else:
            return super(_SetuptoolsVersionMixin, self).__ne__(other)

    def __getitem__(self, key):
        # Indexing works on the legacy tuple form (triggers the warning
        # in __iter__ below).
        return tuple(self)[key]

    def __iter__(self):
        """Yield the legacy (pre-8.0) tuple components of this version,
        emitting a RuntimeWarning about the deprecated behavior."""
        component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
        replace = {
            'pre': 'c',
            'preview': 'c',
            '-': 'final-',
            'rc': 'c',
            'dev': '@',
        }.get

        def _parse_version_parts(s):
            # Split into numeric and alphabetic components, normalizing
            # prerelease spellings via the `replace` map above.
            for part in component_re.split(s):
                part = replace(part, part)
                if not part or part == '.':
                    continue
                if part[:1] in '0123456789':
                    # pad for numeric comparison
                    yield part.zfill(8)
                else:
                    yield '*' + part

            # ensure that alpha/beta/candidate are before final
            yield '*final'

        def old_parse_version(s):
            parts = []
            for part in _parse_version_parts(s.lower()):
                if part.startswith('*'):
                    # remove '-' before a prerelease tag
                    if part < '*final':
                        while parts and parts[-1] == '*final-':
                            parts.pop()
                    # remove trailing zeros from each series of numeric parts
                    while parts and parts[-1] == '00000000':
                        parts.pop()
                parts.append(part)
            return tuple(parts)

        # Warn for use of this function
        warnings.warn(
            "You have iterated over the result of "
            "pkg_resources.parse_version. This is a legacy behavior which is "
            "inconsistent with the new version class introduced in setuptools "
            "8.0. In most cases, conversion to a tuple is unnecessary. For "
            "comparison of versions, sort the Version instances directly. If "
            "you have another use case requiring the tuple, please file a "
            "bug with the setuptools project describing that need.",
            RuntimeWarning,
            stacklevel=1,
        )

        for part in old_parse_version(str(self)):
            yield part
188
189
class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):
    """A PEP 440 compliant version with legacy tuple-compat behavior."""
    pass
192
193
class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,
                              packaging.version.LegacyVersion):
    """A non-PEP-440 version string, with legacy tuple-compat behavior."""
    pass
197
198
def parse_version(v):
    """Parse a version string into a comparable version object.

    Prefer a PEP 440 compliant ``SetuptoolsVersion``; strings that do not
    conform fall back to ``SetuptoolsLegacyVersion``.
    """
    try:
        parsed = SetuptoolsVersion(v)
    except packaging.version.InvalidVersion:
        # Not PEP 440 compliant -- use the legacy comparison scheme.
        parsed = SetuptoolsLegacyVersion(v)
    return parsed
204
205
# Registry of module-level state variables: name -> snapshot strategy
# ('dict', 'object' or 'none'), consumed by __getstate__/__setstate__.
_state_vars = {}


def _declare_state(vartype, **kw):
    """Register module-level state variables.

    Each keyword becomes a module global, and its snapshot/restore
    strategy ``vartype`` is recorded in ``_state_vars``.
    """
    module_globals = globals()
    for name, value in kw.items():
        module_globals[name] = value
        _state_vars[name] = vartype
212
213
def __getstate__():
    """Snapshot all registered module state into a plain dict.

    Each variable is captured via the ``_sget_<vartype>`` helper recorded
    for it in ``_state_vars``.
    """
    g = globals()
    return {
        name: g['_sget_' + vartype](g[name])
        for name, vartype in _state_vars.items()
    }
220
221
def __setstate__(state):
    """Restore module state previously captured by ``__getstate__``.

    Dispatches each entry to the matching ``_sset_<vartype>`` helper and
    returns the state dict unchanged.
    """
    g = globals()
    for name, snapshot in state.items():
        g['_sset_' + _state_vars[name]](name, g[name], snapshot)
    return state
227
228
def _sget_dict(val):
    """Snapshot helper: shallow-copy a dict-valued state variable."""
    return val.copy()
231
232
def _sset_dict(key, ob, state):
    """Restore helper: replace a dict's contents in place."""
    ob.clear()
    ob.update(state)
236
237
def _sget_object(val):
    """Snapshot helper: delegate to the object's own ``__getstate__``."""
    return val.__getstate__()
240
241
def _sset_object(key, ob, state):
    """Restore helper: delegate to the object's own ``__setstate__``."""
    ob.__setstate__(state)
244
245
# 'none' state variables are neither captured nor restored.
_sget_none = _sset_none = lambda *args: None
247
248
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*. To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    if sys.platform != "darwin":
        return plat
    m = macosVersionString.match(plat)
    if m is None:
        return plat
    try:
        # Substitute the *running* OS version for the build-time minimum.
        return 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
    except ValueError:
        # not Mac OS X -- keep the build platform string
        return plat
271
272
# Explicit public API of pkg_resources; star-imports and documentation
# tools see exactly this surface.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info',
    'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Warnings
    'PEP440Warning',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
318
319
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. DistributionNotFound('foo', None) -> "DistributionNotFound('foo', None)"
        return '{}{}'.format(type(self).__name__, repr(self.args))
325
326
class VersionConflict(ResolutionError):
    """
    An already-installed version conflicts with the requested version.

    Should be initialized with the installed Distribution and the requested
    Requirement.
    """

    _template = "{self.dist} is installed but {self.req} is required"

    @property
    def dist(self):
        # the installed Distribution (first positional arg)
        return self.args[0]

    @property
    def req(self):
        # the requested Requirement (second positional arg)
        return self.args[1]

    def report(self):
        """Return a human-readable description of the conflict."""
        return self._template.format(self=self)

    def with_context(self, required_by):
        """
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        """
        if not required_by:
            return self
        return ContextualVersionConflict(*(self.args + (required_by,)))
357
358
class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict that accepts a third parameter, the set of the
    requirements that required the installed Distribution.
    """

    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # the requirements that pulled in the conflicting distribution
        return self.args[2]
370
371
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = ("The '{self.req}' distribution was not found "
                 "and is required by {self.requirers_str}")

    @property
    def req(self):
        # the unmet Requirement (first positional arg)
        return self.args[0]

    @property
    def requirers(self):
        # iterable of requiring project names, or None (second positional arg)
        return self.args[1]

    @property
    def requirers_str(self):
        # Fall back to a generic subject when no requirers were recorded.
        return ', '.join(self.requirers) if self.requirers else 'the application'

    def report(self):
        """Return a human-readable description of the failure."""
        return self._template.format(self=self)

    def __str__(self):
        return self.report()
397
398
class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""
    # NOTE(review): presumably raised when a requirement names an extra the
    # distribution's metadata does not declare -- confirm against callers.
401
402
# Registry mapping PEP 302 loader types to provider factories; populated
# via register_loader_type() and consulted by get_provider().
_provider_factories = {}

# Major Python version string, e.g. '2.7' or '3.8'. Built from
# sys.version_info rather than slicing sys.version, which silently breaks
# for two-digit minor versions ('3.10'[:3] == '3.1').
PY_MAJOR = '{}.{}'.format(*sys.version_info)

# Distribution "precedence" constants; larger values appear to take
# priority when several candidates satisfy a requirement (see their use
# by Environment/Distribution elsewhere in this module).
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1
411
412
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # Later registrations for the same loader type override earlier ones;
    # lookup happens via _find_adapter() in get_provider().
    _provider_factories[loader_type] = provider_factory
421
422
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # A Requirement: resolve it through the global working set.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    # A module name: import it if it is not already loaded, then adapt
    # its loader to a provider.
    if moduleOrReq not in sys.modules:
        __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
434
435
def _macosx_vers(_cache=[]):
    """Return the Mac OS X version as a list of string components.

    The mutable default argument is deliberate: it memoizes the result
    across calls.
    """
    if _cache:
        return _cache[0]
    version = platform.mac_ver()[0]
    if version == '':
        # fallback for MacPorts: read the system version plist directly
        plist = '/System/Library/CoreServices/SystemVersion.plist'
        if os.path.exists(plist):
            if hasattr(plistlib, 'readPlist'):
                plist_content = plistlib.readPlist(plist)
                if 'ProductVersion' in plist_content:
                    version = plist_content['ProductVersion']
    _cache.append(version.split('.'))
    return _cache[0]
450
451
def _macosx_arch(machine):
    """Map legacy PowerPC machine names to the 'ppc' arch tag."""
    if machine in ('PowerPC', 'Power_Macintosh'):
        return 'ppc'
    return machine
454
455
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    try:
        # Python 2.7 or >=3.2
        from sysconfig import get_platform
    except ImportError:
        from distutils.util import get_platform

    plat = get_platform()
    if sys.platform != "darwin" or plat.startswith('macosx-'):
        return plat
    try:
        version = _macosx_vers()
        machine = os.uname()[4].replace(" ", "_")
        return "macosx-%d.%d-%s" % (
            int(version[0]), int(version[1]), _macosx_arch(machine))
    except ValueError:
        # if someone is running a non-Mac darwin system, this will fall
        # through to the default implementation
        return plat
480
481
# Patterns for platform strings like 'macosx-10.12-x86_64' and the legacy
# 'darwin-8.11.1-i386' form (groups: major, minor[, patch], machine).
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat: old public name for get_build_platform
get_platform = get_build_platform
486
487
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    # easy cases: unspecified or identical platforms
    if provided is None or required is None:
        return True
    if provided == required:
        return True

    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    if not reqMac:
        # XXX Linux and other platforms' special cases should go here
        return False

    provMac = macosVersionString.match(provided)
    if provMac:
        # same major version and machine type?
        if provMac.group(1) != reqMac.group(1):
            return False
        if provMac.group(3) != reqMac.group(3):
            return False
        # the required OS major update must be >= the provided one
        return int(provMac.group(2)) <= int(reqMac.group(2))

    # this is backwards compatibility for packages built before
    # setuptools 0.6. All packages built after this point will
    # use the new macosx designation.
    provDarwin = darwinVersionString.match(provided)
    if not provDarwin:
        # egg isn't macosx or legacy darwin
        return False
    dversion = int(provDarwin.group(1))
    macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
    if dversion == 7 and macosversion >= "10.3":
        return True
    if dversion == 8 and macosversion >= "10.4":
        return True
    return False
532
533
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Run the script in the *caller's* global namespace, wiped clean except
    # for its __name__, so the script behaves like a fresh __main__.
    caller_globals = sys._getframe(1).f_globals
    saved_name = caller_globals['__name__']
    caller_globals.clear()
    caller_globals['__name__'] = saved_name
    require(dist_spec)[0].run_script(script_name, caller_globals)
541
542
# backward compatibility: old public alias for run_script
run_main = run_script
545
546
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Normalize progressively: string -> Requirement -> Distribution.
    resolved = dist
    if isinstance(resolved, six.string_types):
        resolved = Requirement.parse(resolved)
    if isinstance(resolved, Requirement):
        resolved = get_provider(resolved)
    if not isinstance(resolved, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", resolved)
    return resolved
556
557
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    # Convenience wrapper: resolve `dist` then delegate to the Distribution.
    return get_distribution(dist).load_entry_point(group, name)
561
562
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    # Convenience wrapper: resolve `dist` then delegate to the Distribution.
    return get_distribution(dist).get_entry_map(group)
566
567
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    # Convenience wrapper: resolve `dist` then delegate to the Distribution.
    return get_distribution(dist).get_entry_info(group, name)
571
572
class IMetadataProvider:
    """Interface for access to a distribution's metadata.

    Note: the methods below are declared without ``self``; this class
    documents the provider protocol rather than serving as a usable base
    implementation.
    """

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
594
595
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources

    Resource names use ``/`` as the path separator regardless of the
    local filesystem convention (see the module docstring).
    """

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
622
623
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # path entries scanned, in order (may contain duplicates, mirroring
        # sys.path)
        self.entries = []
        # entry -> list of distribution keys found on that entry
        self.entry_keys = {}
        # distribution key -> active Distribution
        self.by_key = {}
        # activation listeners registered via subscribe()
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # sys.path as-is cannot satisfy __requires__; rebuild from scratch
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            # insert=False: the entry itself was just appended above
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Reuse the caller's global namespace, cleared except for __name__,
        # so the script executes as if it were the main module.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        # record the key under both the explicit entry and the dist's own
        # location (they may differ)
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # notify activation listeners
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None,
                replace_conflicting=False, extras=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception if
        any requirements are found on the path that have the correct name but
        the wrong version. Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.

        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement. Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req, extras):
                # environment marker failed for every demanding extra
                continue

            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(req, ws, installer)
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def find_plugins(self, plugin_env, full_env=None, installer=None,
                     fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback, existing=True):
        """Invoke `callback` for all distributions

        If `existing=True` (default),
        call on all existing ones, as well.
        """
        if callback in self.callbacks:
            # already registered; do not double-invoke
            return
        self.callbacks.append(callback)
        if not existing:
            return
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # Notify every registered activation listener about `dist`.
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        """Return a shallow-copied snapshot of the working set's state."""
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )

    def __setstate__(self, e_k_b_c):
        """Restore state from a tuple produced by ``__getstate__``."""
        entries, keys, by_key, callbacks = e_k_b_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
1005
1006
class _ReqExtras(dict):
    """
    Map each requirement to the extras that demanded it.
    """

    def markers_pass(self, req, extras=None):
        """
        Evaluate markers for req against each extra that
        demanded it (plus any explicitly supplied ``extras``).

        Return False if the req has a marker and fails
        evaluation. Otherwise, return True.
        """
        # Candidate 'extra' values: extras recorded for this req plus any
        # caller-supplied ones (or a lone None placeholder).
        candidates = self.get(req, ()) + (extras or (None,))
        if not req.marker:
            # No environment marker -- the requirement always applies.
            return True
        return any(
            req.marker.evaluate({'extra': extra})
            for extra in candidates
        )
1025
1026
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    # NOTE: the `platform`/`python` defaults below are evaluated once, at
    # class-definition time — this is the historical pkg_resources behavior.
    def __init__(self, search_path=None, platform=get_supported_platform(),
                 python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with. If
        unspecified, it defaults to the current platform. `python` is an
        optional string naming the desired version of Python (e.g. ``'3.3'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # Maps lowercased project name -> list of Distributions,
        # kept newest-first by add().
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        # A `python` of None, or a dist with no py_version, acts as a
        # wildcard; platform checking is delegated to compatible_platforms().
        return (self.python is None or dist.py_version is None
                or dist.py_version == self.python) \
            and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        # NOTE: raises KeyError/ValueError if `dist` was never added.
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used. Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        """
        distribution_key = project_name.lower()
        # Unknown projects yield an empty list rather than raising.
        return self._distmap.get(distribution_key, [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                # Keep the per-project list sorted newest-first.
                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.) If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`. If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        dist = working_set.find(req)
        if dist is not None:
            return dist
        # Lists are newest-first, so the first match is the best match.
        for dist in self[req.key]:
            if dist in req:
                return dist
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download). In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead. This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            # Skip keys whose distribution lists have been emptied by remove().
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # platform/python of None: the merged environment accepts everything.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new
1161
1162
1163 # XXX backward compatibility
1164 AvailableDistributions = Environment
1165
1166
class ExtractionError(RuntimeError):
    """Raised when extracting a resource to the egg cache fails.

    Instances carry the following attributes:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
1181
1182
class ResourceManager:
    """Manage resource extraction and packages"""

    # Base directory for extractions; None means use get_default_cache().
    extraction_path = None

    def __init__(self):
        # Tracks every extraction target path handed out; also used to
        # refuse changing the extraction path after files were extracted.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Raise an ExtractionError describing the active extraction failure.

        Must be called from inside an exception handler; the in-flight
        exception is attached to the raised error as ``original_error``.
        """
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s) to the Python egg
            cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?  You can
            change the cache directory by setting the PYTHON_EGG_CACHE environment
            variable to point to an accessible directory.
            """).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit; narrowed to Exception.
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = ("%s is writable by group/others and vulnerable to attack "
                   "when "
                   "used with get_resource_filename. Consider a more secure "
                   "location (set with .set_extraction_path or the "
                   "PYTHON_EGG_CACHE environment variable)." % path)
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """
        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX not implemented upstream either; intentionally a no-op.
1359
1360
def get_default_cache():
    """
    Return the directory used for caching extracted resources.

    The ``PYTHON_EGG_CACHE`` environment variable wins when set (and
    non-empty); otherwise fall back to a platform-appropriate per-user
    cache directory for an application named "Python-Eggs".
    """
    env_cache = os.environ.get('PYTHON_EGG_CACHE')
    if env_cache:
        return env_cache
    return appdirs.user_cache_dir(appname='Python-Eggs')
1371
1372
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Every run of characters other than letters, digits and ``.`` is
    collapsed into a single ``-``.
    """
    return re.sub('[^A-Za-z0-9.]+', '-', name)
1379
1380
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # normalize the version per PEP 440 when it parses cleanly
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        # legacy fallback: spaces become dots, other junk runs become dashes
        version = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', version)
1391
1392
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Runs of characters other than letters, digits, ``.`` and ``-`` become
    a single ``_``, and the result is always lowercased.
    """
    return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
1400
1401
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
1408
1409
def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as e:
        # Location info is meaningless for a marker string; strip it.
        e.filename = None
        e.lineno = None
        return e
    else:
        return False
1422
1423
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker and return its boolean result
    for the current environment.

    Raises SyntaxError when `text` is not a valid marker.
    """
    try:
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as e:
        # Normalize the packaging-specific error to SyntaxError for callers.
        raise SyntaxError(e)
1437
1438
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Filled in by EggProvider subclasses when an enclosing egg is found;
    # None means "no egg metadata available".
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        # Wrap the raw bytes so callers get a file-like object.
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        return self.egg_info and self._has(self._fn(self.egg_info, name))

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        value = self._get(self._fn(self.egg_info, name))
        # Metadata is text: decode on Python 3, keep the raw str on Python 2.
        return value.decode('utf-8') if six.PY3 else value

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        # Execute a script shipped in the egg's EGG-INFO/scripts directory
        # inside `namespace`, normalizing line endings first.
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Script exists on disk: compile from the file so tracebacks
            # and debuggers can locate the source.
            source = open(script_filename).read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # Zipped script: seed linecache so tracebacks can still show
            # the source lines.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    # The _has/_isdir/_listdir primitives must be supplied by a provider
    # subclass registered for the concrete loader type.
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Resource names always use '/', never the platform separator.
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
1534
1535
1536 register_loader_type(object, NullProvider)
1537
1538
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Our metadata may be nested inside a "basket" of multiple eggs,
        # which is why we walk up from module_path rather than using
        # .archive directly.  Stop at the first unpacked egg directory,
        # or when dirname() reaches a fixed point (the filesystem root).
        path = self.module_path
        prev = None
        while path != prev:
            if _is_unpacked_egg(path):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            prev, path = path, os.path.dirname(path)
1559
1560
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    def get_resource_stream(self, manager, resource_name):
        # Stream straight from disk rather than buffering through _get().
        return open(self._fn(self.module_path, resource_name), 'rb')

    @classmethod
    def _register(cls):
        # SourceFileLoader only exists on Python 3; type(None) keeps the
        # registration a harmless no-op key elsewhere.
        loader_cls = getattr(importlib_machinery, 'SourceFileLoader',
                             type(None))
        register_loader_type(loader_cls, cls)
1585
1586
1587 DefaultProvider._register()
1588
1589
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _listdir(self, path):
        return []

    def _get(self, path):
        # NOTE: historically returns a str (not bytes); preserved as-is.
        return ''

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass
1600
1601
1602 empty_provider = EmptyProvider()
1603
1604
class ZipManifests(dict):
    """
    Dict-based zip manifest builder.

    Maps archive paths to manifests; a manifest is a dictionary keyed by
    member path (platform separator) with ZipInfo values.
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with ContextualZipFile(path) as zfile:
            return dict(
                (name.replace('/', os.sep), zfile.getinfo(name))
                for name in zfile.namelist()
            )

    load = build
1630
1631
class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests, invalidated by archive mtime.
    """
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        # Rebuild only when the path is unseen or the archive changed.
        cached = self.get(path)
        if cached is None or cached.mtime != mtime:
            cached = self.manifest_mod(self.build(path), mtime)
            self[path] = cached

        return cached.manifest
1650
1651
class ContextualZipFile(zipfile.ZipFile):
    """
    Supplement ZipFile class to support context manager for Python 2.6
    """

    def __new__(cls, *args, **kwargs):
        """
        Construct a ZipFile or ContextualZipFile as appropriate
        """
        # Modern ZipFile is already a context manager; hand back a plain
        # instance so this shim only takes effect on Python 2.6.
        if hasattr(zipfile.ZipFile, '__exit__'):
            return zipfile.ZipFile(*args, **kwargs)
        return super(ContextualZipFile, cls).__new__(cls)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        self.close()
1670
1671
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # Cached list of "eager" resources (see _get_eager_resources);
    # None until first computed.
    eagers = None
    # Shared, mtime-invalidated manifest cache across all instances.
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        EggProvider.__init__(self, module)
        # Prefix that every virtual path inside this archive starts with.
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )

    @property
    def zipinfo(self):
        # Manifest for this archive: {member path: ZipInfo}.
        return self._zip_manifests.load(self.loader.archive)

    def get_resource_filename(self, manager, resource_name):
        # Extract the resource (and, when it is eager, all eager resources)
        # to the cache and return its real filesystem path.
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    @staticmethod
    def _get_date_and_size(zip_stat):
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size

    def _extract_resource(self, manager, zip_path):
        # Directories are extracted recursively, one member at a time.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            # return the extracted directory name
            return os.path.dirname(last)

        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])

        if not WRITE_SUPPORT:
            raise IOError('"os.rename" and "os.unlink" are not supported '
                          'on this platform')
        try:

            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            # Skip the write entirely when an up-to-date copy already exists.
            if self._is_current(real_path, zip_path):
                return real_path

            # Write to a unique temp name, then rename into place; this keeps
            # concurrent extractions from clobbering each other.
            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name == 'nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            # report a user-friendly error
            manager.extraction_error()

        return real_path

    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        # Cheap size/mtime check first, full content comparison second.
        if stat.st_size != size or stat.st_mtime != timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents

    def _get_eager_resources(self):
        # Eager resources are listed in egg metadata and must all be
        # extracted together; the result is cached on the instance.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Lazily build (and cache) a directory index:
        # {directory subpath: [child names]}.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        # Files appear in the manifest, directories in the index.
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))
1836
1837
1838 register_loader_type(zipimport.zipimporter, ZipProvider)
1839
1840
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def has_metadata(self, name):
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")
        with io.open(self.path, encoding='utf-8', errors="replace") as f:
            metadata = f.read()
        self._warn_on_replacement(metadata)
        return metadata

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def _warn_on_replacement(self, metadata):
        # Python 2.6 and 3.2 compat for: replacement_char = '�'
        replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
        if replacement_char not in metadata:
            return
        tmpl = "{self.path} could not be properly decoded in UTF-8"
        msg = tmpl.format(**locals())
        warnings.warn(msg)
1878
1879
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `egg_info` points at the .egg-info / EGG-INFO metadata directory;
        # `path` is the directory containing the importable code.
        self.egg_info = egg_info
        self.module_path = path
1903
1904
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        # Bypass EggProvider.__init__: a zipimporter carries everything we
        # need without a module object.
        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        prefix = importer.prefix
        self.module_path = (
            os.path.join(importer.archive, prefix) if prefix
            else importer.archive
        )
        self._setup_prefix()
1918
1919
1920 _declare_state('dict', _distribution_finders={})
1921
1922
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    # Later registrations for the same importer type replace earlier ones.
    _distribution_finders[importer_type] = distribution_finder
1931
1932
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # Dispatch to the finder registered for this importer's type (walking
    # the MRO via _find_adapter, so `object` acts as the fallback key).
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
1938
1939
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for subitem in metadata.resource_listdir('/'):
        if not _is_unpacked_egg(subitem):
            continue
        subpath = os.path.join(path_item, subitem)
        sub_importer = zipimport.zipimporter(subpath)
        for dist in find_eggs_in_zip(sub_importer, subpath):
            yield dist
1959
1960
1961 register_finder(zipimport.zipimporter, find_eggs_in_zip)
1962
1963
def find_nothing(importer, path_item, only=False):
    """Fallback finder that yields no distributions for any path item."""
    return ()
1966
1967
1968 register_finder(object, find_nothing)
1969
1970
def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def _by_version(filename):
        """
        Split a filename into its dash-separated components (plus the
        extension) and parse each one as a version for comparison.
        """
        stem, ext = os.path.splitext(filename)
        parts = itertools.chain(stem.split('-'), [ext])
        return [packaging.version.parse(part) for part in parts]

    return sorted(names, key=_by_version, reverse=True)
1995
1996
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    # Unreadable/non-directory entries yield nothing.
    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if _is_unpacked_egg(path_item):
            # The path item itself is an unpacked egg directory.
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item, 'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            # (highest version first, so later pip-style resolution prefers it)
            path_item_entries = _by_version_descending(os.listdir(path_item))
            for entry in path_item_entries:
                lower = entry.lower()
                if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        if len(os.listdir(fullpath)) == 0:
                            # Empty egg directory, skip.
                            continue
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        # single-file .egg-info
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item, entry, metadata, precedence=DEVELOP_DIST
                    )
                elif not only and _is_unpacked_egg(entry):
                    # Nested egg: delegate back through the finder registry.
                    dists = find_distributions(os.path.join(path_item, entry))
                    for dist in dists:
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    # .egg-link files point at a development checkout; the
                    # first non-blank line is the target path.
                    with open(os.path.join(path_item, entry)) as entry_file:
                        entry_lines = entry_file.readlines()
                    for line in entry_lines:
                        if not line.strip():
                            continue
                        path = os.path.join(path_item, line.rstrip())
                        dists = find_distributions(path)
                        for item in dists:
                            yield item
                        break
2041
2042
# Plain filesystem directories are scanned by find_on_path.
register_finder(pkgutil.ImpImporter, find_on_path)

if hasattr(importlib_machinery, 'FileFinder'):
    # Python 3 importlib path entries get the same treatment.
    register_finder(importlib_machinery.FileFinder, find_on_path)

# Registries for namespace-package support; _declare_state lets them
# survive a reload of pkg_resources.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
2050
2051
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # Keyed by importer type; _find_adapter walks the MRO on lookup.
    _namespace_handlers[importer_type] = namespace_handler
2068
2069
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)

    Returns the subpath added to the package's ``__path__``, or ``None`` when
    the path item has no importer, no loader for the package, or no new
    subpath to contribute.
    """

    importer = get_importer(path_item)
    if importer is None:
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Create a fresh namespace module and attach it to its parent.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        # A plain (non-package) module of the same name is already imported.
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        path = module.__path__
        path.append(subpath)
        # Executing the package's __init__ may append further path entries;
        # afterwards, re-sort __path__ to match sys.path order.
        loader.load_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath
2094
2095
def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]

    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.
        """
        try:
            return sys_path.index(entry)
        except ValueError:
            # Entries not on sys.path sort after everything that is.
            return float('inf')

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        # Strip the trailing package-name components (one path part per
        # dotted level) to recover the sys.path entry the subpath came from.
        path_parts = path.split(os.sep)
        module_parts = package_name.count('.') + 1
        parts = path_parts[:-module_parts]
        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))

    if not isinstance(orig_path, list):
        # Is this behavior useful when module.__path__ is not a list?
        return

    orig_path.sort(key=position_in_sys_path)
    module.__path__[:] = [_normalize_cached(p) for p in orig_path]
2127
2128
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # The import lock guards _namespace_packages and sys.modules mutation.
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            # Already declared; nothing to do.
            return

        path, parent = sys.path, None
        if '.' in packageName:
            # Recursively declare (and import) the parent namespace first,
            # then search only the parent's __path__ for this child.
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()
2160
2161
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    _imp.acquire_lock()
    try:
        # Walk the declared namespace tree rooted at `parent`, letting each
        # package pick up any subpath it gains from the new path item.
        for pkg in _namespace_packages.get(parent, ()):
            child_subpath = _handle_ns(pkg, path_item)
            if not child_subpath:
                continue
            fixup_namespace_packages(child_subpath, pkg)
    finally:
        _imp.release_lock()
2172
2173
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    already_present = any(
        _normalize_cached(existing) == normalized
        for existing in module.__path__
    )
    # Only return the path if it's not already there
    if not already_present:
        return subpath
2185
2186
# Filesystem and zipfile importers share the same namespace handler.
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)

if hasattr(importlib_machinery, 'FileFinder'):
    # Python 3 importlib path entries use it as well.
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
2192
2193
def null_ns_handler(importer, path_item, packageName, module):
    """Namespace handler that never contributes a subpath."""
    return None
2196
2197
# Default: unknown importer types contribute no namespace subpath.
register_namespace_handler(object, null_ns_handler)
2199
2200
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
2204
2205
def _normalize_cached(filename, _cache={}):
    """Memoizing wrapper around normalize_path.

    The mutable default argument is deliberate: the shared dict serves as a
    process-wide cache keyed by the raw filename.
    """
    if filename not in _cache:
        _cache[filename] = normalize_path(filename)
    return _cache[filename]
2212
2213
2214 def _is_unpacked_egg(path):
2215 """
2216 Determine if given path appears to be an unpacked egg.
2217 """
2218 return (
2219 path.lower().endswith('.egg')
2220 )
2221
2222
2223 def _set_parent_ns(packageName):
2224 parts = packageName.split('.')
2225 name = parts.pop()
2226 if parts:
2227 parent = '.'.join(parts)
2228 setattr(sys.modules[parent], name, sys.modules[packageName])
2229
2230
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if not isinstance(strs, six.string_types):
        # A (possibly nested) iterable of strings: flatten recursively.
        for item in strs:
            for line in yield_lines(item):
                yield line
        return
    for raw in strs.splitlines():
        stripped = raw.strip()
        # skip blank lines/comments
        if stripped and not stripped.startswith('#'):
            yield stripped
2243
2244
# Matches a valid dotted module or entry-point group name, e.g. "foo.bar".
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parses an egg basename (extension already stripped) into its
# name/version/python-version/platform components, e.g.
# "Setuptools-1.2-py2.7-linux-x86_64". Every component after the name
# is optional.
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match
2258
2259
class EntryPoint(object):
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # Validate/normalize the extras by round-tripping them through a
        # dummy requirement ("x[extra1,extra2]").
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        self.dist = dist

    def __str__(self):
        # Renders back to the "name = module:attrs [extras]" source form.
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.

        Passing `require=False` or any extra arguments is deprecated; use
        ``resolve()`` and ``require()`` directly instead.
        """
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated. Call .resolve and "
                ".require separately.",
                DeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # Walk the dotted attribute chain, e.g. "Class.method".
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc))

    def require(self, env=None, installer=None):
        # Extras are meaningless without a distribution to resolve them from.
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    # Grammar for a single entry-point line: "name = module[:attrs] [extras]".
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        # Parse "[extra1,extra2]" by grafting it onto a dummy requirement.
        if not extras_spec:
            return ()
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            # Version specifiers are not allowed inside an extras bracket.
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        # `data` may be a dict of {group: lines} or ini-style text with
        # [group] section headers.
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps
2390
2391
2392 def _remove_md5_fragment(location):
2393 if not location:
2394 return ''
2395 parsed = urllib.parse.urlparse(location)
2396 if parsed[-1].startswith('md5='):
2397 return urllib.parse.urlunparse(parsed[:-1] + ('',))
2398 return location
2399
2400
def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    # PEP 8 (E731): use a generator expression rather than assigning a
    # lambda to filter with. The match is case-insensitive, as metadata
    # headers may be written in any case.
    version_lines = (
        line for line in lines if line.lower().startswith('version:')
    )
    # Take the first matching header line (or '' if there is none).
    line = next(iter(version_lines), '')
    _, _, value = line.partition(':')
    # An empty/unsafe value normalizes to falsy, which we report as None.
    return safe_version(value.strip()) or None
2411
2412
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    # Name of the metadata file carrying the core metadata; overridden by
    # DistInfoDistribution ('METADATA').
    PKG_INFO = 'PKG-INFO'

    def __init__(self, location=None, metadata=None, project_name=None,
            version=None, py_version=PY_MAJOR, platform=None,
            precedence=EGG_DIST):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            # If no version is given, the `version` property lazily reads it
            # from the metadata file instead.
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        self._provider = metadata or empty_provider

    @classmethod
    def from_location(cls, location, basename, metadata=None, **kw):
        # Pick the Distribution subclass from the filename extension and
        # parse name/version/pyver/platform out of an egg-style basename.
        project_name, version, py_version, platform = [None] * 4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in _distributionImpl:
            cls = _distributionImpl[ext.lower()]

            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name', 'ver', 'pyver', 'plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )._reload_version()

    def _reload_version(self):
        # Hook for subclasses to re-read a filename-mangled version from
        # the metadata file (see EggInfoDistribution).
        return self

    @property
    def hashcmp(self):
        # Tuple used for ordering, hashing, and equality of distributions.
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version or '',
            self.platform or '',
        )

    def __hash__(self):
        return hash(self.hashcmp)

    def __lt__(self, other):
        return self.hashcmp < other.hashcmp

    def __le__(self, other):
        return self.hashcmp <= other.hashcmp

    def __gt__(self, other):
        return self.hashcmp > other.hashcmp

    def __ge__(self, other):
        return self.hashcmp >= other.hashcmp

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp

    def __ne__(self, other):
        return not self == other

    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed. (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)

    @property
    def key(self):
        # Lowercased project name, cached on first access.
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key

    @property
    def parsed_version(self):
        if not hasattr(self, "_parsed_version"):
            self._parsed_version = parse_version(self.version)

        return self._parsed_version

    def _warn_legacy_version(self):
        # Warn when this distribution's version string is not PEP 440.
        LV = packaging.version.LegacyVersion
        is_legacy = isinstance(self._parsed_version, LV)
        if not is_legacy:
            return

        # While an empty version is technically a legacy version and
        # is not a valid PEP 440 version, it's also unlikely to
        # actually come from someone and instead it is more likely that
        # it comes from setuptools attempting to parse a filename and
        # including it in the list. So for that we'll gate this warning
        # on if the version is anything at all or not.
        if not self.version:
            return

        tmpl = textwrap.dedent("""
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            """).strip().replace('\n', ' ')

        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)

    @property
    def version(self):
        try:
            return self._version
        except AttributeError:
            # Fall back to the 'Version:' header in the metadata file.
            version = _version_from_file(self._get_metadata(self.PKG_INFO))
            if version is None:
                tmpl = "Missing 'Version:' header and/or %s file"
                raise ValueError(tmpl % self.PKG_INFO, self)
            return version

    @property
    def _dep_map(self):
        # Lazily-built map of extra name (or None) -> list of Requirements,
        # read from egg-style requires.txt/depends.txt metadata.
        try:
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra, reqs in split_sections(self._get_metadata(name)):
                    if extra:
                        if ':' in extra:
                            # "[extra:marker]" section; discard its
                            # requirements when the marker does not apply.
                            extra, marker = extra.split(':', 1)
                            if invalid_marker(marker):
                                # XXX warn
                                reqs = []
                            elif not evaluate_marker(marker):
                                reqs = []
                        extra = safe_extra(extra) or None
                    dm.setdefault(extra, []).extend(parse_requirements(reqs))
            return dm

    def requires(self, extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None, ()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

    def _get_metadata(self, name):
        # Yield the lines of metadata file `name`; yields nothing if absent.
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

    def activate(self, path=None, replace=False):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path, replace=replace)
        if path is sys.path:
            # Only fix up namespace packages when altering the real sys.path.
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    declare_namespace(pkg)

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-' + self.platform
        return filename

    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self, self.location)
        else:
            return str(self)

    def __str__(self):
        try:
            version = getattr(self, 'version', None)
        except ValueError:
            # Version metadata is missing or unreadable.
            version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name, version)

    def __getattr__(self, attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError(attr)
        return getattr(self._provider, attr)

    @classmethod
    def from_filename(cls, filename, metadata=None, **kw):
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        if isinstance(self.parsed_version, packaging.version.Version):
            spec = "%s==%s" % (self.project_name, self.parsed_version)
        else:
            # Legacy (non PEP 440) versions need arbitrary equality (===).
            spec = "%s===%s" % (self.project_name, self.parsed_version)

        return Requirement.parse(spec)

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group, name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group, name),))
        return ep.load()

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            # Parse and cache entry_points.txt on first access.
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group, {})
        return ep_map

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

    def insert_on(self, path, loc=None, replace=False):
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # Normalized shadow of `path`, kept in sync for comparisons.
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # Location was not found and no egg-parent insertion happened.
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np

        return

    def check_version_conflict(self):
        # Warn if a module provided by this distribution was already imported
        # from somewhere else, which would shadow this version.
        if self.key == 'setuptools':
            # ignore the inevitable setuptools self-conflicts :(
            return

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                    or modname in _namespace_packages):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                    fn.startswith(self.location)):
                # Already imported from within this distribution; no conflict.
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

    def has_version(self):
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for " + repr(self))
            return False
        return True

    def clone(self, **kw):
        """Copy this distribution, substituting in any changed keyword args"""
        names = 'project_name version py_version platform location precedence'
        for attr in names.split():
            kw.setdefault(attr, getattr(self, attr, None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)

    @property
    def extras(self):
        # All named extras this distribution declares (excludes the None key).
        return [dep for dep in self._dep_map if dep]
2768
2769
class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Prefer the version recorded in the metadata file over the one
        parsed from the filename.

        Distutils-installed packages (e.g. numpy or scipy) were built with
        an old safe_version, so version strings like ``1.11.0.dev0+2329eae``
        end up mangled in the filename (``1.11.0.dev0_2329eae``) and would
        not parse correctly downstream. Reading PKG-INFO sidesteps that.
        """
        metadata_version = _version_from_file(self._get_metadata(self.PKG_INFO))
        if metadata_version:
            self._version = metadata_version
        return self
2787
2788
class DistInfoDistribution(Distribution):
    """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
    PKG_INFO = 'METADATA'
    # Matches a version inside parens/commas, e.g. the "1.0" in "(1.0,".
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            # METADATA is an RFC 822-style message; parse it once.
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info

    @property
    def _dep_map(self):
        # Note: name mangling makes this cache attribute distinct from the
        # base class's own __dep_map cache.
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}

        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))

        def reqs_for_extra(extra):
            # Requirements whose marker applies when `extra` is requested.
            for req in reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            # Each extra gets only the requirements beyond the common set.
            dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)

        return dm
2834
2835
# Maps a metadata extension to the Distribution subclass used to wrap it.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}
2841
2842
def issue_warning(*args, **kw):
    """Emit a warning attributed to the first caller outside this module."""
    level = 1
    g = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        # ran off the top of the stack; keep the deepest level reached
        pass
    warnings.warn(stacklevel=level + 1, *args, **kw)
2854
2855
class RequirementParseError(ValueError):
    """Raised when a requirement string cannot be parsed."""

    def __str__(self):
        message = ' '.join(self.args)
        return message
2859
2860
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    for line in lines:
        # Drop comments -- a hash without a space may be in a URL.
        if ' #' in line:
            line = line[:line.find(' #')]
        # If there is a line continuation, drop it, and append the next line.
        if line.endswith('\\'):
            # Slice off only the backslash itself; strip() removes any
            # whitespace that preceded it. (The previous `line[:-2]` also
            # removed the character before the backslash, corrupting the
            # requirement when the backslash was not preceded by a space.)
            line = line[:-1].strip()
            # NOTE: a continuation on the final line still raises
            # StopIteration here, as before.
            line += next(lines)
        yield Requirement(line)
2878
2879
class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        try:
            super(Requirement, self).__init__(requirement_string)
        except packaging.requirements.InvalidRequirement as e:
            # Re-raise under the historical pkg_resources exception type.
            raise RequirementParseError(str(e))
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name, self.key = project_name, project_name.lower()
        # Legacy (operator, version) pairs kept for backward compatibility.
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        # Everything that defines requirement identity for ==/hash.
        self.hashCmp = (
            self.key,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        return (
            isinstance(other, Requirement) and
            self.hashCmp == other.hashCmp
        )

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        # A Distribution satisfies the requirement when its key matches and
        # its version is within the specifier; otherwise `item` is treated
        # as a version string/object.
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False

            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self): return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        # `s` must contain exactly one requirement specification.
        req, = parse_requirements(s)
        return req
2931
2932
2933 def _get_mro(cls):
2934 """Get an mro for a type or classic class"""
2935 if not isinstance(cls, type):
2936
2937 class cls(cls, object):
2938 pass
2939
2940 return cls.__mro__[1:]
2941 return cls.__mro__
2942
2943
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`

    The most specific class in the object's MRO that has a registered
    adapter wins; returns None when nothing matches.
    """
    candidates = _get_mro(getattr(ob, '__class__', type(ob)))
    for candidate in candidates:
        if candidate in registry:
            return registry[candidate]
2949
2950
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    if not os.path.isdir(parent):
        os.makedirs(parent)
2956
2957
def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if not (dirname and filename):
        # Nothing to create for a bare name or a root path.
        return
    if isdir(dirname):
        return
    # Create ancestors first, then this directory.
    _bypass_ensure_directory(dirname)
    mkdir(dirname, 0o755)
2966
2967
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # New header: flush the previous segment (skip a fully empty lead-in).
        if section or content:
            yield section, content
        section = line[1:-1].strip()
        content = []

    # wrap up last segment
    yield section, content
2992
2993
def _mkstemp(*args, **kw):
    """tempfile.mkstemp with the sandbox's os.open patch bypassed."""
    saved_open = os.open
    # temporarily bypass sandboxing
    os.open = os_open
    try:
        return tempfile.mkstemp(*args, **kw)
    finally:
        # and then put it back
        os.open = saved_open
3003
3004
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one (appended filters are consulted last).
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
3010
3011
3012 # from jaraco.functools 1.3
3013 def _call_aside(f, *args, **kwargs):
3014 f(*args, **kwargs)
3015 return f
3016
3017
@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # Re-export every public ResourceManager method as a module-level
    # function bound to this singleton manager instance.
    exported = {
        name: getattr(manager, name)
        for name in dir(manager)
        if not name.startswith('_')
    }
    g.update(exported)
3028
3029
@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    # NOTE(review): _declare_state is defined earlier in this module;
    # presumably it registers working_set for sandbox save/restore — confirm.
    _declare_state('object', working_set=working_set)

    # These locals become the module-level API (require, run_script, ...)
    # via the globals().update(locals()) call at the end of this function.
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    # tuple() is used only to drain the generator; the results are discarded.
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(lambda dist: dist.activate(replace=True), existing=False)
    # Reset entries *after* activation; order matters here.
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    globals().update(locals())
OLDNEW
« no previous file with comments | « third_party/google-endpoints/past/utils/__init__.py ('k') | third_party/google-endpoints/ply-3.9-py2.7.egg-info/PKG-INFO » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698