Chromium Code Reviews

Side by Side Diff: third_party/google-endpoints/setuptools/command/egg_info.py

Issue 2666783008: Add google-endpoints to third_party/. (Closed)
Patch Set: Created 3 years, 10 months ago
1 """setuptools.command.egg_info
2
3 Create a distribution's .egg-info directory and contents"""
4
5 from distutils.filelist import FileList as _FileList
6 from distutils.errors import DistutilsInternalError
7 from distutils.util import convert_path
8 from distutils import log
9 import distutils.errors
10 import distutils.filelist
11 import os
12 import re
13 import sys
14 import io
15 import warnings
16 import time
17 import collections
18
19 import six
20 from six.moves import map
21
22 from setuptools import Command
23 from setuptools.command.sdist import sdist
24 from setuptools.command.sdist import walk_revctrl
25 from setuptools.command.setopt import edit_config
26 from setuptools.command import bdist_egg
27 from pkg_resources import (
28 parse_requirements, safe_name, parse_version,
29 safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
30 import setuptools.unicode_utils as unicode_utils
31 from setuptools.glob import glob
32
33 import packaging
34
35
36 def translate_pattern(glob):
37 """
38 Translate a file path glob like '*.txt' into a regular expression.
39 This differs from fnmatch.translate, which allows wildcards to match
40 directory separators. It also knows about '**/', which matches any number
41 of directories.
42 """
43 pat = ''
44
45 # This will split on '/' within [character classes]. This is deliberate.
46 chunks = glob.split(os.path.sep)
47
48 sep = re.escape(os.sep)
49 valid_char = '[^%s]' % (sep,)
50
51 for c, chunk in enumerate(chunks):
52 last_chunk = c == len(chunks) - 1
53
54 # Chunks that are a literal ** are globstars. They match anything.
55 if chunk == '**':
56 if last_chunk:
57 # Match anything if this is the last component
58 pat += '.*'
59 else:
60 # Match '(name/)*'
61 pat += '(?:%s+%s)*' % (valid_char, sep)
62 continue # The whole path component has been handled; move to the next chunk
63
64 # Find any special characters in the remainder
65 i = 0
66 chunk_len = len(chunk)
67 while i < chunk_len:
68 char = chunk[i]
69 if char == '*':
70 # Match any number of name characters
71 pat += valid_char + '*'
72 elif char == '?':
73 # Match a name character
74 pat += valid_char
75 elif char == '[':
76 # Character class
77 inner_i = i + 1
78 # Skip initial !/] chars
79 if inner_i < chunk_len and chunk[inner_i] == '!':
80 inner_i = inner_i + 1
81 if inner_i < chunk_len and chunk[inner_i] == ']':
82 inner_i = inner_i + 1
83
84 # Loop till the closing ] is found
85 while inner_i < chunk_len and chunk[inner_i] != ']':
86 inner_i = inner_i + 1
87
88 if inner_i >= chunk_len:
89 # Got to the end of the string without finding a closing ]
90 # Do not treat this as a matching group, but as a literal [
91 pat += re.escape(char)
92 else:
93 # Grab the insides of the [brackets]
94 inner = chunk[i + 1:inner_i]
95 char_class = ''
96
97 # Class negation
98 if inner[0] == '!':
99 char_class = '^'
100 inner = inner[1:]
101
102 char_class += re.escape(inner)
103 pat += '[%s]' % (char_class,)
104
105 # Skip to the end ]
106 i = inner_i
107 else:
108 pat += re.escape(char)
109 i += 1
110
111 # Join each chunk with the dir separator
112 if not last_chunk:
113 pat += sep
114
115 return re.compile(pat + r'\Z(?ms)')
116
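# Illustrative sketch, assuming a POSIX os.sep of '/': the translation above
# yields anchored regexes that behave roughly like this --
#
#   translate_pattern('*.txt').match('notes.txt')               # matches
#   translate_pattern('*.txt').match('docs/notes.txt')          # no match: '*' never crosses os.sep
#   translate_pattern('docs/**/*.rst').match('docs/a/b/c.rst')  # matches: '**' spans directories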
117
118 class egg_info(Command):
119 description = "create a distribution's .egg-info directory"
120
121 user_options = [
122 ('egg-base=', 'e', "directory containing .egg-info directories"
123 " (default: top of the source tree)"),
124 ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
125 ('tag-build=', 'b', "Specify explicit tag to add to version number"),
126 ('no-date', 'D', "Don't include date stamp [default]"),
127 ]
128
129 boolean_options = ['tag-date']
130 negative_opt = {
131 'no-date': 'tag-date',
132 }
133
134 def initialize_options(self):
135 self.egg_name = None
136 self.egg_version = None
137 self.egg_base = None
138 self.egg_info = None
139 self.tag_build = None
140 self.tag_date = 0
141 self.broken_egg_info = False
142 self.vtags = None
143
144 ####################################
145 # allow the 'tag_svn_revision' to be detected and
146 # set, supporting sdists built on older Setuptools.
147 @property
148 def tag_svn_revision(self):
149 pass
150
151 @tag_svn_revision.setter
152 def tag_svn_revision(self, value):
153 pass
154 ####################################
155
156 def save_version_info(self, filename):
157 """
158 Materialize the value of date into the
159 build tag. Install build keys in a deterministic order
160 to avoid arbitrary reordering on subsequent builds.
161 """
162 # python 2.6 compatibility
163 odict = getattr(collections, 'OrderedDict', dict)
164 egg_info = odict()
165 # follow the order these keys would have been added
166 # when PYTHONHASHSEED=0
167 egg_info['tag_build'] = self.tags()
168 egg_info['tag_date'] = 0
169 edit_config(filename, dict(egg_info=egg_info))
170
171 def finalize_options(self):
172 self.egg_name = safe_name(self.distribution.get_name())
173 self.vtags = self.tags()
174 self.egg_version = self.tagged_version()
175
176 parsed_version = parse_version(self.egg_version)
177
178 try:
179 is_version = isinstance(parsed_version, packaging.version.Version)
180 spec = (
181 "%s==%s" if is_version else "%s===%s"
182 )
183 list(
184 parse_requirements(spec % (self.egg_name, self.egg_version))
185 )
186 except ValueError:
187 raise distutils.errors.DistutilsOptionError(
188 "Invalid distribution name or version syntax: %s-%s" %
189 (self.egg_name, self.egg_version)
190 )
191
192 if self.egg_base is None:
193 dirs = self.distribution.package_dir
194 self.egg_base = (dirs or {}).get('', os.curdir)
195
196 self.ensure_dirname('egg_base')
197 self.egg_info = to_filename(self.egg_name) + '.egg-info'
198 if self.egg_base != os.curdir:
199 self.egg_info = os.path.join(self.egg_base, self.egg_info)
200 if '-' in self.egg_name:
201 self.check_broken_egg_info()
202
203 # Set package version for the benefit of dumber commands
204 # (e.g. sdist, bdist_wininst, etc.)
205 #
206 self.distribution.metadata.version = self.egg_version
207
208 # If we bootstrapped around the lack of a PKG-INFO, as might be the
209 # case in a fresh checkout, make sure that any special tags get added
210 # to the version info
211 #
212 pd = self.distribution._patched_dist
213 if pd is not None and pd.key == self.egg_name.lower():
214 pd._version = self.egg_version
215 pd._parsed_version = parse_version(self.egg_version)
216 self.distribution._patched_dist = None
217
218 def write_or_delete_file(self, what, filename, data, force=False):
219 """Write `data` to `filename` or delete if empty
220
221 If `data` is non-empty, this routine is the same as ``write_file()``.
222 If `data` is empty but not ``None``, this is the same as calling
223 ``delete_file(filename)``. If `data` is ``None``, then this is a no-op
224 unless `filename` exists, in which case a warning is issued about the
225 orphaned file (if `force` is false), or deleted (if `force` is true).
226 """
227 if data:
228 self.write_file(what, filename, data)
229 elif os.path.exists(filename):
230 if data is None and not force:
231 log.warn(
232 "%s not set in setup(), but %s exists", what, filename
233 )
234 return
235 else:
236 self.delete_file(filename)
237
238 def write_file(self, what, filename, data):
239 """Write `data` to `filename` (if not a dry run) after announcing it
240
241 `what` is used in a log message to identify what is being written
242 to the file.
243 """
244 log.info("writing %s to %s", what, filename)
245 if six.PY3:
246 data = data.encode("utf-8")
247 if not self.dry_run:
248 f = open(filename, 'wb')
249 f.write(data)
250 f.close()
251
252 def delete_file(self, filename):
253 """Delete `filename` (if not a dry run) after announcing it"""
254 log.info("deleting %s", filename)
255 if not self.dry_run:
256 os.unlink(filename)
257
258 def tagged_version(self):
259 version = self.distribution.get_version()
260 # egg_info may be called more than once for a distribution,
261 # in which case the version string already contains all tags.
262 if self.vtags and version.endswith(self.vtags):
263 return safe_version(version)
264 return safe_version(version + self.vtags)
265
266 def run(self):
267 self.mkpath(self.egg_info)
268 installer = self.distribution.fetch_build_egg
269 for ep in iter_entry_points('egg_info.writers'):
270 ep.require(installer=installer)
271 writer = ep.resolve()
272 writer(self, ep.name, os.path.join(self.egg_info, ep.name))
273
274 # Get rid of native_libs.txt if it was put there by older bdist_egg
275 nl = os.path.join(self.egg_info, "native_libs.txt")
276 if os.path.exists(nl):
277 self.delete_file(nl)
278
279 self.find_sources()
280
281 def tags(self):
282 version = ''
283 if self.tag_build:
284 version += self.tag_build
285 if self.tag_date:
286 version += time.strftime("-%Y%m%d")
287 return version
288
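# Illustrative sketch (hypothetical values): with tag_build='.dev' and tag_date
# enabled, the pieces above combine roughly as --
#
#   self.tags()            # '.dev-20170203' (date suffix from time.strftime)
#   self.tagged_version()  # safe_version('1.0' + '.dev-20170203')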
289 def find_sources(self):
290 """Generate SOURCES.txt manifest file"""
291 manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
292 mm = manifest_maker(self.distribution)
293 mm.manifest = manifest_filename
294 mm.run()
295 self.filelist = mm.filelist
296
297 def check_broken_egg_info(self):
298 bei = self.egg_name + '.egg-info'
299 if self.egg_base != os.curdir:
300 bei = os.path.join(self.egg_base, bei)
301 if os.path.exists(bei):
302 log.warn(
303 "-" * 78 + '\n'
304 "Note: Your current .egg-info directory has a '-' in its name;"
305 '\nthis will not work correctly with "setup.py develop".\n\n'
306 'Please rename %s to %s to correct this problem.\n' + '-' * 78,
307 bei, self.egg_info
308 )
309 self.broken_egg_info = self.egg_info
310 self.egg_info = bei # make it work for now
311
312
313 class FileList(_FileList):
314 # Implementations of the various MANIFEST.in commands
315
316 def process_template_line(self, line):
317 # Parse the line: split it up, make sure the right number of words
318 # is there, and return the relevant words. 'action' is always
319 # defined: it's the first word of the line. Which of the other
320 # three are defined depends on the action; it'll be either
321 # patterns, (dir and patterns), or (dir_pattern).
322 (action, patterns, dir, dir_pattern) = self._parse_template_line(line)
323
324 # OK, now we know that the action is valid and we have the
325 # right number of words on the line for that action -- so we
326 # can proceed with minimal error-checking.
327 if action == 'include':
328 self.debug_print("include " + ' '.join(patterns))
329 for pattern in patterns:
330 if not self.include(pattern):
331 log.warn("warning: no files found matching '%s'", pattern)
332
333 elif action == 'exclude':
334 self.debug_print("exclude " + ' '.join(patterns))
335 for pattern in patterns:
336 if not self.exclude(pattern):
337 log.warn(("warning: no previously-included files "
338 "found matching '%s'"), pattern)
339
340 elif action == 'global-include':
341 self.debug_print("global-include " + ' '.join(patterns))
342 for pattern in patterns:
343 if not self.global_include(pattern):
344 log.warn(("warning: no files found matching '%s' "
345 "anywhere in distribution"), pattern)
346
347 elif action == 'global-exclude':
348 self.debug_print("global-exclude " + ' '.join(patterns))
349 for pattern in patterns:
350 if not self.global_exclude(pattern):
351 log.warn(("warning: no previously-included files matching "
352 "'%s' found anywhere in distribution"),
353 pattern)
354
355 elif action == 'recursive-include':
356 self.debug_print("recursive-include %s %s" %
357 (dir, ' '.join(patterns)))
358 for pattern in patterns:
359 if not self.recursive_include(dir, pattern):
360 log.warn(("warning: no files found matching '%s' "
361 "under directory '%s'"),
362 pattern, dir)
363
364 elif action == 'recursive-exclude':
365 self.debug_print("recursive-exclude %s %s" %
366 (dir, ' '.join(patterns)))
367 for pattern in patterns:
368 if not self.recursive_exclude(dir, pattern):
369 log.warn(("warning: no previously-included files matching "
370 "'%s' found under directory '%s'"),
371 pattern, dir)
372
373 elif action == 'graft':
374 self.debug_print("graft " + dir_pattern)
375 if not self.graft(dir_pattern):
376 log.warn("warning: no directories found matching '%s'",
377 dir_pattern)
378
379 elif action == 'prune':
380 self.debug_print("prune " + dir_pattern)
381 if not self.prune(dir_pattern):
382 log.warn(("no previously-included directories found "
383 "matching '%s'"), dir_pattern)
384
385 else:
386 raise DistutilsInternalError(
387 "this cannot happen: invalid action '%s'" % action)
388
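# Illustrative sketch (hypothetical project layout): the MANIFEST.in commands
# dispatched above look like the following template lines --
#
#   include README.rst CHANGES.rst
#   recursive-include docs *.rst *.png
#   global-exclude *.py[cod]
#   graft examples
#   prune build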
389 def _remove_files(self, predicate):
390 """
391 Remove all files from the file list that match the predicate.
392 Return True if any matching files were removed
393 """
394 found = False
395 for i in range(len(self.files) - 1, -1, -1):
396 if predicate(self.files[i]):
397 self.debug_print(" removing " + self.files[i])
398 del self.files[i]
399 found = True
400 return found
401
402 def include(self, pattern):
403 """Include files that match 'pattern'."""
404 found = [f for f in glob(pattern) if not os.path.isdir(f)]
405 self.extend(found)
406 return bool(found)
407
408 def exclude(self, pattern):
409 """Exclude files that match 'pattern'."""
410 match = translate_pattern(pattern)
411 return self._remove_files(match.match)
412
413 def recursive_include(self, dir, pattern):
414 """
415 Include all files anywhere in 'dir/' that match the pattern.
416 """
417 full_pattern = os.path.join(dir, '**', pattern)
418 found = [f for f in glob(full_pattern, recursive=True)
419 if not os.path.isdir(f)]
420 self.extend(found)
421 return bool(found)
422
423 def recursive_exclude(self, dir, pattern):
424 """
425 Exclude any file anywhere in 'dir/' that matches the pattern.
426 """
427 match = translate_pattern(os.path.join(dir, '**', pattern))
428 return self._remove_files(match.match)
429
430 def graft(self, dir):
431 """Include all files from 'dir/'."""
432 found = [
433 item
434 for match_dir in glob(dir)
435 for item in distutils.filelist.findall(match_dir)
436 ]
437 self.extend(found)
438 return bool(found)
439
440 def prune(self, dir):
441 """Filter out files from 'dir/'."""
442 match = translate_pattern(os.path.join(dir, '**'))
443 return self._remove_files(match.match)
444
445 def global_include(self, pattern):
446 """
447 Include all files anywhere in the current directory that match the
448 pattern. This is very inefficient on large file trees.
449 """
450 if self.allfiles is None:
451 self.findall()
452 match = translate_pattern(os.path.join('**', pattern))
453 found = [f for f in self.allfiles if match.match(f)]
454 self.extend(found)
455 return bool(found)
456
457 def global_exclude(self, pattern):
458 """
459 Exclude all files anywhere that match the pattern.
460 """
461 match = translate_pattern(os.path.join('**', pattern))
462 return self._remove_files(match.match)
463
464 def append(self, item):
465 if item.endswith('\r'): # Fix older sdists built on Windows
466 item = item[:-1]
467 path = convert_path(item)
468
469 if self._safe_path(path):
470 self.files.append(path)
471
472 def extend(self, paths):
473 self.files.extend(filter(self._safe_path, paths))
474
475 def _repair(self):
476 """
477 Replace self.files with only safe paths
478
479 Because some owners of FileList manipulate the underlying
480 ``files`` attribute directly, this method must be called to
481 repair those paths.
482 """
483 self.files = list(filter(self._safe_path, self.files))
484
485 def _safe_path(self, path):
486 enc_warn = "'%s' not %s encodable -- skipping"
487
488 # To avoid accidental transcoding errors, decode to unicode first
489 u_path = unicode_utils.filesys_decode(path)
490 if u_path is None:
491 log.warn("'%s' in unexpected encoding -- skipping" % path)
492 return False
493
494 # Must ensure utf-8 encodability
495 utf8_path = unicode_utils.try_encode(u_path, "utf-8")
496 if utf8_path is None:
497 log.warn(enc_warn, path, 'utf-8')
498 return False
499
500 try:
501 # accept if either way checks out
502 if os.path.exists(u_path) or os.path.exists(utf8_path):
503 return True
504 # this will catch any encode errors decoding u_path
505 except UnicodeEncodeError:
506 log.warn(enc_warn, path, sys.getfilesystemencoding())
507
508
509 class manifest_maker(sdist):
510 template = "MANIFEST.in"
511
512 def initialize_options(self):
513 self.use_defaults = 1
514 self.prune = 1
515 self.manifest_only = 1
516 self.force_manifest = 1
517
518 def finalize_options(self):
519 pass
520
521 def run(self):
522 self.filelist = FileList()
523 if not os.path.exists(self.manifest):
524 self.write_manifest() # it must exist so it'll get in the list
525 self.add_defaults()
526 if os.path.exists(self.template):
527 self.read_template()
528 self.prune_file_list()
529 self.filelist.sort()
530 self.filelist.remove_duplicates()
531 self.write_manifest()
532
533 def _manifest_normalize(self, path):
534 path = unicode_utils.filesys_decode(path)
535 return path.replace(os.sep, '/')
536
537 def write_manifest(self):
538 """
539 Write the file list in 'self.filelist' to the manifest file
540 named by 'self.manifest'.
541 """
542 self.filelist._repair()
543
544 # Now _repair has ensured encodability, but the paths may not yet be unicode
545 files = [self._manifest_normalize(f) for f in self.filelist.files]
546 msg = "writing manifest file '%s'" % self.manifest
547 self.execute(write_file, (self.manifest, files), msg)
548
549 def warn(self, msg):
550 if not self._should_suppress_warning(msg):
551 sdist.warn(self, msg)
552
553 @staticmethod
554 def _should_suppress_warning(msg):
555 """
556 suppress missing-file warnings from sdist
557 """
558 return re.match(r"standard file .*not found", msg)
559
560 def add_defaults(self):
561 sdist.add_defaults(self)
562 self.filelist.append(self.template)
563 self.filelist.append(self.manifest)
564 rcfiles = list(walk_revctrl())
565 if rcfiles:
566 self.filelist.extend(rcfiles)
567 elif os.path.exists(self.manifest):
568 self.read_manifest()
569 ei_cmd = self.get_finalized_command('egg_info')
570 self.filelist.graft(ei_cmd.egg_info)
571
572 def prune_file_list(self):
573 build = self.get_finalized_command('build')
574 base_dir = self.distribution.get_fullname()
575 self.filelist.prune(build.build_base)
576 self.filelist.prune(base_dir)
577 sep = re.escape(os.sep)
578 self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
579 is_regex=1)
580
581
582 def write_file(filename, contents):
583 """Create a file with the specified name and write 'contents' (a
584 sequence of strings without line terminators) to it.
585 """
586 contents = "\n".join(contents)
587
588 # assuming the contents have been vetted for utf-8 encoding
589 contents = contents.encode("utf-8")
590
591 with open(filename, "wb") as f: # always write POSIX-style manifest
592 f.write(contents)
593
594
595 def write_pkg_info(cmd, basename, filename):
596 log.info("writing %s", filename)
597 if not cmd.dry_run:
598 metadata = cmd.distribution.metadata
599 metadata.version, oldver = cmd.egg_version, metadata.version
600 metadata.name, oldname = cmd.egg_name, metadata.name
601 try:
602 # write unescaped data to PKG-INFO, so older pkg_resources
603 # can still parse it
604 metadata.write_pkg_info(cmd.egg_info)
605 finally:
606 metadata.name, metadata.version = oldname, oldver
607
608 safe = getattr(cmd.distribution, 'zip_safe', None)
609
610 bdist_egg.write_safety_flag(cmd.egg_info, safe)
611
612
613 def warn_depends_obsolete(cmd, basename, filename):
614 if os.path.exists(filename):
615 log.warn(
616 "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
617 "Use the install_requires/extras_require setup() args instead."
618 )
619
620
621 def _write_requirements(stream, reqs):
622 lines = yield_lines(reqs or ())
623 append_cr = lambda line: line + '\n'
624 lines = map(append_cr, lines)
625 stream.writelines(lines)
626
627
628 def write_requirements(cmd, basename, filename):
629 dist = cmd.distribution
630 data = six.StringIO()
631 _write_requirements(data, dist.install_requires)
632 extras_require = dist.extras_require or {}
633 for extra in sorted(extras_require):
634 data.write('\n[{extra}]\n'.format(**vars()))
635 _write_requirements(data, extras_require[extra])
636 cmd.write_or_delete_file("requirements", filename, data.getvalue())
637
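# Illustrative sketch (hypothetical distribution): with
# install_requires=['six'] and extras_require={'docs': ['sphinx']},
# write_requirements() produces a requirements file along the lines of --
#
#   six
#
#   [docs]
#   sphinx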
638
639 def write_setup_requirements(cmd, basename, filename):
640 data = six.StringIO()
641 _write_requirements(data, cmd.distribution.setup_requires)
642 cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
643
644
645 def write_toplevel_names(cmd, basename, filename):
646 pkgs = dict.fromkeys(
647 [
648 k.split('.', 1)[0]
649 for k in cmd.distribution.iter_distribution_names()
650 ]
651 )
652 cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
653
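# Illustrative sketch (hypothetical packages): for distribution names such as
# 'foo', 'foo.bar' and 'baz', write_toplevel_names() keeps only the first dotted
# component of each and writes --
#
#   baz
#   foo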
654
655 def overwrite_arg(cmd, basename, filename):
656 write_arg(cmd, basename, filename, True)
657
658
659 def write_arg(cmd, basename, filename, force=False):
660 argname = os.path.splitext(basename)[0]
661 value = getattr(cmd.distribution, argname, None)
662 if value is not None:
663 value = '\n'.join(value) + '\n'
664 cmd.write_or_delete_file(argname, filename, value, force)
665
666
667 def write_entries(cmd, basename, filename):
668 ep = cmd.distribution.entry_points
669
670 if isinstance(ep, six.string_types) or ep is None:
671 data = ep
672 else:
673 data = []
674 for section, contents in sorted(ep.items()):
675 if not isinstance(contents, six.string_types):
676 contents = EntryPoint.parse_group(section, contents)
677 contents = '\n'.join(sorted(map(str, contents.values())))
678 data.append('[%s]\n%s\n\n' % (section, contents))
679 data = ''.join(data)
680
681 cmd.write_or_delete_file('entry points', filename, data, True)
682
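# Illustrative sketch (hypothetical entry point): an entry_points mapping such as
# {'console_scripts': ['hello = hello.cli:main']} is rendered by write_entries()
# roughly as --
#
#   [console_scripts]
#   hello = hello.cli:main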
683
684 def get_pkg_info_revision():
685 """
686 Extract the Subversion revision (-r###) from the PKG-INFO Version field, in
687 case this is an sdist of a subversion revision.
688 """
689 warnings.warn("get_pkg_info_revision is deprecated.", DeprecationWarning)
690 if os.path.exists('PKG-INFO'):
691 with io.open('PKG-INFO') as f:
692 for line in f:
693 match = re.match(r"Version:.*-r(\d+)\s*$", line)
694 if match:
695 return int(match.group(1))
696 return 0
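# Illustrative sketch, assuming an ordinary setup.py-based project: a typical
# invocation of this command is
#
#   python setup.py egg_info --tag-build=.dev --tag-date
#
# which creates the <name>.egg-info/ directory and runs every registered
# 'egg_info.writers' entry point, producing files such as PKG-INFO, SOURCES.txt,
# requires.txt, top_level.txt and entry_points.txt via the writers defined above.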