]>
Commit | Line | Data |
---|---|---|
53e6db90 DC |
1 | """setuptools.command.egg_info |
2 | ||
3 | Create a distribution's .egg-info directory and contents""" | |
4 | ||
5 | from distutils.filelist import FileList as _FileList | |
6 | from distutils.errors import DistutilsInternalError | |
7 | from distutils.util import convert_path | |
8 | from distutils import log | |
9 | import distutils.errors | |
10 | import distutils.filelist | |
11 | import functools | |
12 | import os | |
13 | import re | |
14 | import sys | |
15 | import io | |
16 | import warnings | |
17 | import time | |
18 | import collections | |
19 | ||
20 | from .._importlib import metadata | |
21 | from .. import _entry_points | |
22 | ||
23 | from setuptools import Command | |
24 | from setuptools.command.sdist import sdist | |
25 | from setuptools.command.sdist import walk_revctrl | |
26 | from setuptools.command.setopt import edit_config | |
27 | from setuptools.command import bdist_egg | |
28 | from pkg_resources import ( | |
29 | Requirement, safe_name, parse_version, | |
30 | safe_version, to_filename) | |
31 | import setuptools.unicode_utils as unicode_utils | |
32 | from setuptools.glob import glob | |
33 | ||
34 | from setuptools.extern import packaging | |
35 | from setuptools.extern.jaraco.text import yield_lines | |
36 | from setuptools import SetuptoolsDeprecationWarning | |
37 | ||
38 | ||
def translate_pattern(glob):  # noqa: C901  # is too complex (14) # FIXME
    """
    Translate a file path glob like '*.txt' in to a regular expression.
    This differs from fnmatch.translate which allows wildcards to match
    directory separators. It also knows about '**/' which matches any number of
    directories.
    """
    sep = re.escape(os.sep)
    # A single character that is anything except the path separator.
    valid_char = '[^%s]' % (sep,)

    def find_class_end(chunk, start, end):
        """Return the index of the ']' closing the class opened at *start*,
        or None when the class is never closed."""
        j = start + 1
        # An initial '!' (negation) and an immediately following ']' are
        # part of the class body, not terminators.
        if j < end and chunk[j] == '!':
            j = j + 1
        if j < end and chunk[j] == ']':
            j = j + 1
        while j < end and chunk[j] != ']':
            j = j + 1
        return j if j < end else None

    def translate_chunk(chunk):
        """Translate one path component (contains no separators)."""
        out = []
        pos = 0
        end = len(chunk)
        while pos < end:
            cur = chunk[pos]
            if cur == '*':
                # Any run of name characters (cannot cross a separator).
                out.append(valid_char + '*')
            elif cur == '?':
                # Exactly one name character.
                out.append(valid_char)
            elif cur == '[':
                close = find_class_end(chunk, pos, end)
                if close is None:
                    # Unterminated class: treat the '[' as a literal.
                    out.append(re.escape(cur))
                else:
                    body = chunk[pos + 1:close]
                    negated = body[0] == '!'
                    if negated:
                        body = body[1:]
                    out.append('[%s%s]' % ('^' if negated else '', re.escape(body)))
                    # Resume scanning just past the closing ']'.
                    pos = close
            else:
                out.append(re.escape(cur))
            pos += 1
        return ''.join(out)

    parts = []
    # This will split on '/' within [character classes]. This is deliberate.
    chunks = glob.split(os.path.sep)
    final = len(chunks) - 1
    for index, chunk in enumerate(chunks):
        if chunk == '**':
            # Globstar: any number of directories (or anything at all when it
            # is the last component). The non-final form already consumes the
            # trailing separator, so no extra separator is appended.
            parts.append('.*' if index == final else '(?:%s+%s)*' % (valid_char, sep))
            continue
        parts.append(translate_chunk(chunk))
        if index != final:
            parts.append(sep)
    parts.append(r'\Z')
    return re.compile(''.join(parts), flags=re.MULTILINE | re.DOTALL)
120 | ||
121 | ||
class InfoCommon:
    """Mixin providing name/version tagging helpers shared by the
    egg_info-family of commands."""

    tag_build = None
    tag_date = None

    @property
    def name(self):
        """The distribution name in its escaped ("safe") form."""
        return safe_name(self.distribution.get_name())

    def tagged_version(self):
        """The distribution version with any configured tags appended."""
        return safe_version(self._maybe_tag(self.distribution.get_version()))

    def _maybe_tag(self, version):
        """
        egg_info may be called more than once for a distribution,
        in which case the version string already contains all tags.
        """
        if self.vtags and self._already_tagged(version):
            return version
        return version + self.vtags

    def _already_tagged(self, version: str) -> bool:
        # Depending on their format, tags may change with version normalization.
        # So in addition to the regular tags, we have to search for the
        # normalized ones.
        return version.endswith(self.vtags) or version.endswith(self._safe_tags())

    def _safe_tags(self) -> str:
        # `safe_version` is applied to a fake version "0<tags>" so that only
        # the tag portion is normalized; the leading "0" is then discarded.
        return safe_version(f"0{self.vtags}")[1:]

    def tags(self) -> str:
        """Build the version-tag suffix from tag_build and tag_date."""
        pieces = []
        if self.tag_build:
            pieces.append(self.tag_build)
        if self.tag_date:
            pieces.append(time.strftime("%Y%m%d"))
        return ''.join(pieces)
    vtags = property(tags)
161 | ||
162 | ||
class egg_info(InfoCommon, Command):
    """Create and populate a distribution's ``.egg-info`` directory."""

    description = "create a distribution's .egg-info directory"

    user_options = [
        ('egg-base=', 'e', "directory containing .egg-info directories"
                           " (default: top of the source tree)"),
        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
        ('no-date', 'D', "Don't include date stamp [default]"),
    ]

    boolean_options = ['tag-date']
    negative_opt = {
        'no-date': 'tag-date',
    }

    def initialize_options(self):
        self.egg_base = None
        self.egg_name = None
        self.egg_info = None
        self.egg_version = None
        self.broken_egg_info = False
        self.ignore_egg_info_in_manifest = False

    ####################################
    # allow the 'tag_svn_revision' to be detected and
    # set, supporting sdists built on older Setuptools.
    @property
    def tag_svn_revision(self):
        pass

    @tag_svn_revision.setter
    def tag_svn_revision(self, value):
        pass
    ####################################

    def save_version_info(self, filename):
        """
        Materialize the value of date into the
        build tag. Install build keys in a deterministic order
        to avoid arbitrary reordering on subsequent builds.
        """
        egg_info = collections.OrderedDict()
        # follow the order these keys would have been added
        # when PYTHONHASHSEED=0
        egg_info['tag_build'] = self.tags()
        egg_info['tag_date'] = 0
        edit_config(filename, dict(egg_info=egg_info))

    def finalize_options(self):
        # Note: we need to capture the current value returned
        # by `self.tagged_version()`, so we can later update
        # `self.distribution.metadata.version` without
        # repercussions.
        self.egg_name = self.name
        self.egg_version = self.tagged_version()
        parsed_version = parse_version(self.egg_version)

        try:
            # A PEP 440 version gets '==';  anything else must use the
            # arbitrary-equality operator '==='.
            is_version = isinstance(parsed_version, packaging.version.Version)
            spec = "%s==%s" if is_version else "%s===%s"
            Requirement(spec % (self.egg_name, self.egg_version))
        except ValueError as e:
            raise distutils.errors.DistutilsOptionError(
                "Invalid distribution name or version syntax: %s-%s" %
                (self.egg_name, self.egg_version)
            ) from e

        if self.egg_base is None:
            dirs = self.distribution.package_dir
            self.egg_base = (dirs or {}).get('', os.curdir)

        self.ensure_dirname('egg_base')
        self.egg_info = to_filename(self.egg_name) + '.egg-info'
        if self.egg_base != os.curdir:
            self.egg_info = os.path.join(self.egg_base, self.egg_info)
        if '-' in self.egg_name:
            self.check_broken_egg_info()

        # Set package version for the benefit of dumber commands
        # (e.g. sdist, bdist_wininst, etc.)
        #
        self.distribution.metadata.version = self.egg_version

        # If we bootstrapped around the lack of a PKG-INFO, as might be the
        # case in a fresh checkout, make sure that any special tags get added
        # to the version info
        #
        pd = self.distribution._patched_dist
        if pd is not None and pd.key == self.egg_name.lower():
            pd._version = self.egg_version
            pd._parsed_version = parse_version(self.egg_version)
            self.distribution._patched_dist = None

    def write_or_delete_file(self, what, filename, data, force=False):
        """Write `data` to `filename` or delete if empty

        If `data` is non-empty, this routine is the same as ``write_file()``.
        If `data` is empty but not ``None``, this is the same as calling
        ``delete_file(filename)`. If `data` is ``None``, then this is a no-op
        unless `filename` exists, in which case a warning is issued about the
        orphaned file (if `force` is false), or deleted (if `force` is true).
        """
        if data:
            self.write_file(what, filename, data)
        elif os.path.exists(filename):
            if data is None and not force:
                log.warn(
                    "%s not set in setup(), but %s exists", what, filename
                )
                return
            else:
                self.delete_file(filename)

    def write_file(self, what, filename, data):
        """Write `data` to `filename` (if not a dry run) after announcing it

        `what` is used in a log message to identify what is being written
        to the file.
        """
        log.info("writing %s to %s", what, filename)
        data = data.encode("utf-8")
        if not self.dry_run:
            # Use a context manager so the handle is closed even when the
            # write raises (the previous open/close pair leaked it on error).
            with open(filename, 'wb') as f:
                f.write(data)

    def delete_file(self, filename):
        """Delete `filename` (if not a dry run) after announcing it"""
        log.info("deleting %s", filename)
        if not self.dry_run:
            os.unlink(filename)

    def run(self):
        """Create the .egg-info dir and run every registered metadata writer."""
        self.mkpath(self.egg_info)
        try:
            os.utime(self.egg_info, None)
        except OSError as e:
            msg = f"Cannot update time stamp of directory '{self.egg_info}'"
            raise distutils.errors.DistutilsFileError(msg) from e
        # Each 'egg_info.writers' entry point writes one metadata file
        # (PKG-INFO, requires.txt, entry_points.txt, ...).
        for ep in metadata.entry_points(group='egg_info.writers'):
            writer = ep.load()
            writer(self, ep.name, os.path.join(self.egg_info, ep.name))

        # Get rid of native_libs.txt if it was put there by older bdist_egg
        nl = os.path.join(self.egg_info, "native_libs.txt")
        if os.path.exists(nl):
            self.delete_file(nl)

        self.find_sources()

    def find_sources(self):
        """Generate SOURCES.txt manifest file"""
        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
        mm = manifest_maker(self.distribution)
        mm.ignore_egg_info_dir = self.ignore_egg_info_in_manifest
        mm.manifest = manifest_filename
        mm.run()
        self.filelist = mm.filelist

    def check_broken_egg_info(self):
        """Warn when an .egg-info dir with a '-' in its name is present,
        and fall back to using it."""
        bei = self.egg_name + '.egg-info'
        if self.egg_base != os.curdir:
            bei = os.path.join(self.egg_base, bei)
        if os.path.exists(bei):
            log.warn(
                "-" * 78 + '\n'
                "Note: Your current .egg-info directory has a '-' in its name;"
                '\nthis will not work correctly with "setup.py develop".\n\n'
                'Please rename %s to %s to correct this problem.\n' + '-' * 78,
                bei, self.egg_info
            )
            self.broken_egg_info = self.egg_info
            self.egg_info = bei  # make it work for now
337 | ||
338 | ||
class FileList(_FileList):
    """distutils FileList subclass implementing the MANIFEST.in commands
    with setuptools semantics (recursive globs, egg-info filtering)."""
    # Implementations of the various MANIFEST.in commands

    def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir=False):
        # ignore_egg_info_dir: when true, _safe_path rejects any path that
        # contains ".egg-info".
        super().__init__(warn, debug_print)
        self.ignore_egg_info_dir = ignore_egg_info_dir

    def process_template_line(self, line):
        """Execute one MANIFEST.in line, dispatching on its first word."""
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # Map each action to its handler; the recursive ones are partially
        # applied with the directory already parsed from the line.
        action_map = {
            'include': self.include,
            'exclude': self.exclude,
            'global-include': self.global_include,
            'global-exclude': self.global_exclude,
            'recursive-include': functools.partial(
                self.recursive_include, dir,
            ),
            'recursive-exclude': functools.partial(
                self.recursive_exclude, dir,
            ),
            'graft': self.graft,
            'prune': self.prune,
        }
        # Per-action warning templates used when a pattern matched nothing.
        log_map = {
            'include': "warning: no files found matching '%s'",
            'exclude': (
                "warning: no previously-included files found "
                "matching '%s'"
            ),
            'global-include': (
                "warning: no files found matching '%s' "
                "anywhere in distribution"
            ),
            'global-exclude': (
                "warning: no previously-included files matching "
                "'%s' found anywhere in distribution"
            ),
            'recursive-include': (
                "warning: no files found matching '%s' "
                "under directory '%s'"
            ),
            'recursive-exclude': (
                "warning: no previously-included files matching "
                "'%s' found under directory '%s'"
            ),
            'graft': "warning: no directories found matching '%s'",
            'prune': "no previously-included directories found matching '%s'",
        }

        try:
            process_action = action_map[action]
        except KeyError:
            # _parse_template_line already validated the action, so this
            # branch is unreachable in practice.
            raise DistutilsInternalError(
                "this cannot happen: invalid action '{action!s}'".
                format(action=action),
            )

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.

        action_is_recursive = action.startswith('recursive-')
        if action in {'graft', 'prune'}:
            # graft/prune carry a single directory pattern instead of a list.
            patterns = [dir_pattern]
        extra_log_args = (dir, ) if action_is_recursive else ()
        log_tmpl = log_map[action]

        self.debug_print(
            ' '.join(
                [action] +
                ([dir] if action_is_recursive else []) +
                patterns,
            )
        )
        for pattern in patterns:
            # Each handler returns truthy when it matched at least one file.
            if not process_action(pattern):
                log.warn(log_tmpl, pattern, *extra_log_args)

    def _remove_files(self, predicate):
        """
        Remove all files from the file list that match the predicate.
        Return True if any matching files were removed
        """
        found = False
        # Iterate backwards so deletions don't shift unvisited indices.
        for i in range(len(self.files) - 1, -1, -1):
            if predicate(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                found = True
        return found

    def include(self, pattern):
        """Include files that match 'pattern'."""
        # Directories are skipped; only plain files are recorded.
        found = [f for f in glob(pattern) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def exclude(self, pattern):
        """Exclude files that match 'pattern'."""
        match = translate_pattern(pattern)
        return self._remove_files(match.match)

    def recursive_include(self, dir, pattern):
        """
        Include all files anywhere in 'dir/' that match the pattern.
        """
        full_pattern = os.path.join(dir, '**', pattern)
        found = [f for f in glob(full_pattern, recursive=True)
                 if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def recursive_exclude(self, dir, pattern):
        """
        Exclude any file anywhere in 'dir/' that match the pattern.
        """
        match = translate_pattern(os.path.join(dir, '**', pattern))
        return self._remove_files(match.match)

    def graft(self, dir):
        """Include all files from 'dir/'."""
        found = [
            item
            for match_dir in glob(dir)
            for item in distutils.filelist.findall(match_dir)
        ]
        self.extend(found)
        return bool(found)

    def prune(self, dir):
        """Filter out files from 'dir/'."""
        match = translate_pattern(os.path.join(dir, '**'))
        return self._remove_files(match.match)

    def global_include(self, pattern):
        """
        Include all files anywhere in the current directory that match the
        pattern. This is very inefficient on large file trees.
        """
        # allfiles is populated lazily; findall() walks the whole tree.
        if self.allfiles is None:
            self.findall()
        match = translate_pattern(os.path.join('**', pattern))
        found = [f for f in self.allfiles if match.match(f)]
        self.extend(found)
        return bool(found)

    def global_exclude(self, pattern):
        """
        Exclude all files anywhere that match the pattern.
        """
        match = translate_pattern(os.path.join('**', pattern))
        return self._remove_files(match.match)

    def append(self, item):
        # Append a single path, normalizing it and dropping unsafe entries.
        if item.endswith('\r'):  # Fix older sdists built on Windows
            item = item[:-1]
        path = convert_path(item)

        if self._safe_path(path):
            self.files.append(path)

    def extend(self, paths):
        # Bulk append, keeping only paths that pass _safe_path.
        self.files.extend(filter(self._safe_path, paths))

    def _repair(self):
        """
        Replace self.files with only safe paths

        Because some owners of FileList manipulate the underlying
        ``files`` attribute directly, this method must be called to
        repair those paths.
        """
        self.files = list(filter(self._safe_path, self.files))

    def _safe_path(self, path):
        # Decide whether *path* may be kept in the file list: it must be
        # decodable/utf-8 encodable and must exist on disk.
        enc_warn = "'%s' not %s encodable -- skipping"

        # To avoid accidental trans-codings errors, first to unicode
        u_path = unicode_utils.filesys_decode(path)
        if u_path is None:
            log.warn("'%s' in unexpected encoding -- skipping" % path)
            return False

        # Must ensure utf-8 encodability
        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
        if utf8_path is None:
            log.warn(enc_warn, path, 'utf-8')
            return False

        try:
            # ignore egg-info paths
            is_egg_info = ".egg-info" in u_path or b".egg-info" in utf8_path
            if self.ignore_egg_info_dir and is_egg_info:
                return False
            # accept is either way checks out
            if os.path.exists(u_path) or os.path.exists(utf8_path):
                return True
            # this will catch any encode errors decoding u_path
        except UnicodeEncodeError:
            log.warn(enc_warn, path, sys.getfilesystemencoding())
        # NOTE(review): when neither path exists (and no exception fired),
        # control falls off the end and returns None, which is falsy --
        # nonexistent paths are rejected implicitly.
546 | ||
class manifest_maker(sdist):
    """sdist subclass that only computes and writes the SOURCES.txt manifest,
    without actually building a distribution."""

    # Name of the template file read for extra include/exclude commands.
    template = "MANIFEST.in"

    def initialize_options(self):
        # Force manifest-only behavior: always regenerate, never build.
        self.use_defaults = 1
        self.prune = 1
        self.manifest_only = 1
        self.force_manifest = 1
        self.ignore_egg_info_dir = False

    def finalize_options(self):
        pass

    def run(self):
        """Collect the file list from defaults, VCS, template and config,
        then write it to ``self.manifest``."""
        self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir)
        if not os.path.exists(self.manifest):
            self.write_manifest()  # it must exist so it'll get in the list
        self.add_defaults()
        if os.path.exists(self.template):
            self.read_template()
        self.add_license_files()
        self._add_referenced_files()
        self.prune_file_list()
        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()

    def _manifest_normalize(self, path):
        # Manifest entries are stored with POSIX '/' separators.
        path = unicode_utils.filesys_decode(path)
        return path.replace(os.sep, '/')

    def write_manifest(self):
        """
        Write the file list in 'self.filelist' to the manifest file
        named by 'self.manifest'.
        """
        self.filelist._repair()

        # _repair() ensured every path is safely encodable; here we only
        # normalize the separators for the on-disk manifest.
        files = [self._manifest_normalize(f) for f in self.filelist.files]
        msg = "writing manifest file '%s'" % self.manifest
        self.execute(write_file, (self.manifest, files), msg)

    def warn(self, msg):
        # Filter out the one sdist warning that is expected in this mode.
        if not self._should_suppress_warning(msg):
            sdist.warn(self, msg)

    @staticmethod
    def _should_suppress_warning(msg):
        """
        suppress missing-file warnings from sdist
        """
        return re.match(r"standard file .*not found", msg)

    def add_defaults(self):
        """Add the standard default files plus VCS-tracked files and the
        egg-info directory itself."""
        sdist.add_defaults(self)
        self.filelist.append(self.template)
        self.filelist.append(self.manifest)
        rcfiles = list(walk_revctrl())
        if rcfiles:
            self.filelist.extend(rcfiles)
        elif os.path.exists(self.manifest):
            # No revision control info: fall back to the previous manifest.
            self.read_manifest()

        if os.path.exists("setup.py"):
            # setup.py should be included by default, even if it's not
            # the script called to create the sdist
            self.filelist.append("setup.py")

        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist.graft(ei_cmd.egg_info)

    def add_license_files(self):
        # Include every file declared via the license_files metadata.
        license_files = self.distribution.metadata.license_files or []
        for lf in license_files:
            log.info("adding license file '%s'", lf)
        self.filelist.extend(license_files)

    def _add_referenced_files(self):
        """Add files referenced by the config (e.g. `file:` directive) to filelist"""
        referenced = getattr(self.distribution, '_referenced_files', [])
        # ^-- fallback if dist comes from distutils or is a custom class
        for rf in referenced:
            log.debug("adding file referenced by config '%s'", rf)
        self.filelist.extend(referenced)

    def prune_file_list(self):
        """Drop build output, the dist directory and VCS bookkeeping dirs."""
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()
        self.filelist.prune(build.build_base)
        self.filelist.prune(base_dir)
        sep = re.escape(os.sep)
        self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
                                      is_regex=1)

    def _safe_data_files(self, build_py):
        """
        The parent class implementation of this method
        (``sdist``) will try to include data files, which
        might cause recursion problems when
        ``include_package_data=True``.

        Therefore, avoid triggering any attempt of
        analyzing/building the manifest again.
        """
        if hasattr(build_py, 'get_data_files_without_manifest'):
            return build_py.get_data_files_without_manifest()

        warnings.warn(
            "Custom 'build_py' does not implement "
            "'get_data_files_without_manifest'.\nPlease extend command classes"
            " from setuptools instead of distutils.",
            SetuptoolsDeprecationWarning
        )
        return build_py.get_data_files()
662 | ||
663 | ||
def write_file(filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.
    """
    # assuming the contents has been vetted for utf-8 encoding
    payload = "\n".join(contents).encode("utf-8")

    with open(filename, "wb") as fp:  # always write POSIX-style manifest
        fp.write(payload)
675 | ||
676 | ||
def write_pkg_info(cmd, basename, filename):
    """Write PKG-INFO under the egg name/version, then record zip-safety."""
    log.info("writing %s", filename)
    if cmd.dry_run:
        return

    metadata = cmd.distribution.metadata
    # Temporarily substitute the egg's name/version, restoring afterwards.
    oldver, oldname = metadata.version, metadata.name
    metadata.version, metadata.name = cmd.egg_version, cmd.egg_name

    try:
        # write unescaped data to PKG-INFO, so older pkg_resources
        # can still parse it
        metadata.write_pkg_info(cmd.egg_info)
    finally:
        metadata.name, metadata.version = oldname, oldver

    safe = getattr(cmd.distribution, 'zip_safe', None)

    bdist_egg.write_safety_flag(cmd.egg_info, safe)
694 | ||
695 | ||
def warn_depends_obsolete(cmd, basename, filename):
    """Warn when a stale depends.txt is present; setuptools never reads it."""
    if not os.path.exists(filename):
        return
    log.warn(
        "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
        "Use the install_requires/extras_require setup() args instead."
    )
702 | ||
703 | ||
def _write_requirements(stream, reqs):
    """Write the requirement lines, sorted and newline-terminated, to *stream*."""
    lines = yield_lines(reqs or ())
    stream.writelines(line + '\n' for line in sorted(lines))
711 | ||
712 | ||
def write_requirements(cmd, basename, filename):
    """Write requires.txt: install_requires plus one [extra] section per extra."""
    dist = cmd.distribution
    buf = io.StringIO()
    _write_requirements(buf, dist.install_requires)
    extras_require = dist.extras_require or {}
    for extra in sorted(extras_require):
        buf.write('\n[{extra}]\n'.format(extra=extra))
        _write_requirements(buf, extras_require[extra])
    cmd.write_or_delete_file("requirements", filename, buf.getvalue())
722 | ||
723 | ||
def write_setup_requirements(cmd, basename, filename):
    """Write setup_requires to setup_requirements.txt (or delete it if empty)."""
    buf = io.StringIO()
    _write_requirements(buf, cmd.distribution.setup_requires)
    cmd.write_or_delete_file("setup-requirements", filename, buf.getvalue())
728 | ||
729 | ||
def write_toplevel_names(cmd, basename, filename):
    """Write the sorted set of top-level package/module names, one per line."""
    toplevel = {
        name.split('.', 1)[0]
        for name in cmd.distribution.iter_distribution_names()
    }
    cmd.write_file(
        "top-level names", filename, '\n'.join(sorted(toplevel)) + '\n'
    )
738 | ||
739 | ||
def overwrite_arg(cmd, basename, filename):
    """Like write_arg, but force deletion of an orphaned file instead of warning."""
    write_arg(cmd, basename, filename, force=True)
742 | ||
743 | ||
def write_arg(cmd, basename, filename, force=False):
    """Write the distribution attribute named after *basename* (sans
    extension) to *filename*, one entry per line, or delete the file."""
    argname, _ = os.path.splitext(basename)
    value = getattr(cmd.distribution, argname, None)
    value = None if value is None else '\n'.join(value) + '\n'
    cmd.write_or_delete_file(argname, filename, value, force)
750 | ||
751 | ||
def write_entries(cmd, basename, filename):
    """Render the distribution's entry points and write (or delete) the file."""
    rendered = _entry_points.render(
        _entry_points.load(cmd.distribution.entry_points)
    )
    cmd.write_or_delete_file('entry points', filename, rendered, True)
756 | ||
757 | ||
def get_pkg_info_revision():
    """
    Get a -r### off of PKG-INFO Version in case this is an sdist of
    a subversion revision.
    """
    warnings.warn(
        "get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
    if not os.path.exists('PKG-INFO'):
        return 0
    with io.open('PKG-INFO') as f:
        for line in f:
            match = re.match(r"Version:.*-r(\d+)\s*$", line)
            if match:
                return int(match.group(1))
    return 0
772 | ||
773 | ||
class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
    """Deprecated behavior warning for EggInfo, bypassing suppression.

    Emitted for deprecated egg_info behaviors (e.g. ``get_pkg_info_revision``).
    """