__all__ = ['Distribution']

import io
import sys
import re
import os
import warnings
import numbers
import distutils.log
import distutils.core
import distutils.cmd
import distutils.dist
import distutils.command
from distutils.util import strtobool
from distutils.debug import DEBUG
from distutils.fancy_getopt import translate_longopt
from glob import iglob
import itertools
import textwrap
from typing import List, Optional, Set, TYPE_CHECKING
from pathlib import Path

from collections import defaultdict
from email import message_from_file

from distutils.errors import DistutilsOptionError, DistutilsSetupError
from distutils.util import rfc822_escape

from setuptools.extern import packaging
from setuptools.extern import ordered_set
from setuptools.extern.more_itertools import unique_everseen, partition

from ._importlib import metadata

from . import SetuptoolsDeprecationWarning

import setuptools
import setuptools.command
from setuptools import windows_support
from setuptools.monkey import get_unpatched
from setuptools.config import setupcfg, pyprojecttoml
from setuptools.discovery import ConfigDiscovery

import pkg_resources
from setuptools.extern.packaging import version
from . import _reqs
from . import _entry_points

if TYPE_CHECKING:
    from email.message import Message

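# Eagerly import the vendored submodules used below; presumably this is what
# makes attribute access like `packaging.specifiers` and `packaging.version`
# resolve without an explicit `from ... import` at the usage sites.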
__import__('setuptools.extern.packaging.specifiers')
__import__('setuptools.extern.packaging.version')


def _get_unpatched(cls):
    warnings.warn("Do not call this function", DistDeprecationWarning)
    return get_unpatched(cls)


def get_metadata_version(self):
    mv = getattr(self, 'metadata_version', None)
    if mv is None:
        mv = version.Version('2.1')
        self.metadata_version = mv
    return mv


def rfc822_unescape(content: str) -> str:
    """Reverse RFC-822 escaping by removing leading whitespace from content."""
    lines = content.splitlines()
    if len(lines) == 1:
        return lines[0].lstrip()
    return '\n'.join((lines[0].lstrip(), textwrap.dedent('\n'.join(lines[1:]))))
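# Illustrative round-trip (a sketch, relying on distutils' rfc822_escape
# indenting continuation lines with eight spaces): a license value of
# "MIT\nSee LICENSE" is serialized in PKG-INFO as "MIT\n        See LICENSE",
# and rfc822_unescape above strips that leading whitespace to recover the
# original multi-line value.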


def _read_field_from_msg(msg: "Message", field: str) -> Optional[str]:
    """Read Message header field."""
    value = msg[field]
    if value == 'UNKNOWN':
        return None
    return value


def _read_field_unescaped_from_msg(msg: "Message", field: str) -> Optional[str]:
    """Read Message header field and apply rfc822_unescape."""
    value = _read_field_from_msg(msg, field)
    if value is None:
        return value
    return rfc822_unescape(value)


def _read_list_from_msg(msg: "Message", field: str) -> Optional[List[str]]:
    """Read Message header field and return all results as list."""
    values = msg.get_all(field, None)
    if values == []:
        return None
    return values


def _read_payload_from_msg(msg: "Message") -> Optional[str]:
    value = msg.get_payload().strip()
    if value == 'UNKNOWN' or not value:
        return None
    return value


def read_pkg_file(self, file):
    """Reads the metadata values from a file object."""
    msg = message_from_file(file)

    self.metadata_version = version.Version(msg['metadata-version'])
    self.name = _read_field_from_msg(msg, 'name')
    self.version = _read_field_from_msg(msg, 'version')
    self.description = _read_field_from_msg(msg, 'summary')
    # we are filling author only.
    self.author = _read_field_from_msg(msg, 'author')
    self.maintainer = None
    self.author_email = _read_field_from_msg(msg, 'author-email')
    self.maintainer_email = None
    self.url = _read_field_from_msg(msg, 'home-page')
    self.download_url = _read_field_from_msg(msg, 'download-url')
    self.license = _read_field_unescaped_from_msg(msg, 'license')

    self.long_description = _read_field_unescaped_from_msg(msg, 'description')
    if (
        self.long_description is None and
        self.metadata_version >= version.Version('2.1')
    ):
        self.long_description = _read_payload_from_msg(msg)
    self.description = _read_field_from_msg(msg, 'summary')

    if 'keywords' in msg:
        self.keywords = _read_field_from_msg(msg, 'keywords').split(',')

    self.platforms = _read_list_from_msg(msg, 'platform')
    self.classifiers = _read_list_from_msg(msg, 'classifier')

    # PEP 314 - these fields only exist in 1.1
    if self.metadata_version == version.Version('1.1'):
        self.requires = _read_list_from_msg(msg, 'requires')
        self.provides = _read_list_from_msg(msg, 'provides')
        self.obsoletes = _read_list_from_msg(msg, 'obsoletes')
    else:
        self.requires = None
        self.provides = None
        self.obsoletes = None

    self.license_files = _read_list_from_msg(msg, 'license-file')


def single_line(val):
    """
    Quick and dirty validation for Summary pypa/setuptools#1390.
    """
    if '\n' in val:
        # TODO: Replace with `raise ValueError("newlines not allowed")`
        # after reviewing #2893.
        warnings.warn("newlines not allowed and will break in the future")
        val = val.strip().split('\n')[0]
    return val


# Based on Python 3.5 version
def write_pkg_file(self, file):  # noqa: C901  # is too complex (14)  # FIXME
    """Write the PKG-INFO format data to a file object."""
    version = self.get_metadata_version()

    def write_field(key, value):
        file.write("%s: %s\n" % (key, value))

    write_field('Metadata-Version', str(version))
    write_field('Name', self.get_name())
    write_field('Version', self.get_version())

    summary = self.get_description()
    if summary:
        write_field('Summary', single_line(summary))

    optional_fields = (
        ('Home-page', 'url'),
        ('Download-URL', 'download_url'),
        ('Author', 'author'),
        ('Author-email', 'author_email'),
        ('Maintainer', 'maintainer'),
        ('Maintainer-email', 'maintainer_email'),
    )

    for field, attr in optional_fields:
        attr_val = getattr(self, attr, None)
        if attr_val is not None:
            write_field(field, attr_val)

    license = self.get_license()
    if license:
        write_field('License', rfc822_escape(license))

    for project_url in self.project_urls.items():
        write_field('Project-URL', '%s, %s' % project_url)

    keywords = ','.join(self.get_keywords())
    if keywords:
        write_field('Keywords', keywords)

    platforms = self.get_platforms() or []
    for platform in platforms:
        write_field('Platform', platform)

    self._write_list(file, 'Classifier', self.get_classifiers())

    # PEP 314
    self._write_list(file, 'Requires', self.get_requires())
    self._write_list(file, 'Provides', self.get_provides())
    self._write_list(file, 'Obsoletes', self.get_obsoletes())

    # Setuptools specific for PEP 345
    if hasattr(self, 'python_requires'):
        write_field('Requires-Python', self.python_requires)

    # PEP 566
    if self.long_description_content_type:
        write_field('Description-Content-Type', self.long_description_content_type)
    if self.provides_extras:
        for extra in sorted(self.provides_extras):
            write_field('Provides-Extra', extra)

    self._write_list(file, 'License-File', self.license_files or [])

    long_description = self.get_long_description()
    if long_description:
        file.write("\n%s" % long_description)
        if not long_description.endswith("\n"):
            file.write("\n")
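# Schematically, the PKG-INFO produced above looks like (placeholder values):
#
#   Metadata-Version: 2.1
#   Name: example
#   Version: 1.0.0
#   Summary: One-line description
#   ...optional fields, Project-URL, Keywords, Platform, Classifier, ...
#
#   <long description payload, separated from the headers by a blank line>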


sequence = tuple, list


def check_importable(dist, attr, value):
    try:
        ep = metadata.EntryPoint(value=value, name=None, group=None)
        assert not ep.extras
    except (TypeError, ValueError, AttributeError, AssertionError) as e:
        raise DistutilsSetupError(
            "%r must be importable 'module:attrs' string (got %r)" % (attr, value)
        ) from e


def assert_string_list(dist, attr, value):
    """Verify that value is a string list"""
    try:
        # verify that value is a list or tuple to exclude unordered
        # or single-use iterables
        assert isinstance(value, (list, tuple))
        # verify that elements of value are strings
        assert ''.join(value) != value
    except (TypeError, ValueError, AttributeError, AssertionError) as e:
        raise DistutilsSetupError(
            "%r must be a list of strings (got %r)" % (attr, value)
        ) from e
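# Note: for a genuine list/tuple of strings the joined result (a str) can never
# equal the sequence itself, so the second assert above only trips via the
# TypeError that ``''.join`` raises when an element is not a string.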


def check_nsp(dist, attr, value):
    """Verify that namespace packages are valid"""
    ns_packages = value
    assert_string_list(dist, attr, ns_packages)
    for nsp in ns_packages:
        if not dist.has_contents_for(nsp):
            raise DistutilsSetupError(
                "Distribution contains no modules or packages for "
                + "namespace package %r" % nsp
            )
        parent, sep, child = nsp.rpartition('.')
        if parent and parent not in ns_packages:
            distutils.log.warn(
                "WARNING: %r is declared as a package namespace, but %r"
                " is not: please correct this in setup.py",
                nsp,
                parent,
            )
    msg = (
        "The namespace_packages parameter is deprecated, "
        "consider using implicit namespaces instead (PEP 420)."
    )
    warnings.warn(msg, SetuptoolsDeprecationWarning)


def check_extras(dist, attr, value):
    """Verify that extras_require mapping is valid"""
    try:
        list(itertools.starmap(_check_extra, value.items()))
    except (TypeError, ValueError, AttributeError) as e:
        raise DistutilsSetupError(
            "'extras_require' must be a dictionary whose values are "
            "strings or lists of strings containing valid project/version "
            "requirement specifiers."
        ) from e


def _check_extra(extra, reqs):
    name, sep, marker = extra.partition(':')
    if marker and pkg_resources.invalid_marker(marker):
        raise DistutilsSetupError("Invalid environment marker: " + marker)
    list(_reqs.parse(reqs))
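# e.g. a key produced by Distribution._convert_extras_requirements such as
# 'docs:python_version < "3.8"' splits here into the extra name 'docs' and the
# environment marker 'python_version < "3.8"' to be validated.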


def assert_bool(dist, attr, value):
    """Verify that value is True, False, 0, or 1"""
    if bool(value) != value:
        tmpl = "{attr!r} must be a boolean value (got {value!r})"
        raise DistutilsSetupError(tmpl.format(attr=attr, value=value))


def invalid_unless_false(dist, attr, value):
    if not value:
        warnings.warn(f"{attr} is ignored.", DistDeprecationWarning)
        return
    raise DistutilsSetupError(f"{attr} is invalid.")


def check_requirements(dist, attr, value):
    """Verify that install_requires is a valid requirements list"""
    try:
        list(_reqs.parse(value))
        if isinstance(value, (dict, set)):
            raise TypeError("Unordered types are not allowed")
    except (TypeError, ValueError) as error:
        tmpl = (
            "{attr!r} must be a string or list of strings "
            "containing valid project/version requirement specifiers; {error}"
        )
        raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error


def check_specifier(dist, attr, value):
    """Verify that value is a valid version specifier"""
    try:
        packaging.specifiers.SpecifierSet(value)
    except (packaging.specifiers.InvalidSpecifier, AttributeError) as error:
        tmpl = (
            "{attr!r} must be a string containing valid version specifiers; {error}"
        )
        raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) from error
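# e.g. python_requires=">=3.7,<4" parses cleanly here, while a non-string value
# (or a malformed specifier such as ">>3") is reported via the error above.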


def check_entry_points(dist, attr, value):
    """Verify that entry_points map is parseable"""
    try:
        _entry_points.load(value)
    except Exception as e:
        raise DistutilsSetupError(e) from e


def check_test_suite(dist, attr, value):
    if not isinstance(value, str):
        raise DistutilsSetupError("test_suite must be a string")


def check_package_data(dist, attr, value):
    """Verify that value is a dictionary of package names to glob lists"""
    if not isinstance(value, dict):
        raise DistutilsSetupError(
            "{!r} must be a dictionary mapping package names to lists of "
            "string wildcard patterns".format(attr)
        )
    for k, v in value.items():
        if not isinstance(k, str):
            raise DistutilsSetupError(
                "keys of {!r} dict must be strings (got {!r})".format(attr, k)
            )
        assert_string_list(dist, 'values of {!r} dict'.format(attr), v)


def check_packages(dist, attr, value):
    for pkgname in value:
        if not re.match(r'\w+(\.\w+)*', pkgname):
            distutils.log.warn(
                "WARNING: %r not a valid package name; please use only "
                ".-separated package names in setup.py",
                pkgname,
            )


_Distribution = get_unpatched(distutils.core.Distribution)


class Distribution(_Distribution):
    """Distribution with support for tests and package data

    This is an enhanced version of 'distutils.dist.Distribution' that
    effectively adds the following new optional keyword arguments to 'setup()':

    'install_requires' -- a string or sequence of strings specifying project
        versions that the distribution requires when installed, in the format
        used by 'pkg_resources.require()'. They will be installed
        automatically when the package is installed. If you wish to use
        packages that are not available in PyPI, or want to give your users an
        alternate download location, you can add a 'find_links' option to the
        '[easy_install]' section of your project's 'setup.cfg' file, and then
        setuptools will scan the listed web pages for links that satisfy the
        requirements.

    'extras_require' -- a dictionary mapping names of optional "extras" to the
        additional requirement(s) that using those extras incurs. For example,
        this::

            extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])

        indicates that the distribution can optionally provide an extra
        capability called "reST", but it can only be used if docutils and
        reSTedit are installed. If the user installs your package using
        EasyInstall and requests one of your extras, the corresponding
        additional requirements will be installed if needed.

    'test_suite' -- the name of a test suite to run for the 'test' command.
        If the user runs 'python setup.py test', the package will be installed,
        and the named test suite will be run. The format is the same as
        would be used on a 'unittest.py' command line. That is, it is the
        dotted name of an object to import and call to generate a test suite.

    'package_data' -- a dictionary mapping package names to lists of filenames
        or globs to use to find data files contained in the named packages.
        If the dictionary has filenames or globs listed under '""' (the empty
        string), those names will be searched for in every package, in addition
        to any names for the specific package. Data files found using these
        names/globs will be installed along with the package, in the same
        location as the package. Note that globs are allowed to reference
        the contents of non-package subdirectories, as long as you use '/' as
        a path separator. (Globs are automatically converted to
        platform-specific paths at runtime.)

    In addition to these new keywords, this class also has several new methods
    for manipulating the distribution's contents. For example, the 'include()'
    and 'exclude()' methods can be thought of as in-place add and subtract
    commands that add or remove packages, modules, extensions, and so on from
    the distribution.
    """

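    # Metadata fields that setuptools understands but plain distutils does not;
    # each value is a zero-argument callable returning the field's default
    # (consumed by _set_metadata_defaults below).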
    _DISTUTILS_UNSUPPORTED_METADATA = {
        'long_description_content_type': lambda: None,
        'project_urls': dict,
        'provides_extras': ordered_set.OrderedSet,
        'license_file': lambda: None,
        'license_files': lambda: None,
    }

    _patched_dist = None

    def patch_missing_pkg_info(self, attrs):
        # Fake up a replacement for the data that would normally come from
        # PKG-INFO, but which might not yet be built if this is a fresh
        # checkout.
        #
        if not attrs or 'name' not in attrs or 'version' not in attrs:
            return
        key = pkg_resources.safe_name(str(attrs['name'])).lower()
        dist = pkg_resources.working_set.by_key.get(key)
        if dist is not None and not dist.has_metadata('PKG-INFO'):
            dist._version = pkg_resources.safe_version(str(attrs['version']))
            self._patched_dist = dist

    def __init__(self, attrs=None):
        have_package_data = hasattr(self, "package_data")
        if not have_package_data:
            self.package_data = {}
        attrs = attrs or {}
        self.dist_files = []
        # Filter out setuptools-specific options.
        self.src_root = attrs.pop("src_root", None)
        self.patch_missing_pkg_info(attrs)
        self.dependency_links = attrs.pop('dependency_links', [])
        self.setup_requires = attrs.pop('setup_requires', [])
        for ep in metadata.entry_points(group='distutils.setup_keywords'):
            vars(self).setdefault(ep.name, None)
        _Distribution.__init__(
            self,
            {
                k: v
                for k, v in attrs.items()
                if k not in self._DISTUTILS_UNSUPPORTED_METADATA
            },
        )

        # Private API (setuptools-use only, not restricted to Distribution)
        # Stores files that are referenced by the configuration and need to be in the
        # sdist (e.g. `version = file: VERSION.txt`)
        self._referenced_files: Set[str] = set()

        # Save the original dependencies before they are processed into the egg format
        self._orig_extras_require = {}
        self._orig_install_requires = []
        self._tmp_extras_require = defaultdict(ordered_set.OrderedSet)

        self.set_defaults = ConfigDiscovery(self)

        self._set_metadata_defaults(attrs)

        self.metadata.version = self._normalize_version(
            self._validate_version(self.metadata.version)
        )
        self._finalize_requires()

    def _validate_metadata(self):
        required = {"name"}
        provided = {
            key
            for key in vars(self.metadata)
            if getattr(self.metadata, key, None) is not None
        }
        missing = required - provided

        if missing:
            msg = f"Required package metadata is missing: {missing}"
            raise DistutilsSetupError(msg)

    def _set_metadata_defaults(self, attrs):
        """
        Fill in missing metadata fields not supported by distutils.
        Some fields may have been set by other tools (e.g. pbr).
        Those fields (vars(self.metadata)) take precedence over the
        supplied attrs.
        """
        for option, default in self._DISTUTILS_UNSUPPORTED_METADATA.items():
            vars(self.metadata).setdefault(option, attrs.get(option, default()))

    @staticmethod
    def _normalize_version(version):
        if isinstance(version, setuptools.sic) or version is None:
            return version

        normalized = str(packaging.version.Version(version))
        if version != normalized:
            tmpl = "Normalizing '{version}' to '{normalized}'"
            warnings.warn(tmpl.format(**locals()))
            return normalized
        return version

    @staticmethod
    def _validate_version(version):
        if isinstance(version, numbers.Number):
            # Some people apparently take "version number" too literally :)
            version = str(version)

        if version is not None:
            try:
                packaging.version.Version(version)
            except (packaging.version.InvalidVersion, TypeError):
                warnings.warn(
                    "The version specified (%r) is an invalid version, this "
                    "may not work as expected with newer versions of "
                    "setuptools, pip, and PyPI. Please see PEP 440 for more "
                    "details." % version
                )
                return setuptools.sic(version)
        return version

    def _finalize_requires(self):
        """
        Set `metadata.python_requires` and fix environment markers
        in `install_requires` and `extras_require`.
        """
        if getattr(self, 'python_requires', None):
            self.metadata.python_requires = self.python_requires

        if getattr(self, 'extras_require', None):
            # Save the original before it is mutated by _convert_extras_requirements
            self._orig_extras_require = self._orig_extras_require or self.extras_require
            for extra in self.extras_require.keys():
                # Since this gets called multiple times at points where the
                # keys have become 'converted' extras, ensure that we are only
                # truly adding extras we haven't seen before here.
                extra = extra.split(':')[0]
                if extra:
                    self.metadata.provides_extras.add(extra)

        if getattr(self, 'install_requires', None) and not self._orig_install_requires:
            # Save the original before it is mutated by _move_install_requirements_markers
            self._orig_install_requires = self.install_requires

        self._convert_extras_requirements()
        self._move_install_requirements_markers()

    def _convert_extras_requirements(self):
        """
        Convert requirements in `extras_require` of the form
        `"extra": ["barbazquux; {marker}"]` to
        `"extra:{marker}": ["barbazquux"]`.
        """
        spec_ext_reqs = getattr(self, 'extras_require', None) or {}
        tmp = defaultdict(ordered_set.OrderedSet)
        self._tmp_extras_require = getattr(self, '_tmp_extras_require', tmp)
        for section, v in spec_ext_reqs.items():
            # Do not strip empty sections.
            self._tmp_extras_require[section]
            for r in _reqs.parse(v):
                suffix = self._suffix_for(r)
                self._tmp_extras_require[section + suffix].append(r)
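        # e.g. {"docs": ['sphinx; python_version >= "3.7"']} ends up under the
        # key 'docs:python_version >= "3.7"' in self._tmp_extras_require,
        # holding the parsed Requirement for 'sphinx' (markers are stripped
        # later, in _clean_req).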

    @staticmethod
    def _suffix_for(req):
        """
        For a requirement, return the 'extras_require' suffix for
        that requirement.
        """
        return ':' + str(req.marker) if req.marker else ''

    def _move_install_requirements_markers(self):
        """
        Move requirements in `install_requires` that are using environment
        markers into `extras_require`.
        """

        # divide the install_requires into two sets, simple ones still
        # handled by install_requires and more complex ones handled
        # by extras_require.

        def is_simple_req(req):
            return not req.marker

        spec_inst_reqs = getattr(self, 'install_requires', None) or ()
        inst_reqs = list(_reqs.parse(spec_inst_reqs))
        simple_reqs = filter(is_simple_req, inst_reqs)
        complex_reqs = itertools.filterfalse(is_simple_req, inst_reqs)
        self.install_requires = list(map(str, simple_reqs))

        for r in complex_reqs:
            self._tmp_extras_require[':' + str(r.marker)].append(r)
        self.extras_require = dict(
            # list(dict.fromkeys(...)) ensures a list of unique strings
            (k, list(dict.fromkeys(str(r) for r in map(self._clean_req, v))))
            for k, v in self._tmp_extras_require.items()
        )
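        # e.g. install_requires=['packaging', 'tomli; python_version < "3.11"']
        # leaves install_requires == ['packaging'] and records 'tomli' under the
        # extras_require key ':python_version < "3.11"'.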

    def _clean_req(self, req):
        """
        Given a Requirement, remove environment markers and return it.
        """
        req.marker = None
        return req

    def _finalize_license_files(self):
        """Compute names of all license files which should be included."""
        license_files: Optional[List[str]] = self.metadata.license_files
        patterns: List[str] = license_files if license_files else []

        license_file: Optional[str] = self.metadata.license_file
        if license_file and license_file not in patterns:
            patterns.append(license_file)

        if license_files is None and license_file is None:
            # Default patterns match the ones wheel uses
            # See https://wheel.readthedocs.io/en/stable/user_guide.html
            # -> 'Including license files in the generated wheel file'
            patterns = ('LICEN[CS]E*', 'COPYING*', 'NOTICE*', 'AUTHORS*')

        self.metadata.license_files = list(
            unique_everseen(self._expand_patterns(patterns))
        )

    @staticmethod
    def _expand_patterns(patterns):
        """
        >>> list(Distribution._expand_patterns(['LICENSE']))
        ['LICENSE']
        >>> list(Distribution._expand_patterns(['setup.cfg', 'LIC*']))
        ['setup.cfg', 'LICENSE']
        """
        return (
            path
            for pattern in patterns
            for path in sorted(iglob(pattern))
            if not path.endswith('~') and os.path.isfile(path)
        )

    # FIXME: 'Distribution._parse_config_files' is too complex (14)
    def _parse_config_files(self, filenames=None):  # noqa: C901
        """
        Adapted from distutils.dist.Distribution.parse_config_files,
        this method provides the same functionality in subtly-improved
        ways.
        """
        from configparser import ConfigParser

        # Ignore install directory options if we have a venv
        ignore_options = (
            []
            if sys.prefix == sys.base_prefix
            else [
                'install-base',
                'install-platbase',
                'install-lib',
                'install-platlib',
                'install-purelib',
                'install-headers',
                'install-scripts',
                'install-data',
                'prefix',
                'exec-prefix',
                'home',
                'user',
                'root',
            ]
        )

        ignore_options = frozenset(ignore_options)

        if filenames is None:
            filenames = self.find_config_files()

        if DEBUG:
            self.announce("Distribution.parse_config_files():")

        parser = ConfigParser()
        parser.optionxform = str
        for filename in filenames:
            with io.open(filename, encoding='utf-8') as reader:
                if DEBUG:
                    self.announce("  reading {filename}".format(**locals()))
                parser.read_file(reader)
            for section in parser.sections():
                options = parser.options(section)
                opt_dict = self.get_option_dict(section)

                for opt in options:
                    if opt == '__name__' or opt in ignore_options:
                        continue

                    val = parser.get(section, opt)
                    opt = self.warn_dash_deprecation(opt, section)
                    opt = self.make_option_lowercase(opt, section)
                    opt_dict[opt] = (filename, val)

            # Make the ConfigParser forget everything (so we retain
            # the original filenames that options come from)
            parser.__init__()

        if 'global' not in self.command_options:
            return

        # If there was a "global" section in the config file, use it
        # to set Distribution options.

        for (opt, (src, val)) in self.command_options['global'].items():
            alias = self.negative_opt.get(opt)
            if alias:
                val = not strtobool(val)
            elif opt in ('verbose', 'dry_run'):  # ugh!
                val = strtobool(val)

            try:
                setattr(self, alias or opt, val)
            except ValueError as e:
                raise DistutilsOptionError(e) from e

    def warn_dash_deprecation(self, opt, section):
        if section in (
            'options.extras_require',
            'options.data_files',
        ):
            return opt

        underscore_opt = opt.replace('-', '_')
        commands = list(itertools.chain(
            distutils.command.__all__,
            self._setuptools_commands(),
        ))
        if (
            not section.startswith('options')
            and section != 'metadata'
            and section not in commands
        ):
            return underscore_opt

        if '-' in opt:
            warnings.warn(
                "Usage of dash-separated '%s' will not be supported in future "
                "versions. Please use the underscore name '%s' instead"
                % (opt, underscore_opt)
            )
        return underscore_opt
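        # e.g. a dash-separated key such as "description-file" under [metadata]
        # in setup.cfg triggers the warning above and is applied as
        # "description_file" instead (illustrative example).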

    def _setuptools_commands(self):
        try:
            return metadata.distribution('setuptools').entry_points.names
        except metadata.PackageNotFoundError:
            # during bootstrapping, distribution doesn't exist
            return []

    def make_option_lowercase(self, opt, section):
        if section != 'metadata' or opt.islower():
            return opt

        lowercase_opt = opt.lower()
        warnings.warn(
            "Usage of uppercase key '%s' in '%s' will be deprecated in future "
            "versions. Please use lowercase '%s' instead"
            % (opt, section, lowercase_opt)
        )
        return lowercase_opt

    # FIXME: 'Distribution._set_command_options' is too complex (14)
    def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901
        """
        Set the options for 'command_obj' from 'option_dict'. Basically
        this means copying elements of a dictionary ('option_dict') to
        attributes of an instance ('command').

        'command_obj' must be a Command instance. If 'option_dict' is not
        supplied, uses the standard option dictionary for this command
        (from 'self.command_options').

        (Adapted from distutils.dist.Distribution._set_command_options)
        """
        command_name = command_obj.get_command_name()
        if option_dict is None:
            option_dict = self.get_option_dict(command_name)

        if DEBUG:
            self.announce("  setting options for '%s' command:" % command_name)
        for (option, (source, value)) in option_dict.items():
            if DEBUG:
                self.announce("    %s = %s (from %s)" % (option, value, source))
            try:
                bool_opts = [translate_longopt(o) for o in command_obj.boolean_options]
            except AttributeError:
                bool_opts = []
            try:
                neg_opt = command_obj.negative_opt
            except AttributeError:
                neg_opt = {}

            try:
                is_string = isinstance(value, str)
                if option in neg_opt and is_string:
                    setattr(command_obj, neg_opt[option], not strtobool(value))
                elif option in bool_opts and is_string:
                    setattr(command_obj, option, strtobool(value))
                elif hasattr(command_obj, option):
                    setattr(command_obj, option, value)
                else:
                    raise DistutilsOptionError(
                        "error in %s: command '%s' has no such option '%s'"
                        % (source, command_name, option)
                    )
            except ValueError as e:
                raise DistutilsOptionError(e) from e

    def _get_project_config_files(self, filenames):
        """Add default file and split between INI and TOML"""
        tomlfiles = []
        standard_project_metadata = Path(self.src_root or os.curdir, "pyproject.toml")
        if filenames is not None:
            parts = partition(lambda f: Path(f).suffix == ".toml", filenames)
            filenames = list(parts[0])  # 1st element => predicate is False
            tomlfiles = list(parts[1])  # 2nd element => predicate is True
        elif standard_project_metadata.exists():
            tomlfiles = [standard_project_metadata]
        return filenames, tomlfiles
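        # e.g. filenames=['setup.cfg', 'pyproject.toml'] returns
        # (['setup.cfg'], ['pyproject.toml']); with filenames=None a
        # pyproject.toml next to src_root (or the current directory) is used.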

    def parse_config_files(self, filenames=None, ignore_option_errors=False):
        """Parses configuration files from various levels
        and loads configuration.
        """
        inifiles, tomlfiles = self._get_project_config_files(filenames)

        self._parse_config_files(filenames=inifiles)

        setupcfg.parse_configuration(
            self, self.command_options, ignore_option_errors=ignore_option_errors
        )
        for filename in tomlfiles:
            pyprojecttoml.apply_configuration(self, filename, ignore_option_errors)

        self._finalize_requires()
        self._finalize_license_files()

    def fetch_build_eggs(self, requires):
        """Resolve pre-setup requirements"""
        resolved_dists = pkg_resources.working_set.resolve(
            _reqs.parse(requires),
            installer=self.fetch_build_egg,
            replace_conflicting=True,
        )
        for dist in resolved_dists:
            pkg_resources.working_set.add(dist, replace=True)
        return resolved_dists

    def finalize_options(self):
        """
        Allow plugins to apply arbitrary operations to the
        distribution. Each hook may optionally define an 'order'
        to influence the order of execution. Smaller numbers
        go first and the default is 0.
        """
        group = 'setuptools.finalize_distribution_options'

        def by_order(hook):
            return getattr(hook, 'order', 0)

        defined = metadata.entry_points(group=group)
        filtered = itertools.filterfalse(self._removed, defined)
        loaded = map(lambda e: e.load(), filtered)
        for ep in sorted(loaded, key=by_order):
            ep(self)
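        # A plugin provides such a hook through a
        # 'setuptools.finalize_distribution_options' entry point, e.g. a
        # hypothetical "my_plugin = my_pkg.hooks:finalize" callable that
        # receives this Distribution and may expose an `order` attribute to
        # control sequencing (names above are illustrative only).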

    @staticmethod
    def _removed(ep):
        """
        When removing an entry point, if metadata is loaded
        from an older version of Setuptools, that removed
        entry point will attempt to be loaded and will fail.
        See #2765 for more details.
        """
        removed = {
            # removed 2021-09-05
            '2to3_doctests',
        }
        return ep.name in removed

    def _finalize_setup_keywords(self):
        for ep in metadata.entry_points(group='distutils.setup_keywords'):
            value = getattr(self, ep.name, None)
            if value is not None:
                ep.load()(self, ep.name, value)

    def get_egg_cache_dir(self):
        egg_cache_dir = os.path.join(os.curdir, '.eggs')
        if not os.path.exists(egg_cache_dir):
            os.mkdir(egg_cache_dir)
            windows_support.hide_file(egg_cache_dir)
            readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt')
            with open(readme_txt_filename, 'w') as f:
                f.write(
                    'This directory contains eggs that were downloaded '
                    'by setuptools to build, test, and run plug-ins.\n\n'
                )
                f.write(
                    'This directory caches those eggs to prevent '
                    'repeated downloads.\n\n'
                )
                f.write('However, it is safe to delete this directory.\n\n')

        return egg_cache_dir

    def fetch_build_egg(self, req):
        """Fetch an egg needed for building"""
        from setuptools.installer import fetch_build_egg

        return fetch_build_egg(self, req)

    def get_command_class(self, command):
        """Pluggable version of get_command_class()"""
        if command in self.cmdclass:
            return self.cmdclass[command]

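        # The for/else below falls back to the inherited distutils lookup only
        # when no 'distutils.commands' entry point matches the requested command.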
        eps = metadata.entry_points(group='distutils.commands', name=command)
        for ep in eps:
            self.cmdclass[command] = cmdclass = ep.load()
            return cmdclass
        else:
            return _Distribution.get_command_class(self, command)

    def print_commands(self):
        for ep in metadata.entry_points(group='distutils.commands'):
            if ep.name not in self.cmdclass:
                cmdclass = ep.load()
                self.cmdclass[ep.name] = cmdclass
        return _Distribution.print_commands(self)

    def get_command_list(self):
        for ep in metadata.entry_points(group='distutils.commands'):
            if ep.name not in self.cmdclass:
                cmdclass = ep.load()
                self.cmdclass[ep.name] = cmdclass
        return _Distribution.get_command_list(self)

    def include(self, **attrs):
        """Add items to distribution that are named in keyword arguments

        For example, 'dist.include(py_modules=["x"])' would add 'x' to
        the distribution's 'py_modules' attribute, if it was not already
        there.

        Currently, this method only supports inclusion for attributes that are
        lists or tuples. If you need to add support for adding to other
        attributes in this or a subclass, you can add an '_include_X' method,
        where 'X' is the name of the attribute. The method will be called with
        the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})'
        will try to call 'dist._include_foo({"bar":"baz"})', which can then
        handle whatever special inclusion logic is needed.
        """
        for k, v in attrs.items():
            include = getattr(self, '_include_' + k, None)
            if include:
                include(v)
            else:
                self._include_misc(k, v)

    def exclude_package(self, package):
        """Remove packages, modules, and extensions in named package"""

        pfx = package + '.'
        if self.packages:
            self.packages = [
                p for p in self.packages if p != package and not p.startswith(pfx)
            ]

        if self.py_modules:
            self.py_modules = [
                p for p in self.py_modules if p != package and not p.startswith(pfx)
            ]

        if self.ext_modules:
            self.ext_modules = [
                p
                for p in self.ext_modules
                if p.name != package and not p.name.startswith(pfx)
            ]

    def has_contents_for(self, package):
        """Return true if 'exclude_package(package)' would do something"""

        pfx = package + '.'

        for p in self.iter_distribution_names():
            if p == package or p.startswith(pfx):
                return True

    def _exclude_misc(self, name, value):
        """Handle 'exclude()' for list/tuple attrs without a special handler"""
        if not isinstance(value, sequence):
            raise DistutilsSetupError(
                "%s: setting must be a list or tuple (%r)" % (name, value)
            )
        try:
            old = getattr(self, name)
        except AttributeError as e:
            raise DistutilsSetupError("%s: No such distribution setting" % name) from e
        if old is not None and not isinstance(old, sequence):
            raise DistutilsSetupError(
                name + ": this setting cannot be changed via include/exclude"
            )
        elif old:
            setattr(self, name, [item for item in old if item not in value])

    def _include_misc(self, name, value):
        """Handle 'include()' for list/tuple attrs without a special handler"""

        if not isinstance(value, sequence):
            raise DistutilsSetupError("%s: setting must be a list (%r)" % (name, value))
        try:
            old = getattr(self, name)
        except AttributeError as e:
            raise DistutilsSetupError("%s: No such distribution setting" % name) from e
        if old is None:
            setattr(self, name, value)
        elif not isinstance(old, sequence):
            raise DistutilsSetupError(
                name + ": this setting cannot be changed via include/exclude"
            )
        else:
            new = [item for item in value if item not in old]
            setattr(self, name, old + new)

    def exclude(self, **attrs):
        """Remove items from distribution that are named in keyword arguments

        For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
        the distribution's 'py_modules' attribute. Excluding packages uses
        the 'exclude_package()' method, so all of the package's contained
        packages, modules, and extensions are also excluded.

        Currently, this method only supports exclusion from attributes that are
        lists or tuples. If you need to add support for excluding from other
        attributes in this or a subclass, you can add an '_exclude_X' method,
        where 'X' is the name of the attribute. The method will be called with
        the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})'
        will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
        handle whatever special exclusion logic is needed.
        """
        for k, v in attrs.items():
            exclude = getattr(self, '_exclude_' + k, None)
            if exclude:
                exclude(v)
            else:
                self._exclude_misc(k, v)

    def _exclude_packages(self, packages):
        if not isinstance(packages, sequence):
            raise DistutilsSetupError(
                "packages: setting must be a list or tuple (%r)" % (packages,)
            )
        list(map(self.exclude_package, packages))

    def _parse_command_opts(self, parser, args):
        # Remove --with-X/--without-X options when processing command args
        self.global_options = self.__class__.global_options
        self.negative_opt = self.__class__.negative_opt

        # First, expand any aliases
        command = args[0]
        aliases = self.get_option_dict('aliases')
        while command in aliases:
            src, alias = aliases[command]
            del aliases[command]  # ensure each alias can expand only once!
            import shlex

            args[:1] = shlex.split(alias, True)
            command = args[0]

        nargs = _Distribution._parse_command_opts(self, parser, args)

        # Handle commands that want to consume all remaining arguments
        cmd_class = self.get_command_class(command)
        if getattr(cmd_class, 'command_consumes_arguments', None):
            self.get_option_dict(command)['args'] = ("command line", nargs)
            if nargs is not None:
                return []

        return nargs

    def get_cmdline_options(self):
        """Return a '{cmd: {opt:val}}' map of all command-line options

        Option names are all long, but do not include the leading '--', and
        contain dashes rather than underscores. If the option doesn't take
        an argument (e.g. '--quiet'), the 'val' is 'None'.

        Note that options provided by config files are intentionally excluded.
        """

        d = {}

        for cmd, opts in self.command_options.items():

            for opt, (src, val) in opts.items():

                if src != "command line":
                    continue

                opt = opt.replace('_', '-')

                if val == 0:
                    cmdobj = self.get_command_obj(cmd)
                    neg_opt = self.negative_opt.copy()
                    neg_opt.update(getattr(cmdobj, 'negative_opt', {}))
                    for neg, pos in neg_opt.items():
                        if pos == opt:
                            opt = neg
                            val = None
                            break
                    else:
                        raise AssertionError("Shouldn't be able to get here")

                elif val == 1:
                    val = None

                d.setdefault(cmd, {})[opt] = val

        return d
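        # e.g. running "setup.py sdist --formats=gztar" would yield
        # {'sdist': {'formats': 'gztar'}} here (illustrative; actual contents
        # depend on the command line).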

    def iter_distribution_names(self):
        """Yield all packages, modules, and extension names in distribution"""

        for pkg in self.packages or ():
            yield pkg

        for module in self.py_modules or ():
            yield module

        for ext in self.ext_modules or ():
            if isinstance(ext, tuple):
                name, buildinfo = ext
            else:
                name = ext.name
            if name.endswith('module'):
                name = name[:-6]
            yield name
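            # e.g. an Extension named 'foomodule' is yielded as 'foo' here.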

    def handle_display_options(self, option_order):
        """If there were any non-global "display-only" options
        (--help-commands or the metadata display options) on the command
        line, display the requested info and return true; else return
        false.
        """
        import sys

        if self.help_commands:
            return _Distribution.handle_display_options(self, option_order)

        # Stdout may be StringIO (e.g. in tests)
        if not isinstance(sys.stdout, io.TextIOWrapper):
            return _Distribution.handle_display_options(self, option_order)

        # Don't wrap stdout if utf-8 is already the encoding. Provides
        # workaround for #334.
        if sys.stdout.encoding.lower() in ('utf-8', 'utf8'):
            return _Distribution.handle_display_options(self, option_order)

        # Print metadata in UTF-8 no matter the platform
        encoding = sys.stdout.encoding
        sys.stdout.reconfigure(encoding='utf-8')
        try:
            return _Distribution.handle_display_options(self, option_order)
        finally:
            sys.stdout.reconfigure(encoding=encoding)

    def run_command(self, command):
        self.set_defaults()
        # Postpone defaults until all explicit configuration is considered
        # (setup() args, config files, command line and plugins)

        super().run_command(command)


class DistDeprecationWarning(SetuptoolsDeprecationWarning):
    """Class for warning about deprecations in dist in
    setuptools. Not ignored by default, unlike DeprecationWarning."""