# NOTE: removed git-blame viewer artifacts ("Commit | Line | Data" table residue)
1 | # -*- coding: utf-8 -*- |
2 | import codecs | |
3 | import warnings | |
4 | import re | |
5 | from contextlib import contextmanager | |
6 | ||
7 | from parso.normalizer import Normalizer, NormalizerConfig, Issue, Rule | |
8 | from parso.python.tokenize import _get_token_collection | |
9 | ||
10 | _BLOCK_STMTS = ('if_stmt', 'while_stmt', 'for_stmt', 'try_stmt', 'with_stmt') | |
11 | _STAR_EXPR_PARENTS = ('testlist_star_expr', 'testlist_comp', 'exprlist') | |
12 | # This is the maximal block size given by python. | |
13 | _MAX_BLOCK_SIZE = 20 | |
14 | _MAX_INDENT_COUNT = 100 | |
15 | ALLOWED_FUTURES = ( | |
16 | 'nested_scopes', 'generators', 'division', 'absolute_import', | |
17 | 'with_statement', 'print_function', 'unicode_literals', 'generator_stop', | |
18 | ) | |
19 | _COMP_FOR_TYPES = ('comp_for', 'sync_comp_for') | |
20 | ||
21 | ||
def _get_rhs_name(node, version):
    """
    Return a human readable description of a right-hand-side expression,
    used to build "cannot assign to ..." style error messages.

    :param node: The parse tree node to describe.
    :param version: Target Python version tuple; some wordings changed in 3.8.
    """
    type_ = node.type
    if type_ == "lambdef":
        return "lambda"
    elif type_ == "atom":
        comprehension = _get_comprehension_type(node)
        first, second = node.children[:2]
        if comprehension is not None:
            return comprehension
        elif second.type == "dictorsetmaker":
            if version < (3, 8):
                return "literal"
            else:
                # {1: 2} and {**x} are dict displays; {1, 2} is a set display.
                if second.children[1] == ":" or second.children[0] == "**":
                    return "dict display"
                else:
                    return "set display"
        elif (
            first == "("
            and (second == ")"
                 or (len(node.children) == 3 and node.children[1].type == "testlist_comp"))
        ):
            return "tuple"
        elif first == "(":
            # A plain parenthesized expression: describe the inner node instead.
            return _get_rhs_name(_remove_parens(node), version=version)
        elif first == "[":
            return "list"
        elif first == "{" and second == "}":
            return "dict display"
        elif first == "{" and len(node.children) > 2:
            return "set display"
    elif type_ == "keyword":
        if "yield" in node.value:
            return "yield expression"
        if version < (3, 8):
            return "keyword"
        else:
            # Since 3.8 the keyword itself (e.g. True/False/None) is named.
            return str(node.value)
    elif type_ == "operator" and node.value == "...":
        return "Ellipsis"
    elif type_ == "comparison":
        return "comparison"
    elif type_ in ("string", "number", "strings"):
        return "literal"
    elif type_ == "yield_expr":
        return "yield expression"
    elif type_ == "test":
        return "conditional expression"
    elif type_ in ("atom_expr", "power"):
        if node.children[0] == "await":
            return "await expression"
        elif node.children[-1].type == "trailer":
            trailer = node.children[-1]
            if trailer.children[0] == "(":
                return "function call"
            elif trailer.children[0] == "[":
                return "subscript"
            elif trailer.children[0] == ".":
                return "attribute"
    elif (
        ("expr" in type_ and "star_expr" not in type_)  # is a substring
        or "_test" in type_
        or type_ in ("term", "factor")
    ):
        return "operator"
    elif type_ == "star_expr":
        return "starred"
    elif type_ == "testlist_star_expr":
        return "tuple"
    elif type_ == "fstring":
        return "f-string expression"
    return type_  # shouldn't reach here
94 | ||
95 | ||
96 | def _iter_stmts(scope): | |
97 | """ | |
98 | Iterates over all statements and splits up simple_stmt. | |
99 | """ | |
100 | for child in scope.children: | |
101 | if child.type == 'simple_stmt': | |
102 | for child2 in child.children: | |
103 | if child2.type == 'newline' or child2 == ';': | |
104 | continue | |
105 | yield child2 | |
106 | else: | |
107 | yield child | |
108 | ||
109 | ||
110 | def _get_comprehension_type(atom): | |
111 | first, second = atom.children[:2] | |
112 | if second.type == 'testlist_comp' and second.children[1].type in _COMP_FOR_TYPES: | |
113 | if first == '[': | |
114 | return 'list comprehension' | |
115 | else: | |
116 | return 'generator expression' | |
117 | elif second.type == 'dictorsetmaker' and second.children[-1].type in _COMP_FOR_TYPES: | |
118 | if second.children[1] == ':': | |
119 | return 'dict comprehension' | |
120 | else: | |
121 | return 'set comprehension' | |
122 | return None | |
123 | ||
124 | ||
125 | def _is_future_import(import_from): | |
126 | # It looks like a __future__ import that is relative is still a future | |
127 | # import. That feels kind of odd, but whatever. | |
128 | # if import_from.level != 0: | |
129 | # return False | |
130 | from_names = import_from.get_from_names() | |
131 | return [n.value for n in from_names] == ['__future__'] | |
132 | ||
133 | ||
134 | def _remove_parens(atom): | |
135 | """ | |
136 | Returns the inner part of an expression like `(foo)`. Also removes nested | |
137 | parens. | |
138 | """ | |
139 | try: | |
140 | children = atom.children | |
141 | except AttributeError: | |
142 | pass | |
143 | else: | |
144 | if len(children) == 3 and children[0] == '(': | |
145 | return _remove_parens(atom.children[1]) | |
146 | return atom | |
147 | ||
148 | ||
149 | def _skip_parens_bottom_up(node): | |
150 | """ | |
151 | Returns an ancestor node of an expression, skipping all levels of parens | |
152 | bottom-up. | |
153 | """ | |
154 | while node.parent is not None: | |
155 | node = node.parent | |
156 | if node.type != 'atom' or node.children[0] != '(': | |
157 | return node | |
158 | return None | |
159 | ||
160 | ||
161 | def _iter_params(parent_node): | |
162 | return (n for n in parent_node.children if n.type == 'param' or n.type == 'operator') | |
163 | ||
164 | ||
def _is_future_import_first(import_from):
    """
    Check whether ``import_from`` is effectively the first statement of the
    file: only the module docstring and other ``__future__`` imports may
    precede it.
    """
    seen_docstring = False
    for stmt in _iter_stmts(import_from.get_root_node()):
        # A single leading string literal is the module docstring.
        if not seen_docstring and stmt.type == 'string':
            continue
        seen_docstring = True

        if stmt == import_from:
            return True
        is_other_future = stmt.type == 'import_from' and _is_future_import(stmt)
        if not is_other_future:
            return False
180 | ||
181 | ||
182 | def _iter_definition_exprs_from_lists(exprlist): | |
183 | def check_expr(child): | |
184 | if child.type == 'atom': | |
185 | if child.children[0] == '(': | |
186 | testlist_comp = child.children[1] | |
187 | if testlist_comp.type == 'testlist_comp': | |
188 | yield from _iter_definition_exprs_from_lists(testlist_comp) | |
189 | return | |
190 | else: | |
191 | # It's a paren that doesn't do anything, like 1 + (1) | |
192 | yield from check_expr(testlist_comp) | |
193 | return | |
194 | elif child.children[0] == '[': | |
195 | yield testlist_comp | |
196 | return | |
197 | yield child | |
198 | ||
199 | if exprlist.type in _STAR_EXPR_PARENTS: | |
200 | for child in exprlist.children[::2]: | |
201 | yield from check_expr(child) | |
202 | else: | |
203 | yield from check_expr(exprlist) | |
204 | ||
205 | ||
def _get_expr_stmt_definition_exprs(expr_stmt):
    """
    Return every assignment target of an expr_stmt (all children left of
    the final assigned value, skipping "=" operators).
    """
    result = []
    for target in expr_stmt.children[:-2:2]:
        if target.type in ('testlist_star_expr', 'testlist'):
            result.extend(_iter_definition_exprs_from_lists(target))
        else:
            result.append(target)
    return result
214 | ||
215 | ||
def _get_for_stmt_definition_exprs(for_stmt):
    """Return the loop targets of a for statement as a list."""
    # children[1] is the target list between "for" and "in".
    targets = for_stmt.children[1]
    return list(_iter_definition_exprs_from_lists(targets))
219 | ||
220 | ||
221 | def _is_argument_comprehension(argument): | |
222 | return argument.children[1].type in _COMP_FOR_TYPES | |
223 | ||
224 | ||
225 | def _any_fstring_error(version, node): | |
226 | if version < (3, 9) or node is None: | |
227 | return False | |
228 | if node.type == "error_node": | |
229 | return any(child.type == "fstring_start" for child in node.children) | |
230 | elif node.type == "fstring": | |
231 | return True | |
232 | else: | |
233 | return node.search_ancestor("fstring") | |
234 | ||
235 | ||
236 | class _Context: | |
237 | def __init__(self, node, add_syntax_error, parent_context=None): | |
238 | self.node = node | |
239 | self.blocks = [] | |
240 | self.parent_context = parent_context | |
241 | self._used_name_dict = {} | |
242 | self._global_names = [] | |
243 | self._local_params_names = [] | |
244 | self._nonlocal_names = [] | |
245 | self._nonlocal_names_in_subscopes = [] | |
246 | self._add_syntax_error = add_syntax_error | |
247 | ||
248 | def is_async_funcdef(self): | |
249 | # Stupidly enough async funcdefs can have two different forms, | |
250 | # depending if a decorator is used or not. | |
251 | return self.is_function() \ | |
252 | and self.node.parent.type in ('async_funcdef', 'async_stmt') | |
253 | ||
254 | def is_function(self): | |
255 | return self.node.type == 'funcdef' | |
256 | ||
257 | def add_name(self, name): | |
258 | parent_type = name.parent.type | |
259 | if parent_type == 'trailer': | |
260 | # We are only interested in first level names. | |
261 | return | |
262 | ||
263 | if parent_type == 'global_stmt': | |
264 | self._global_names.append(name) | |
265 | elif parent_type == 'nonlocal_stmt': | |
266 | self._nonlocal_names.append(name) | |
267 | elif parent_type == 'funcdef': | |
268 | self._local_params_names.extend( | |
269 | [param.name.value for param in name.parent.get_params()] | |
270 | ) | |
271 | else: | |
272 | self._used_name_dict.setdefault(name.value, []).append(name) | |
273 | ||
274 | def finalize(self): | |
275 | """ | |
276 | Returns a list of nonlocal names that need to be part of that scope. | |
277 | """ | |
278 | self._analyze_names(self._global_names, 'global') | |
279 | self._analyze_names(self._nonlocal_names, 'nonlocal') | |
280 | ||
281 | global_name_strs = {n.value: n for n in self._global_names} | |
282 | for nonlocal_name in self._nonlocal_names: | |
283 | try: | |
284 | global_name = global_name_strs[nonlocal_name.value] | |
285 | except KeyError: | |
286 | continue | |
287 | ||
288 | message = "name '%s' is nonlocal and global" % global_name.value | |
289 | if global_name.start_pos < nonlocal_name.start_pos: | |
290 | error_name = global_name | |
291 | else: | |
292 | error_name = nonlocal_name | |
293 | self._add_syntax_error(error_name, message) | |
294 | ||
295 | nonlocals_not_handled = [] | |
296 | for nonlocal_name in self._nonlocal_names_in_subscopes: | |
297 | search = nonlocal_name.value | |
298 | if search in self._local_params_names: | |
299 | continue | |
300 | if search in global_name_strs or self.parent_context is None: | |
301 | message = "no binding for nonlocal '%s' found" % nonlocal_name.value | |
302 | self._add_syntax_error(nonlocal_name, message) | |
303 | elif not self.is_function() or \ | |
304 | nonlocal_name.value not in self._used_name_dict: | |
305 | nonlocals_not_handled.append(nonlocal_name) | |
306 | return self._nonlocal_names + nonlocals_not_handled | |
307 | ||
308 | def _analyze_names(self, globals_or_nonlocals, type_): | |
309 | def raise_(message): | |
310 | self._add_syntax_error(base_name, message % (base_name.value, type_)) | |
311 | ||
312 | params = [] | |
313 | if self.node.type == 'funcdef': | |
314 | params = self.node.get_params() | |
315 | ||
316 | for base_name in globals_or_nonlocals: | |
317 | found_global_or_nonlocal = False | |
318 | # Somehow Python does it the reversed way. | |
319 | for name in reversed(self._used_name_dict.get(base_name.value, [])): | |
320 | if name.start_pos > base_name.start_pos: | |
321 | # All following names don't have to be checked. | |
322 | found_global_or_nonlocal = True | |
323 | ||
324 | parent = name.parent | |
325 | if parent.type == 'param' and parent.name == name: | |
326 | # Skip those here, these definitions belong to the next | |
327 | # scope. | |
328 | continue | |
329 | ||
330 | if name.is_definition(): | |
331 | if parent.type == 'expr_stmt' \ | |
332 | and parent.children[1].type == 'annassign': | |
333 | if found_global_or_nonlocal: | |
334 | # If it's after the global the error seems to be | |
335 | # placed there. | |
336 | base_name = name | |
337 | raise_("annotated name '%s' can't be %s") | |
338 | break | |
339 | else: | |
340 | message = "name '%s' is assigned to before %s declaration" | |
341 | else: | |
342 | message = "name '%s' is used prior to %s declaration" | |
343 | ||
344 | if not found_global_or_nonlocal: | |
345 | raise_(message) | |
346 | # Only add an error for the first occurence. | |
347 | break | |
348 | ||
349 | for param in params: | |
350 | if param.name.value == base_name.value: | |
351 | raise_("name '%s' is parameter and %s"), | |
352 | ||
353 | @contextmanager | |
354 | def add_block(self, node): | |
355 | self.blocks.append(node) | |
356 | yield | |
357 | self.blocks.pop() | |
358 | ||
359 | def add_context(self, node): | |
360 | return _Context(node, self._add_syntax_error, parent_context=self) | |
361 | ||
362 | def close_child_context(self, child_context): | |
363 | self._nonlocal_names_in_subscopes += child_context.finalize() | |
364 | ||
365 | ||
class ErrorFinder(Normalizer):
    """
    Searches for errors in the syntax tree.

    Issues are collected per line (only the first issue of each line is
    kept) and emitted in finalize().
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Maps a line number to the (code, message, node) of the first
        # issue found on that line.
        self._error_dict = {}
        self.version = self.grammar.version_info

    def initialize(self, node):
        # Build the _Context chain for the scopes enclosing ``node``,
        # since normalization may start anywhere in the tree.
        def create_context(node):
            if node is None:
                return None

            parent_context = create_context(node.parent)
            if node.type in ('classdef', 'funcdef', 'file_input'):
                return _Context(node, self._add_syntax_error, parent_context)
            return parent_context

        self.context = create_context(node) or _Context(node, self._add_syntax_error)
        self._indentation_count = 0

    def visit(self, node):
        if node.type == 'error_node':
            with self.visit_node(node):
                # Don't need to investigate the inners of an error node. We
                # might find errors in there that should be ignored, because
                # the error node itself already shows that there's an issue.
                return ''
        return super().visit(node)

    @contextmanager
    def visit_node(self, node):
        self._check_type_rules(node)

        if node.type in _BLOCK_STMTS:
            with self.context.add_block(node):
                # CPython rejects more than _MAX_BLOCK_SIZE nested blocks.
                if len(self.context.blocks) == _MAX_BLOCK_SIZE:
                    self._add_syntax_error(node, "too many statically nested blocks")
                yield
            return
        elif node.type == 'suite':
            self._indentation_count += 1
            if self._indentation_count == _MAX_INDENT_COUNT:
                self._add_indentation_error(node.children[1], "too many levels of indentation")

        yield

        if node.type == 'suite':
            self._indentation_count -= 1
        elif node.type in ('classdef', 'funcdef'):
            # Leaving a scope: merge its nonlocal information upwards.
            context = self.context
            self.context = context.parent_context
            self.context.close_child_context(context)

    def visit_leaf(self, leaf):
        if leaf.type == 'error_leaf':
            if leaf.token_type in ('INDENT', 'ERROR_DEDENT'):
                # Indents/Dedents itself never have a prefix. They are just
                # "pseudo" tokens that get removed by the syntax tree later.
                # Therefore in case of an error we also have to check for this.
                spacing = list(leaf.get_next_leaf()._split_prefix())[-1]
                if leaf.token_type == 'INDENT':
                    message = 'unexpected indent'
                else:
                    message = 'unindent does not match any outer indentation level'
                self._add_indentation_error(spacing, message)
            else:
                if leaf.value.startswith('\\'):
                    message = 'unexpected character after line continuation character'
                else:
                    # Matches the opening quotes of an (unterminated) string.
                    match = re.match('\\w{,2}("{1,3}|\'{1,3})', leaf.value)
                    if match is None:
                        message = 'invalid syntax'
                        if (
                            self.version >= (3, 9)
                            and leaf.value in _get_token_collection(
                                self.version
                            ).always_break_tokens
                        ):
                            message = "f-string: " + message
                    else:
                        if len(match.group(1)) == 1:
                            message = 'EOL while scanning string literal'
                        else:
                            message = 'EOF while scanning triple-quoted string literal'
                self._add_syntax_error(leaf, message)
            return ''
        elif leaf.value == ':':
            # Entering a class/function body: push a new scope context.
            parent = leaf.parent
            if parent.type in ('classdef', 'funcdef'):
                self.context = self.context.add_context(parent)

        # The rest is rule based.
        return super().visit_leaf(leaf)

    def _add_indentation_error(self, spacing, message):
        self.add_issue(spacing, 903, "IndentationError: " + message)

    def _add_syntax_error(self, node, message):
        self.add_issue(node, 901, "SyntaxError: " + message)

    def add_issue(self, node, code, message):
        # Overwrite the default behavior.
        # Check if the issues are on the same line.
        line = node.start_pos[0]
        args = (code, message, node)
        # setdefault keeps only the first issue reported per line.
        self._error_dict.setdefault(line, args)

    def finalize(self):
        self.context.finalize()

        for code, message, node in self._error_dict.values():
            self.issues.append(Issue(node, code, message))
480 | ||
481 | ||
class IndentationRule(Rule):
    """Base class for rules that report IndentationError issues (code 903)."""
    code = 903

    def _get_message(self, message, node):
        return "IndentationError: " + super()._get_message(message, node)
488 | ||
489 | ||
@ErrorFinder.register_rule(type='error_node')
class _ExpectIndentedBlock(IndentationRule):
    """After a header like ``if x:`` the following suite must be indented."""
    message = 'expected an indented block'

    def get_node(self, node):
        next_leaf = node.get_next_leaf()
        # Report the issue at the last spacing part of the next prefix.
        *_, last_spacing = next_leaf._split_prefix()
        return last_spacing

    def is_issue(self, node):
        # This is the beginning of a suite that is not indented.
        return node.children[-1].type == 'newline'
501 | ||
502 | ||
class ErrorFinderConfig(NormalizerConfig):
    # Configuration that makes a grammar use ErrorFinder as its normalizer.
    normalizer_class = ErrorFinder
505 | ||
506 | ||
class SyntaxRule(Rule):
    """Base class for rules that report SyntaxError issues (code 901)."""
    code = 901

    def _get_message(self, message, node):
        message = super()._get_message(message, node)
        # Since 3.9, errors inside f-strings get an "f-string: " prefix.
        in_fstring = _any_fstring_error(self._normalizer.version, node)
        if in_fstring and "f-string" not in message:
            message = "f-string: " + message
        return "SyntaxError: " + message
518 | ||
519 | ||
@ErrorFinder.register_rule(type='error_node')
class _InvalidSyntaxRule(SyntaxRule):
    """Generic fallback for error nodes the parser could not make sense of."""
    message = "invalid syntax"
    fstring_message = "f-string: invalid syntax"

    def get_node(self, node):
        return node.get_next_leaf()

    def is_issue(self, node):
        # If the next leaf is an error leaf, it will be reported separately.
        is_not_error_leaf = node.get_next_leaf().type != 'error_leaf'
        if is_not_error_leaf and _any_fstring_error(self._normalizer.version, node):
            self.add_issue(node, message=self.fstring_message)
            return None  # Issue already added with the f-string wording.
        return is_not_error_leaf
538 | ||
539 | ||
@ErrorFinder.register_rule(value='await')
class _AwaitOutsideAsync(SyntaxRule):
    """``await`` is only allowed inside ``async def`` functions."""
    message = "'await' outside async function"

    def is_issue(self, leaf):
        context = self._normalizer.context
        return not context.is_async_funcdef()

    def get_error_node(self, node):
        # Return the whole await statement.
        return node.parent
550 | ||
551 | ||
@ErrorFinder.register_rule(value='break')
class _BreakOutsideLoop(SyntaxRule):
    """``break`` must be enclosed by a for or while loop."""
    message = "'break' outside loop"

    def is_issue(self, leaf):
        blocks = self._normalizer.context.blocks
        return not any(block.type in ('for_stmt', 'while_stmt') for block in blocks)
562 | ||
563 | ||
@ErrorFinder.register_rule(value='continue')
class _ContinueChecks(SyntaxRule):
    """``continue`` must be inside a loop; before 3.8 also not in finally."""
    message = "'continue' not properly in loop"
    message_in_finally = "'continue' not supported inside 'finally' clause"

    def is_issue(self, leaf):
        found_loop = False
        for block in self._normalizer.context.blocks:
            if block.type in ('for_stmt', 'while_stmt'):
                found_loop = True
            if block.type == 'try_stmt':
                # children[-3] is the "finally" keyword iff a finally
                # clause exists on this try statement.
                finally_kw = block.children[-3]
                in_finally = (
                    finally_kw == "finally"
                    and leaf.start_pos > finally_kw.start_pos
                )
                if in_finally and self._normalizer.version < (3, 8):
                    self.add_issue(leaf, message=self.message_in_finally)
                    return False  # Error already added
        if not found_loop:
            return True
585 | ||
586 | ||
@ErrorFinder.register_rule(value='from')
class _YieldFromCheck(SyntaxRule):
    """``yield from`` is forbidden inside async functions."""
    message = "'yield from' inside async function"

    def get_node(self, leaf):
        # leaf -> yield_arg -> yield_expr: the actual yield statement.
        return leaf.parent.parent

    def is_issue(self, leaf):
        if leaf.parent.type != 'yield_arg':
            return False
        return self._normalizer.context.is_async_funcdef()
597 | ||
598 | ||
@ErrorFinder.register_rule(type='name')
class _NameChecks(SyntaxRule):
    """Registers every name with the scope context and checks __debug__."""
    message = 'cannot assign to __debug__'
    message_none = 'cannot assign to None'

    def is_issue(self, leaf):
        # Feed the name into the scope tracking regardless of the outcome.
        self._normalizer.context.add_name(leaf)

        if leaf.is_definition() and leaf.value == '__debug__':
            return True
609 | ||
610 | ||
@ErrorFinder.register_rule(type='string')
class _StringChecks(SyntaxRule):
    """Validates bytes literals and escape sequences of string literals."""
    message = "bytes can only contain ASCII literal characters."

    def is_issue(self, leaf):
        prefix = leaf.string_prefix.lower()
        is_bytes = 'b' in prefix
        if is_bytes and any(ord(char) > 127 for char in leaf.value):
            # e.g. b'ä' is not a valid bytes literal.
            return True

        if 'r' in prefix:
            # Raw strings don't need to be checked for proper escaping.
            return None

        payload = leaf._get_payload()
        if is_bytes:
            payload = payload.encode('utf-8')
            decode = codecs.escape_decode
        else:
            decode = codecs.unicode_escape_decode

        try:
            with warnings.catch_warnings():
                # The warnings from parsing strings are not relevant.
                warnings.filterwarnings('ignore')
                decode(payload)
        except UnicodeDecodeError as e:
            self.add_issue(leaf, message='(unicode error) ' + str(e))
        except ValueError as e:
            self.add_issue(leaf, message='(value error) ' + str(e))
642 | ||
643 | ||
@ErrorFinder.register_rule(value='*')
class _StarCheck(SyntaxRule):
    """A bare ``*`` in a parameter list must be followed by named params."""
    message = "named arguments must follow bare *"

    def is_issue(self, leaf):
        params = leaf.parent
        if params.type != 'parameters' or not params:
            return None
        following = params.children[params.children.index(leaf) + 1:]
        named = [
            child for child in following
            if child not in (',', ')') and not child.star_count
        ]
        return not named
655 | ||
656 | ||
@ErrorFinder.register_rule(value='**')
class _StarStarCheck(SyntaxRule):
    # e.g. {**{} for a in [1]}
    # TODO this should probably get a better end_pos including
    # the next sibling of leaf.
    message = "dict unpacking cannot be used in dict comprehension"

    def is_issue(self, leaf):
        if leaf.parent.type != 'dictorsetmaker':
            return None
        # The node two siblings ahead is the comprehension's for-part.
        comp_for = leaf.get_next_sibling().get_next_sibling()
        return comp_for is not None and comp_for.type in _COMP_FOR_TYPES
668 | ||
669 | ||
@ErrorFinder.register_rule(value='yield')
@ErrorFinder.register_rule(value='return')
class _ReturnAndYieldChecks(SyntaxRule):
    """Checks return/yield placement relative to (async) functions."""
    message = "'return' with value in async generator"
    message_async_yield = "'yield' inside async function"

    def get_node(self, leaf):
        return leaf.parent

    def is_issue(self, leaf):
        context = self._normalizer.context
        if context.node.type != 'funcdef':
            message = "'%s' outside function" % leaf.value
            self.add_issue(self.get_node(leaf), message=message)
        elif context.is_async_funcdef() and any(context.node.iter_yield_exprs()):
            # An async generator: a return with a value is forbidden.
            if leaf.value == 'return' and leaf.parent.type == 'return_stmt':
                return True
686 | ||
687 | ||
@ErrorFinder.register_rule(type='strings')
class _BytesAndStringMix(SyntaxRule):
    # e.g. 's' b''
    message = "cannot mix bytes and nonbytes literals"

    def _is_bytes_literal(self, string):
        # f-strings are never bytes literals.
        if string.type == 'fstring':
            return False
        return 'b' in string.string_prefix.lower()

    def is_issue(self, node):
        children = node.children
        first_is_bytes = self._is_bytes_literal(children[0])
        for other in children[1:]:
            if self._is_bytes_literal(other) != first_is_bytes:
                return True
704 | ||
705 | ||
@ErrorFinder.register_rule(type='import_as_names')
class _TrailingImportComma(SyntaxRule):
    # e.g. from foo import a,
    message = "trailing comma not allowed without surrounding parentheses"

    def is_issue(self, node):
        has_trailing_comma = node.children[-1] == ','
        is_parenthesized = node.parent.children[-1] == ')'
        if has_trailing_comma and not is_parenthesized:
            return True
714 | ||
715 | ||
@ErrorFinder.register_rule(type='import_from')
class _ImportStarInFunction(SyntaxRule):
    """``from x import *`` is only allowed at module level."""
    message = "import * only allowed at module level"

    def is_issue(self, node):
        if not node.is_star_import():
            return False
        # A parent context exists iff we are inside a class or function.
        return self._normalizer.context.parent_context is not None
722 | ||
723 | ||
@ErrorFinder.register_rule(type='import_from')
class _FutureImportRule(SyntaxRule):
    """Validates placement and feature names of __future__ imports."""
    message = "from __future__ imports must occur at the beginning of the file"

    def is_issue(self, node):
        if not _is_future_import(node):
            return None
        if not _is_future_import_first(node):
            return True

        for from_name, future_name in node.get_paths():
            name = future_name.value
            allowed = list(ALLOWED_FUTURES)
            if self._normalizer.version >= (3, 7):
                # "annotations" became a future feature in 3.7 (PEP 563).
                allowed.append('annotations')

            if name == 'braces':
                self.add_issue(node, message="not a chance")
            elif name == 'barry_as_FLUFL':
                m = "Seriously I'm not implementing this :) ~ Dave"
                self.add_issue(node, message=m)
            elif name not in allowed:
                message = "future feature %s is not defined" % name
                self.add_issue(node, message=message)
746 | ||
747 | ||
@ErrorFinder.register_rule(type='star_expr')
class _StarExprRule(SyntaxRule):
    # Validates `*expr` usage: placement, deletion and use in comprehensions.
    message_iterable_unpacking = "iterable unpacking cannot be used in comprehension"

    def is_issue(self, node):
        def check_delete_starred(node):
            # Walk upwards through unpacking containers; a del_stmt above
            # means a starred expression is being deleted.
            while node.parent is not None:
                node = node.parent
                if node.type == 'del_stmt':
                    return True
                if node.type not in (*_STAR_EXPR_PARENTS, 'atom'):
                    return False
            return False

        if self._normalizer.version >= (3, 9):
            ancestor = node.parent
        else:
            # Before 3.9 parentheses are transparent for this check.
            ancestor = _skip_parens_bottom_up(node)
        # starred expression not in tuple/list/set
        if ancestor.type not in (*_STAR_EXPR_PARENTS, 'dictorsetmaker') \
                and not (ancestor.type == 'atom' and ancestor.children[0] != '('):
            self.add_issue(node, message="can't use starred expression here")
            return

        if check_delete_starred(node):
            # The error wording changed in Python 3.9.
            if self._normalizer.version >= (3, 9):
                self.add_issue(node, message="cannot delete starred")
            else:
                self.add_issue(node, message="can't use starred expression here")
            return

        if node.parent.type == 'testlist_comp':
            # [*[] for a in [1]]
            if node.parent.children[1].type in _COMP_FOR_TYPES:
                self.add_issue(node, message=self.message_iterable_unpacking)
783 | ||
784 | ||
@ErrorFinder.register_rule(types=_STAR_EXPR_PARENTS)
class _StarExprParentRule(SyntaxRule):
    """Checks starred expressions inside assignment target lists."""

    def is_issue(self, node):
        def is_definition(node, ancestor):
            # Walk upwards to decide whether this list is an assignment
            # target, i.e. left of the last "=" of an expr_stmt.
            if ancestor is None:
                return False
            if ancestor.type == 'trailer':
                return False
            if ancestor.type == 'expr_stmt':
                return node.start_pos < ancestor.children[-1].start_pos
            return is_definition(node, ancestor.parent)

        if not is_definition(node, node.parent):
            return None

        exprs = [child for child in node.children if child != ',']
        starred = [expr for expr in exprs if expr.type == 'star_expr']
        if len(starred) > 1:
            # The error wording changed in Python 3.9.
            if self._normalizer.version < (3, 9):
                message = "two starred expressions in assignment"
            else:
                message = "multiple starred expressions in assignment"
            self.add_issue(starred[1], message=message)
        elif starred:
            count = exprs.index(starred[0])
            if count >= 256:
                # CPython limits the targets before the starred one.
                message = "too many expressions in star-unpacking assignment"
                self.add_issue(starred[0], message=message)
815 | ||
816 | ||
@ErrorFinder.register_rule(type='annassign')
class _AnnotatorRule(SyntaxRule):
    # Checks the target of annotated assignments, e.g.:
    # True: int
    # {}: float
    message = "illegal target for annotation"

    def get_node(self, node):
        return node.parent

    def is_issue(self, node):
        type_ = None
        lhs = node.parent.children[0]
        lhs = _remove_parens(lhs)
        try:
            children = lhs.children
        except AttributeError:
            # Leaves (e.g. plain names and literals) have no children.
            pass
        else:
            if ',' in children or lhs.type == 'atom' and children[0] == '(':
                type_ = 'tuple'
            elif lhs.type == 'atom' and children[0] == '[':
                type_ = 'list'
            # NOTE: trailer is only bound when lhs has children; the check
            # below never reads it for leaves because the surrounding
            # boolean expression short-circuits first.
            trailer = children[-1]

        if type_ is None:
            if not (lhs.type == 'name'
                    # subscript/attributes are allowed
                    or lhs.type in ('atom_expr', 'power')
                    and trailer.type == 'trailer'
                    and trailer.children[0] != '('):
                return True
        else:
            # x, y: str
            message = "only single target (not %s) can be annotated"
            self.add_issue(lhs.parent, message=message % type_)
852 | ||
853 | ||
@ErrorFinder.register_rule(type='argument')
class _ArgumentRule(SyntaxRule):
    """Checks keyword argument targets and comprehension arguments."""

    def is_issue(self, node):
        first = node.children[0]
        if self._normalizer.version < (3, 8):
            # a((b)=c) is valid in <3.8
            first = _remove_parens(first)

        if node.children[1] == '=' and first.type != 'name':
            if self._normalizer.version >= (3, 8):
                message = 'expression cannot contain assignment, perhaps you meant "=="?'
            elif first.type == 'lambdef':
                # f(lambda: 1=1)
                message = "lambda cannot contain assignment"
            else:
                # f(+x=1)
                message = "keyword can't be an expression"
            self.add_issue(first, message=message)

        if _is_argument_comprehension(node) and node.parent.type == 'classdef':
            # A comprehension as a base class argument, e.g. class A(x for x in y).
            self.add_issue(node, message='invalid syntax')
878 | ||
879 | ||
@ErrorFinder.register_rule(type='nonlocal_stmt')
class _NonlocalModuleLevelRule(SyntaxRule):
    # ``nonlocal`` is only meaningful inside a nested scope.
    message = "nonlocal declaration not allowed at module level"

    def is_issue(self, node):
        context = self._normalizer.context
        # No parent context means we are at module level.
        return context.parent_context is None
886 | ||
887 | ||
@ErrorFinder.register_rule(type='arglist')
class _ArglistRule(SyntaxRule):
    """Checks ordering and uniqueness of arguments in a call's arglist."""

    @property
    def message(self):
        # The wording of the unparenthesized-generator error changed in 3.7.
        if self._normalizer.version < (3, 7):
            return "Generator expression must be parenthesized if not sole argument"
        else:
            return "Generator expression must be parenthesized"

    def is_issue(self, node):
        arg_set = set()  # keyword names seen so far (duplicate detection)
        kw_only = False  # a `name=value` argument has been seen
        kw_unpacking_only = False  # a `**kwargs` argument has been seen
        for argument in node.children:
            if argument == ',':
                continue

            if argument.type == 'argument':
                first = argument.children[0]
                if _is_argument_comprehension(argument) and len(node.children) >= 2:
                    # a(a, b for b in c)
                    return True

                if first in ('*', '**'):
                    if first == '*':
                        if kw_unpacking_only:
                            # foo(**kwargs, *args)
                            message = "iterable argument unpacking " \
                                      "follows keyword argument unpacking"
                            self.add_issue(argument, message=message)
                    else:
                        kw_unpacking_only = True
                else:  # Is a keyword argument.
                    kw_only = True
                    if first.type == 'name':
                        if first.value in arg_set:
                            # f(x=1, x=2)
                            message = "keyword argument repeated"
                            if self._normalizer.version >= (3, 9):
                                # 3.9+ includes the offending name.
                                message += ": {}".format(first.value)
                            self.add_issue(first, message=message)
                        else:
                            arg_set.add(first.value)
            else:
                # A plain positional argument.
                if kw_unpacking_only:
                    # f(**x, y)
                    message = "positional argument follows keyword argument unpacking"
                    self.add_issue(argument, message=message)
                elif kw_only:
                    # f(x=2, y)
                    message = "positional argument follows keyword argument"
                    self.add_issue(argument, message=message)
940 | ||
941 | ||
@ErrorFinder.register_rule(type='parameters')
@ErrorFinder.register_rule(type='lambdef')
class _ParameterRule(SyntaxRule):
    """Checks parameter lists of functions and lambdas."""
    # def f(x=3, y): pass
    message = "non-default argument follows default argument"

    def is_issue(self, node):
        param_names = set()  # names seen so far, for the duplicate check
        default_only = False  # True once a defaulted positional param was seen
        star_seen = False  # True once `*`, `*args` or `**kwargs` was seen
        for p in _iter_params(node):
            if p.type == 'operator':
                if p.value == '*':
                    # Bare `*`: following params are keyword-only, so the
                    # "non-default after default" rule no longer applies.
                    star_seen = True
                    default_only = False
                continue

            if p.name.value in param_names:
                message = "duplicate argument '%s' in function definition"
                self.add_issue(p.name, message=message % p.name.value)
            param_names.add(p.name.value)

            if not star_seen:
                if p.default is None and not p.star_count:
                    if default_only:
                        # Positional param without a default after one with
                        # a default.
                        return True
                elif p.star_count:
                    star_seen = True
                    default_only = False
                else:
                    default_only = True
973 | ||
974 | ||
@ErrorFinder.register_rule(type='try_stmt')
class _TryStmtRule(SyntaxRule):
    message = "default 'except:' must be last"

    def is_issue(self, try_stmt):
        # The clause keywords live at child indices 3, 6, 9, ...
        bare_except = None
        for clause_keyword in try_stmt.children[3::3]:
            if clause_keyword in ('else', 'finally'):
                break
            if clause_keyword == 'except':
                # A bare `except:` — remember it in case another clause follows.
                bare_except = clause_keyword
            elif bare_except is not None:
                self.add_issue(bare_except, message=self.message)
988 | ||
989 | ||
@ErrorFinder.register_rule(type='fstring')
class _FStringRule(SyntaxRule):
    """Validates the replacement fields of an f-string literal."""
    _fstring_grammar = None
    message_expr = "f-string expression part cannot include a backslash"
    message_nested = "f-string: expressions nested too deeply"
    message_conversion = "f-string: invalid conversion character: expected 's', 'r', or 'a'"

    def _check_format_spec(self, format_spec, depth):
        # Skip the leading ':' and recurse into nested replacement fields.
        self._check_fstring_contents(format_spec.children[1:], depth)

    def _check_fstring_expr(self, fstring_expr, depth):
        # Nesting only happens through format specs; at most two levels
        # are allowed.
        if depth >= 2:
            self.add_issue(fstring_expr, message=self.message_nested)

        expr = fstring_expr.children[1]
        if '\\' in expr.get_code():
            self.add_issue(expr, message=self.message_expr)

        children_2 = fstring_expr.children[2]
        if children_2.type == 'operator' and children_2.value == '=':
            # Self-documenting form f'{x=}'; the conversion comes after '='.
            conversion = fstring_expr.children[3]
        else:
            conversion = children_2
        if conversion.type == 'fstring_conversion':
            name = conversion.children[1]
            if name.value not in ('s', 'r', 'a'):
                self.add_issue(name, message=self.message_conversion)

        # The format spec, if present, is the second-to-last child
        # (the last one is the closing '}').
        format_spec = fstring_expr.children[-2]
        if format_spec.type == 'fstring_format_spec':
            self._check_format_spec(format_spec, depth + 1)

    def is_issue(self, fstring):
        # Check everything between the opening and closing string tokens.
        self._check_fstring_contents(fstring.children[1:-1])

    def _check_fstring_contents(self, children, depth=0):
        for fstring_content in children:
            if fstring_content.type == 'fstring_expr':
                self._check_fstring_expr(fstring_content, depth)
1029 | ||
1030 | ||
class _CheckAssignmentRule(SyntaxRule):
    """Base class for rules that validate assignment/deletion targets.

    ``_check_assignment`` walks an expression node and reports an issue
    when it cannot be used as an (augmented/named/deletion) target.
    """

    def _check_assignment(self, node, is_deletion=False, is_namedexpr=False, is_aug_assign=False):
        # `error` collects a short description of the invalid target kind;
        # the final message is assembled at the bottom of this method.
        error = None
        type_ = node.type
        if type_ == 'lambdef':
            error = 'lambda'
        elif type_ == 'atom':
            first, second = node.children[:2]
            error = _get_comprehension_type(node)
            if error is None:
                if second.type == 'dictorsetmaker':
                    # Before 3.8 CPython lumped these under "literal".
                    if self._normalizer.version < (3, 8):
                        error = 'literal'
                    else:
                        if second.children[1] == ':':
                            error = 'dict display'
                        else:
                            error = 'set display'
                elif first == "{" and second == "}":
                    if self._normalizer.version < (3, 8):
                        error = 'literal'
                    else:
                        error = "dict display"
                elif first == "{" and len(node.children) > 2:
                    if self._normalizer.version < (3, 8):
                        error = 'literal'
                    else:
                        error = "set display"
                elif first in ('(', '['):
                    if second.type == 'yield_expr':
                        error = 'yield expression'
                    elif second.type == 'testlist_comp':
                        # ([a, b] := [1, 2])
                        # ((a, b) := [1, 2])
                        if is_namedexpr:
                            if first == '(':
                                error = 'tuple'
                            elif first == '[':
                                error = 'list'

                        # This is not a comprehension, they were handled
                        # further above.
                        for child in second.children[::2]:
                            self._check_assignment(child, is_deletion, is_namedexpr, is_aug_assign)
                    else:  # Everything handled, must be useless brackets.
                        self._check_assignment(second, is_deletion, is_namedexpr, is_aug_assign)
        elif type_ == 'keyword':
            if node.value == "yield":
                error = "yield expression"
            elif self._normalizer.version < (3, 8):
                error = 'keyword'
            else:
                # 3.8+ names the keyword itself (True, False, None, ...).
                error = str(node.value)
        elif type_ == 'operator':
            if node.value == '...':
                error = 'Ellipsis'
        elif type_ == 'comparison':
            error = 'comparison'
        elif type_ in ('string', 'number', 'strings'):
            error = 'literal'
        elif type_ == 'yield_expr':
            # This one seems to be a slightly different warning in Python.
            message = 'assignment to yield expression not possible'
            self.add_issue(node, message=message)
        elif type_ == 'test':
            error = 'conditional expression'
        elif type_ in ('atom_expr', 'power'):
            if node.children[0] == 'await':
                error = 'await expression'
            elif node.children[-2] == '**':
                error = 'operator'
            else:
                # Has a trailer
                trailer = node.children[-1]
                assert trailer.type == 'trailer'
                if trailer.children[0] == '(':
                    error = 'function call'
                elif is_namedexpr and trailer.children[0] == '[':
                    error = 'subscript'
                elif is_namedexpr and trailer.children[0] == '.':
                    error = 'attribute'
        elif type_ == "fstring":
            if self._normalizer.version < (3, 8):
                error = 'literal'
            else:
                error = "f-string expression"
        elif type_ in ('testlist_star_expr', 'exprlist', 'testlist'):
            # Recurse into every element of the target list (skip commas).
            for child in node.children[::2]:
                self._check_assignment(child, is_deletion, is_namedexpr, is_aug_assign)
        elif ('expr' in type_ and type_ != 'star_expr'  # is a substring
              or '_test' in type_
              or type_ in ('term', 'factor')):
            error = 'operator'
        elif type_ == "star_expr":
            if is_deletion:
                if self._normalizer.version >= (3, 9):
                    error = "starred"
                else:
                    self.add_issue(node, message="can't use starred expression here")
            else:
                if self._normalizer.version >= (3, 9):
                    ancestor = node.parent
                else:
                    # Older versions look through parentheses first.
                    ancestor = _skip_parens_bottom_up(node)
                if ancestor.type not in _STAR_EXPR_PARENTS and not is_aug_assign \
                        and not (ancestor.type == 'atom' and ancestor.children[0] == '['):
                    message = "starred assignment target must be in a list or tuple"
                    self.add_issue(node, message=message)

            # Also validate the expression after the '*'.
            self._check_assignment(node.children[1])

        if error is not None:
            if is_namedexpr:
                message = 'cannot use assignment expressions with %s' % error
            else:
                # CPython changed "can't" to "cannot" in 3.8.
                cannot = "can't" if self._normalizer.version < (3, 8) else "cannot"
                message = ' '.join([cannot, "delete" if is_deletion else "assign to", error])
            self.add_issue(node, message=message)
1149 | ||
1150 | ||
@ErrorFinder.register_rule(type='sync_comp_for')
class _CompForRule(_CheckAssignmentRule):
    message = "asynchronous comprehension outside of an asynchronous function"

    def is_issue(self, node):
        # Validate the assignment targets of the comprehension's `for` part.
        targets = node.children[1]
        if targets.type != 'expr_list':  # Already handled.
            self._check_assignment(targets)

        # `async for` in a comprehension is only valid inside an async def.
        is_async = node.parent.children[0] == 'async'
        return is_async and not self._normalizer.context.is_async_funcdef()
1162 | ||
1163 | ||
@ErrorFinder.register_rule(type='expr_stmt')
class _ExprStmtRule(_CheckAssignmentRule):
    """Validates the targets of (augmented) assignment statements."""
    message = "illegal expression for augmented assignment"
    extended_message = "'{target}' is an " + message

    def is_issue(self, node):
        augassign = node.children[1]
        is_aug_assign = augassign != '=' and augassign.type != 'annassign'

        if self._normalizer.version <= (3, 8) or not is_aug_assign:
            # Check every target left of an '=' (a = b = c has two targets).
            for before_equal in node.children[:-2:2]:
                self._check_assignment(before_equal, is_aug_assign=is_aug_assign)

        if is_aug_assign:
            target = _remove_parens(node.children[0])
            # a, a[b], a.b

            # Only a name or a non-call trailer chain (subscript/attribute)
            # is a valid augmented-assignment target.
            if target.type == "name" or (
                target.type in ("atom_expr", "power")
                and target.children[1].type == "trailer"
                and target.children[-1].children[0] != "("
            ):
                return False

            if self._normalizer.version <= (3, 8):
                return True
            else:
                # Newer versions describe the offending target in the message.
                self.add_issue(
                    node,
                    message=self.extended_message.format(
                        target=_get_rhs_name(node.children[0], self._normalizer.version)
                    ),
                )
1197 | ||
1198 | ||
@ErrorFinder.register_rule(type='with_item')
class _WithItemRule(_CheckAssignmentRule):
    """Validates the target of an ``as`` clause in a ``with`` statement."""

    def is_issue(self, with_item):
        # children: <context expr> 'as' <target>
        target = with_item.children[2]
        self._check_assignment(target)
1203 | ||
1204 | ||
@ErrorFinder.register_rule(type='del_stmt')
class _DelStmtRule(_CheckAssignmentRule):
    """Checks that the target of a ``del`` statement is deletable."""

    def is_issue(self, del_stmt):
        # children: 'del' <target>
        target = del_stmt.children[1]

        # expr_list targets are already handled elsewhere.
        if target.type != 'expr_list':
            self._check_assignment(target, is_deletion=True)
1212 | ||
1213 | ||
@ErrorFinder.register_rule(type='expr_list')
class _ExprListRule(_CheckAssignmentRule):
    """Validates every expression of a comma-separated target list."""

    def is_issue(self, expr_list):
        for index, expr in enumerate(expr_list.children):
            # Odd indices hold the ',' separators; skip them.
            if index % 2 == 0:
                self._check_assignment(expr)
1219 | ||
1220 | ||
@ErrorFinder.register_rule(type='for_stmt')
class _ForStmtRule(_CheckAssignmentRule):
    """Validates the assignment targets of a ``for`` statement."""

    def is_issue(self, for_stmt):
        # Some of the nodes here are already used, so no else if
        # children: 'for' <targets> 'in' <iterable> ':' ...
        targets = for_stmt.children[1]
        if targets.type != 'expr_list':  # Already handled.
            self._check_assignment(targets)
1228 | ||
1229 | ||
@ErrorFinder.register_rule(type='namedexpr_test')
class _NamedExprRule(_CheckAssignmentRule):
    """Validates ``:=`` assignment expressions (PEP 572 restrictions)."""
    # namedexpr_test: test [':=' test]

    def is_issue(self, namedexpr_test):
        # assigned name
        first = namedexpr_test.children[0]

        def search_namedexpr_in_comp_for(node):
            # Return the enclosing sync_comp_for if this node sits in its
            # iterable part (children[3]); None otherwise.
            while True:
                parent = node.parent
                if parent is None:
                    return parent
                if parent.type == 'sync_comp_for' and parent.children[3] == node:
                    return parent
                node = parent

        if search_namedexpr_in_comp_for(namedexpr_test):
            # [i+1 for i in (i := range(5))]
            # [i+1 for i in (j := range(5))]
            # [i+1 for i in (lambda: (j := range(5)))()]
            message = 'assignment expression cannot be used in a comprehension iterable expression'
            self.add_issue(namedexpr_test, message=message)

        # defined names
        exprlist = list()

        def process_comp_for(comp_for):
            # Collect the iteration variables of a (possibly async)
            # comprehension `for` clause into `exprlist`.
            if comp_for.type == 'sync_comp_for':
                comp = comp_for
            elif comp_for.type == 'comp_for':
                # comp_for: 'async' sync_comp_for
                comp = comp_for.children[1]
            exprlist.extend(_get_for_stmt_definition_exprs(comp))

        def search_all_comp_ancestors(node):
            # Walk up through every enclosing comprehension, gathering its
            # iteration variables; return True if any comprehension found.
            has_ancestors = False
            while True:
                node = node.search_ancestor('testlist_comp', 'dictorsetmaker')
                if node is None:
                    break
                for child in node.children:
                    if child.type in _COMP_FOR_TYPES:
                        process_comp_for(child)
                        has_ancestors = True
                        break
            return has_ancestors

        # check assignment expressions in comprehensions
        search_all = search_all_comp_ancestors(namedexpr_test)
        if search_all:
            if self._normalizer.context.node.type == 'classdef':
                message = 'assignment expression within a comprehension ' \
                          'cannot be used in a class body'
                self.add_issue(namedexpr_test, message=message)

            namelist = [expr.value for expr in exprlist if expr.type == 'name']
            if first.type == 'name' and first.value in namelist:
                # [i := 0 for i, j in range(5)]
                # [[(i := i) for j in range(5)] for i in range(5)]
                # [i for i, j in range(5) if True or (i := 1)]
                # [False and (i := 0) for i, j in range(5)]
                message = 'assignment expression cannot rebind ' \
                          'comprehension iteration variable %r' % first.value
                self.add_issue(namedexpr_test, message=message)

        # Finally validate the left-hand side as a named-expression target.
        self._check_assignment(first, is_namedexpr=True)