# Copyright 2022 Bill Wendling, All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
14 """Simple Python Parser
16 Parse Python code into a list of logical lines, represented by LogicalLine
17 objects. This uses Python's tokenizer to generate the tokens. As such, YAPF must
18 be run with the appropriate Python version---Python >=3.7 for Python 3.7 code,
19 Python >=3.8 for Python 3.8 code, etc.
21 This parser uses Python's native "tokenizer" module to generate a list of tokens
22 for the source code. It then uses Python's native "ast" module to assign
23 subtypes, calculate split penalties, etc.
25 A "logical line" produced by Python's "tokenizer" module ends with a
26 tokenize.NEWLINE, rather than a tokenize.NL, making it easy to separate them
27 out. Comments all end with a tokentizer.NL, so we need to make sure we don't
28 errantly pick up non-comment tokens when parsing comment blocks.
30 ParseCode(): parse the code producing a list of logical lines.
33 # TODO: Call from yapf_api.FormatCode.
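
# Illustration of the NEWLINE/NL distinction described above: for the source
#
#   # a comment
#   x = 1
#
# the "tokenize" module emits roughly
#
#   COMMENT '# a comment', NL, NAME 'x', OP '=', NUMBER '1', NEWLINE, ...
#
# Only the NEWLINE token ends a logical line; the NL after the comment does
# not, which is why _CreateLogicalLines below treats the two differently.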
import ast
import os
import token
import tokenize
from io import StringIO
from tokenize import TokenInfo

from yapf.pyparser import split_penalty_visitor
from yapf.yapflib import format_token
from yapf.yapflib import logical_line

CONTINUATION = token.N_TOKENS
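# Note: token.N_TOKENS is the number of token types defined by the stdlib
# "token" module and is not itself used as a token type, so the synthetic
# CONTINUATION type above cannot collide with any id the tokenizer produces.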
def ParseCode(unformatted_source, filename='<unknown>'):
  """Parse a string of Python code into logical lines.

  This provides an alternative entry point to YAPF.

  Arguments:
    unformatted_source: (unicode) The code to format.
    filename: (unicode) The name of the file being reformatted.

  Returns:
    A list of LogicalLines.

  Raises:
    An exception is raised if there's an error during AST parsing.
  """
  if not unformatted_source.endswith(os.linesep):
    unformatted_source += os.linesep

  ast_tree = ast.parse(unformatted_source, filename)
  ast.fix_missing_locations(ast_tree)
  readline = StringIO(unformatted_source).readline
  tokens = tokenize.generate_tokens(readline)

  logical_lines = _CreateLogicalLines(tokens)

  # Process the logical lines.
  split_penalty_visitor.SplitPenalty(logical_lines).visit(ast_tree)

  return logical_lines
def _CreateLogicalLines(tokens):
  """Separate tokens into logical lines.

  Arguments:
    tokens: (list of tokenize.TokenInfo) Tokens generated by the tokenizer.

  Returns:
    A list of LogicalLines.
  """
  formatted_tokens = []

  # Convert tokens into "TokenInfo" objects and add tokens for continuation
  # markers.
  prev_tok = None
  for tok in tokens:
    tok = TokenInfo(*tok)

    if (prev_tok and prev_tok.line.rstrip().endswith('\\') and
        prev_tok.start[0] < tok.start[0]):
      # The previous physical line ended with a backslash continuation;
      # synthesize a CONTINUATION token for it.
      ctok = TokenInfo(
          type=CONTINUATION,
          string='\\',
          start=(prev_tok.start[0], prev_tok.start[1] + 1),
          end=(prev_tok.end[0], prev_tok.end[0] + 2),
          line=prev_tok.line)
      ctok.lineno = ctok.start[0]
      ctok.column = ctok.start[1]
      ctok.value = '\\'
      formatted_tokens.append(format_token.FormatToken(ctok, 'CONTINUATION'))

    tok.lineno = tok.start[0]
    tok.column = tok.start[1]
    tok.value = tok.string
    formatted_tokens.append(
        format_token.FormatToken(tok, token.tok_name[tok.type]))
    prev_tok = tok

  # Generate logical lines.
  depth = 0
  logical_lines, cur_logical_line = [], []
  for tok in formatted_tokens:
    if tok.type == tokenize.ENDMARKER:
      break

    if tok.type == tokenize.NEWLINE:
      # End of a logical line.
      logical_lines.append(logical_line.LogicalLine(depth, cur_logical_line))
      cur_logical_line = []
    elif tok.type == tokenize.INDENT:
      depth += 1
    elif tok.type == tokenize.DEDENT:
      depth -= 1
    elif tok.type == tokenize.NL:
      pass
    else:
      if (cur_logical_line and not tok.type == tokenize.COMMENT and
          cur_logical_line[0].type == tokenize.COMMENT):
        # We were parsing a comment block, but now we have real code to worry
        # about. Store the comment and carry on.
        logical_lines.append(logical_line.LogicalLine(depth, cur_logical_line))
        cur_logical_line = []

      cur_logical_line.append(tok)

  # Link the FormatTokens in each line together to form a doubly linked list.
  for line in logical_lines:
    previous = line.first
    bracket_stack = [previous] if previous.OpensScope() else []
    for tok in line.tokens[1:]:
      tok.previous_token = previous
      previous.next_token = tok
      previous = tok

      # Set up the "matching_bracket" attribute.
      if tok.OpensScope():
        bracket_stack.append(tok)
      elif tok.ClosesScope():
        bracket_stack[-1].matching_bracket = tok
        tok.matching_bracket = bracket_stack.pop()

  return logical_lines
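
# For a logical line such as `foo(a, b)`, the linking pass above chains the
# FormatTokens as
#
#   foo <-> ( <-> a <-> , <-> b <-> )
#
# via previous_token/next_token, and the `(` and `)` tokens become each
# other's matching_bracket (an illustration of the intended result, not code).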