import contextlib
import re
from dataclasses import dataclass
from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union

from .specifiers import Specifier


@dataclass
class Token:
    name: str
    text: str
    position: int


class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: Tuple[int, int],
    ) -> None:
        self.span = span
        self.message = message
        self.source = source

        super().__init__()

    def __str__(self) -> str:
        marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^"
        return "\n    ".join([self.message, self.source, marker])
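

# Illustrative sketch (not part of the module): __str__ pads the marker line to
# span[0], draws tildes across the span, and closes with a caret. For message
# "Expected x", source "name >= 1.0", and span (5, 7), str() would render:
#
#     Expected x
#         name >= 1.0
#              ~~^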


DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
    "LEFT_PARENTHESIS": r"\(",
    "RIGHT_PARENTHESIS": r"\)",
    "LEFT_BRACKET": r"\[",
    "RIGHT_BRACKET": r"\]",
    "SEMICOLON": r";",
    "COMMA": r",",
    "QUOTED_STRING": re.compile(
        r"""
            (
                ('[^']*')
                |
                ("[^"]*")
            )
        """,
        re.VERBOSE,
    ),
    "OP": r"(===|==|~=|!=|<=|>=|<|>)",
    "BOOLOP": r"\b(or|and)\b",
    "IN": r"\bin\b",
    "NOT": r"\bnot\b",
    "VARIABLE": re.compile(
        r"""
            \b(
                python_version
                |python_full_version
                |os[._]name
                |sys[._]platform
                |platform_(release|system)
                |platform[._](version|machine|python_implementation)
                |python_implementation
                |implementation_(name|version)
                |extra
            )\b
        """,
        re.VERBOSE,
    ),
    "SPECIFIER": re.compile(
        Specifier._operator_regex_str + Specifier._version_regex_str,
        re.VERBOSE | re.IGNORECASE,
    ),
    "AT": r"\@",
    "URL": r"[^ \t]+",
    "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
    "VERSION_PREFIX_TRAIL": r"\.\*",
    "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
    "WS": r"[ \t]+",
    "END": r"$",
}
89 """Context-sensitive token parsing.
91 Provides methods to examine the input stream to check whether the next token
99 rules
: "Dict[str, Union[str, re.Pattern[str]]]",
102 self
.rules
: Dict
[str, re
.Pattern
[str]] = {
103 name
: re
.compile(pattern
) for name
, pattern
in rules
.items()
105 self
.next_token
: Optional
[Token
] = None
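
    # Illustrative sketch (not part of the module): a tokenizer wraps one full
    # source string and is advanced via check()/read(), e.g.
    #
    #     tokenizer = Tokenizer("name >= 1.0", rules=DEFAULT_RULES)
    #     tokenizer.check("IDENTIFIER")   # True; loads the "name" token
    #     tokenizer.read()                # consumes it, advancing .position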

    def consume(self, name: str) -> None:
        """Move beyond provided token name, if at current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        assert (
            self.next_token is None
        ), f"Cannot check for {name!r}, already have {self.next_token!r}"
        assert name in self.rules, f"Unknown token name: {name!r}"

        expression = self.rules[name]

        match = expression.match(self.source, self.position)
        if match is None:
            return False
        if not peek:
            self.next_token = Token(name, match[0], self.position)
        return True
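
    # Illustrative sketch (not part of the module): peek=True inspects without
    # loading, so the same position can be checked repeatedly, e.g.
    #
    #     tokenizer.check("OP", peek=True)   # True; next_token stays None
    #     tokenizer.check("OP")              # True; next_token now loaded
    #     tokenizer.read()                   # required before the next check()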

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        On success, the token is read and returned.
        """
        if not self.check(name):
            raise self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        token = self.next_token
        assert token is not None

        self.position += len(token.text)
        self.next_token = None

        return token

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: Optional[int] = None,
        span_end: Optional[int] = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position."""
        span = (
            self.position if span_start is None else span_start,
            self.position if span_end is None else span_end,
        )
        raise ParserSyntaxError(
            message,
            source=self.source,
            span=span,
        )
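
    # Illustrative sketch (not part of the module): with no explicit span, the
    # error points at the current position, e.g. failing at position 5 yields
    # span (5, 5): zero tildes, just a caret under column 5 of the source line.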

    @contextlib.contextmanager
    def enclosing_tokens(
        self, open_token: str, close_token: str, *, around: str
    ) -> Iterator[None]:
        if self.check(open_token):
            open_position = self.position
            self.read()
        else:
            open_position = None

        yield

        if open_position is None:
            return

        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected matching {close_token} for {open_token}, after {around}",
                span_start=open_position,
            )

        self.read()
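
    # Illustrative sketch (not part of the module): a caller parses an
    # optionally bracketed region, and the context manager enforces that an
    # opened bracket is eventually closed, e.g.
    #
    #     with tokenizer.enclosing_tokens(
    #         "LEFT_BRACKET", "RIGHT_BRACKET", around="extras"
    #     ):
    #         ...  # parse the bracketed contents here
    #
    # If LEFT_BRACKET was never present, the block runs and nothing is
    # enforced; if it was, a missing RIGHT_BRACKET raises a ParserSyntaxError
    # whose span starts back at the opening bracket.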