blib2to3: add a few annotations (#3675)

Jelle Zijlstra 2023-05-03 10:26:57 -07:00 committed by GitHub
parent a07871b9cd
commit eb32729ab5


@@ -34,6 +34,7 @@
     Iterator,
     List,
     Optional,
+    Set,
     Text,
     Tuple,
     Pattern,
@ -66,19 +67,19 @@
del token del token
def group(*choices): def group(*choices: str) -> str:
return "(" + "|".join(choices) + ")" return "(" + "|".join(choices) + ")"
def any(*choices): def any(*choices: str) -> str:
return group(*choices) + "*" return group(*choices) + "*"
def maybe(*choices): def maybe(*choices: str) -> str:
return group(*choices) + "?" return group(*choices) + "?"
def _combinations(*l): def _combinations(*l: str) -> Set[str]:
return set(x + y for x in l for y in l + ("",) if x.casefold() != y.casefold()) return set(x + y for x in l for y in l + ("",) if x.casefold() != y.casefold())
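
These helpers assemble the module's token regular expressions by plain string concatenation, which is why str annotations fit throughout, and why _combinations, which builds the set of casefold-distinct prefix pairs, returns Set[str]. A minimal illustration of their behavior, evaluated in the module's namespace (example calls, not part of the commit):

    >>> group("'", '"')
    '(\'|")'
    >>> maybe("r", "R")
    '(r|R)?'
    >>> sorted(_combinations("r", "b"))
    ['b', 'br', 'r', 'rb']

Note that any here shadows the builtin of the same name within this module.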
@@ -187,15 +188,19 @@ class StopTokenizing(Exception):
     pass


-def printtoken(type, token, xxx_todo_changeme, xxx_todo_changeme1, line):  # for testing
-    (srow, scol) = xxx_todo_changeme
-    (erow, ecol) = xxx_todo_changeme1
+Coord = Tuple[int, int]
+
+
+def printtoken(
+    type: int, token: Text, srow_col: Coord, erow_col: Coord, line: Text
+) -> None:  # for testing
+    (srow, scol) = srow_col
+    (erow, ecol) = erow_col
     print(
         "%d,%d-%d,%d:\t%s\t%s" % (srow, scol, erow, ecol, tok_name[type], repr(token))
     )


-Coord = Tuple[int, int]
 TokenEater = Callable[[int, Text, Coord, Coord, Text], None]
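
Moving Coord above printtoken lets the function signature and the TokenEater alias share the same tuple type. Any callback with this shape can stand in for printtoken; a small sketch (the collect function and seen list are hypothetical, and Text is just typing's alias for str):

    from blib2to3.pgen2.tokenize import Coord, tok_name

    seen = []

    def collect(type: int, token: str, start: Coord, end: Coord, line: str) -> None:
        # Same shape as printtoken: token type, text, start/end coordinates, raw line.
        seen.append((tok_name[type], token, start, end))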
@@ -219,7 +224,7 @@ def tokenize(readline: Callable[[], Text], tokeneater: TokenEater = printtoken)
 # backwards compatible interface
-def tokenize_loop(readline, tokeneater):
+def tokenize_loop(readline: Callable[[], Text], tokeneater: TokenEater) -> None:
     for token_info in generate_tokens(readline):
         tokeneater(*token_info)
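
tokenize_loop just drives generate_tokens and hands each 5-tuple to the callback, so annotating it with TokenEater documents the same contract that tokenize already declares above. A usage sketch (assuming a readline-style source, as in the standard tokenize modules):

    import io
    from blib2to3.pgen2 import tokenize

    # Prints one "row,col-row,col: TYPE 'text'" line per token of the source.
    tokenize.tokenize_loop(io.StringIO("x = 1\n").readline, tokenize.printtoken)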
@@ -229,7 +234,6 @@ def tokenize_loop(readline, tokeneater):


 class Untokenizer:
-
     tokens: List[Text]
     prev_row: int
     prev_col: int
@@ -603,7 +607,9 @@ def generate_tokens(
                 or endprogs.get(token[1])
                 or endprogs.get(token[2])
             )
-            assert maybe_endprog is not None, f"endprog not found for {token}"
+            assert (
+                maybe_endprog is not None
+            ), f"endprog not found for {token}"
             endprog = maybe_endprog
             contstr, needcont = line[start:], 1
             contline = line
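
The reflowed assert is purely cosmetic; its job is to narrow the Optional result of the chained endprogs.get calls so the assignment to endprog type-checks. The same narrowing pattern in isolation (a generic sketch, not code from this module):

    from typing import Dict, Optional

    def lookup(table: Dict[str, int], key: str) -> int:
        maybe_value: Optional[int] = table.get(key)
        assert maybe_value is not None, f"no entry for {key}"
        # After the assert, mypy treats maybe_value as int, not Optional[int].
        return maybe_value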
@@ -632,7 +638,6 @@ def generate_tokens(
                 if token in ("def", "for"):
                     if stashed and stashed[0] == NAME and stashed[1] == "async":
-
                         if token == "def":
                             async_def = True
                             async_def_indent = indents[-1]
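
This branch fires when async was stashed as a plain NAME token and the next token opens a def or for, i.e. async def / async for; the commit only removes a stray blank line here. Input that exercises the path, as a sketch against the module's own generator (behavior assumed from the surrounding code, not verified output):

    import io
    from blib2to3.pgen2.tokenize import generate_tokens

    src = "async def f():\n    pass\n"
    for tok in generate_tokens(io.StringIO(src).readline):
        print(tok[:2])  # (token type, token text) pairs, including the stashed async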