
Based on the feedback in https://github.com/python/black/pull/845#issuecomment-490622711:
- Remove TokenizerConfig, and add a field to Grammar instead.
- Pass the Grammar to the tokenizer.
- Rename `ASYNC_IS_RESERVED_KEYWORD` to `ASYNC_KEYWORDS` and `ASYNC_IS_VALID_IDENTIFIER` to `ASYNC_IDENTIFIERS`.
# Stubs for lib2to3.pgen2.tokenize (Python 3.6)

# NOTE: Only elements from __all__ are present.

from typing import Callable, Iterable, Iterator, List, Optional, Text, Tuple

# Re-exports the token constants (NAME, OP, ...) from the token module.
from blib2to3.pgen2.token import * # noqa

# Grammar carries tokenizer configuration (e.g. the async-keyword
# handling mentioned in the header note) — passed to generate_tokens.
from blib2to3.pygram import Grammar
# A (row, column) source-code position.
_Coord = Tuple[int, int]

# Per-token callback: presumably (type, string, start, end, line) as in the
# stdlib tokenize module — verify against the runtime implementation.
_TokenEater = Callable[[int, Text, _Coord, _Coord, Text], None]

# One token tuple: same five fields as accepted by _TokenEater.
_TokenInfo = Tuple[int, Text, _Coord, _Coord, Text]
# Error raised by the tokenizer (conditions defined in the runtime module).
class TokenError(Exception): ...
# Presumably used as internal control flow to abort tokenization early,
# as in the stdlib lib2to3 tokenizer — confirm against the runtime module.
class StopTokenizing(Exception): ...
# Tokenize input fetched line-by-line via `readline`, invoking
# `tokeneater` once per token. Returns nothing; output is via the callback.
def tokenize(readline: Callable[[], Text], tokeneater: _TokenEater = ...) -> None: ...
# Rebuilds source text from a token stream; mirrors the stdlib
# tokenize.Untokenizer interface.
class Untokenizer:
    # Accumulated output fragments.
    tokens: List[Text]
    # Row of the end of the last emitted token.
    prev_row: int
    # Column of the end of the last emitted token.
    prev_col: int
    def __init__(self) -> None: ...
    # Emit whitespace needed to advance from (prev_row, prev_col) to `start`.
    def add_whitespace(self, start: _Coord) -> None: ...
    # Reconstruct and return source text for the given token stream.
    def untokenize(self, iterable: Iterable[_TokenInfo]) -> Text: ...
    # Compatibility path taking 2-tuple (type, string) tokens without
    # position info — as in the stdlib Untokenizer.compat; confirm at runtime.
    def compat(self, token: Tuple[int, Text], iterable: Iterable[_TokenInfo]) -> None: ...
# Module-level convenience wrapper: reconstruct source text from tokens.
def untokenize(iterable: Iterable[_TokenInfo]) -> Text: ...
# Generator form of the tokenizer: yields one _TokenInfo per token read
# via `readline`. `grammar` optionally configures tokenization (per the
# header note, e.g. whether async/await are treated as keywords).
def generate_tokens(
    readline: Callable[[], Text],
    grammar: Optional[Grammar] = ...
) -> Iterator[_TokenInfo]: ...