Fix most blib2to3 lint (#3794)

Shantanu 2023-07-16 21:33:58 -07:00 committed by GitHub
parent 8d80aecd50
commit c1e30d97fe
11 changed files with 72 additions and 80 deletions

.pre-commit-config.yaml

@@ -1,6 +1,6 @@
 # Note: don't use this config for your own repositories. Instead, see
 # "Version control integration" in docs/integrations/source_version_control.md
-exclude: ^(src/blib2to3/|profiling/|tests/data/)
+exclude: ^(profiling/|tests/data/)
 repos:
   - repo: local
     hooks:
@@ -36,6 +36,7 @@ repos:
           - flake8-bugbear
           - flake8-comprehensions
           - flake8-simplify
+        exclude: ^src/blib2to3/
   - repo: https://github.com/pre-commit/mirrors-mypy
     rev: v1.4.1

pyproject.toml

@@ -12,8 +12,7 @@ include = '\.pyi?$'
 extend-exclude = '''
 /(
   # The following are specific to Black, you probably don't want those.
-  | blib2to3
-  | tests/data
+  tests/data
   | profiling
 )/
 '''
@@ -183,7 +182,7 @@ atomic = true
 profile = "black"
 line_length = 88
 skip_gitignore = true
-skip_glob = ["src/blib2to3", "tests/data", "profiling"]
+skip_glob = ["tests/data", "profiling"]
 known_first_party = ["black", "blib2to3", "blackd", "_black_version"]
 
 [tool.pytest.ini_options]

src/blib2to3/README

@@ -1,18 +1,19 @@
-A subset of lib2to3 taken from Python 3.7.0b2.
-Commit hash: 9c17e3a1987004b8bcfbe423953aad84493a7984
+A subset of lib2to3 taken from Python 3.7.0b2. Commit hash:
+9c17e3a1987004b8bcfbe423953aad84493a7984
 
 Reasons for forking:
+
 - consistent handling of f-strings for users of Python < 3.6.2
-- backport of BPO-33064 that fixes parsing files with trailing commas after
-  *args and **kwargs
-- backport of GH-6143 that restores the ability to reformat legacy usage of
-  `async`
+- backport of BPO-33064 that fixes parsing files with trailing commas after \*args and
+  \*\*kwargs
+- backport of GH-6143 that restores the ability to reformat legacy usage of `async`
 - support all types of string literals
 - better ability to debug (better reprs)
 - INDENT and DEDENT don't hold whitespace and comment prefixes
 - ability to Cythonize
 
 Change Log:
+
 - Changes default logger used by Driver
 - Backported the following upstream parser changes:
   - "bpo-42381: Allow walrus in set literals and set comprehensions (GH-23332)"

src/blib2to3/pgen2/driver.py

@@ -17,30 +17,21 @@
 # Python imports
 import io
-import os
 import logging
+import os
 import pkgutil
 import sys
-from typing import (
-    Any,
-    cast,
-    IO,
-    Iterable,
-    List,
-    Optional,
-    Iterator,
-    Tuple,
-    Union,
-)
 from contextlib import contextmanager
 from dataclasses import dataclass, field
-
-# Pgen imports
-from . import grammar, parse, token, tokenize, pgen
 from logging import Logger
-from blib2to3.pytree import NL
+from typing import IO, Any, Iterable, Iterator, List, Optional, Tuple, Union, cast
+
 from blib2to3.pgen2.grammar import Grammar
 from blib2to3.pgen2.tokenize import GoodTokenInfo
+from blib2to3.pytree import NL
+
+# Pgen imports
+from . import grammar, parse, pgen, token, tokenize
 
 Path = Union[str, "os.PathLike[str]"]
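
Note: the driver.py hunk is pure import hygiene. With blib2to3 no longer excluded, isort rewrites the header into its Black-profile shape: one alphabetized group per section (standard library, then first-party packages, then relative imports), with the `typing` names collapsed onto a single line. A minimal sketch of reproducing the check locally, assuming isort is installed and the command runs from the repository root:

    # Ask isort to verify the file without rewriting it; a non-zero exit
    # code means the imports differ from what isort would produce.
    import subprocess

    result = subprocess.run(
        ["isort", "--check-only", "--profile", "black",
         "src/blib2to3/pgen2/driver.py"],
        capture_output=True,
        text=True,
    )
    print(result.returncode, result.stderr)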

src/blib2to3/pgen2/literals.py

@@ -4,10 +4,8 @@
 """Safely evaluate Python string literals without using eval()."""
 
 import re
-
 from typing import Dict, Match
 
-
 simple_escapes: Dict[str, str] = {
     "a": "\a",
     "b": "\b",

src/blib2to3/pgen2/parse.py

@@ -10,24 +10,25 @@
 """
 from contextlib import contextmanager
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Iterator,
+    List,
+    Optional,
+    Set,
+    Tuple,
+    Union,
+    cast,
+)
+
+from blib2to3.pgen2.grammar import Grammar
+from blib2to3.pytree import NL, Context, Leaf, Node, RawNode, convert
 
 # Local imports
 from . import grammar, token, tokenize
-from typing import (
-    cast,
-    Any,
-    Optional,
-    Union,
-    Tuple,
-    Dict,
-    List,
-    Iterator,
-    Callable,
-    Set,
-    TYPE_CHECKING,
-)
-from blib2to3.pgen2.grammar import Grammar
-from blib2to3.pytree import convert, NL, Context, RawNode, Leaf, Node
 
 if TYPE_CHECKING:
     from blib2to3.pgen2.driver import TokenProxy
@@ -112,7 +113,9 @@ def add_token(self, tok_type: int, tok_val: str, raw: bool = False) -> None:
             args.insert(0, ilabel)
         func(*args)
 
-    def determine_route(self, value: Optional[str] = None, force: bool = False) -> Optional[int]:
+    def determine_route(
+        self, value: Optional[str] = None, force: bool = False
+    ) -> Optional[int]:
         alive_ilabels = self.ilabels
         if len(alive_ilabels) == 0:
             *_, most_successful_ilabel = self._dead_ilabels
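
Note: the determine_route change is mechanical. The one-line signature overran Black's 88-character limit, so the parameters move onto their own indented line. The same reflow can be reproduced through Black's Python API; a small sketch (the source string fed in is illustrative):

    # format_str applies Black to a source string; Mode(line_length=88)
    # mirrors the project default.
    import black

    SRC = (
        "def determine_route(self, value: Optional[str] = None,"
        " force: bool = False) -> Optional[int]: ...\n"
    )
    print(black.format_str(SRC, mode=black.Mode(line_length=88)))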

src/blib2to3/pgen2/pgen.py

@@ -1,25 +1,22 @@
 # Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
 # Licensed to PSF under a Contributor Agreement.
 
-# Pgen imports
-from . import grammar, token, tokenize
+import os
 from typing import (
+    IO,
     Any,
     Dict,
-    IO,
     Iterator,
     List,
+    NoReturn,
     Optional,
+    Sequence,
     Tuple,
     Union,
-    Sequence,
-    NoReturn,
 )
-from blib2to3.pgen2 import grammar
-from blib2to3.pgen2.tokenize import GoodTokenInfo
-import os
+
+from blib2to3.pgen2 import grammar, token, tokenize
+from blib2to3.pgen2.tokenize import GoodTokenInfo
 
 Path = Union[str, "os.PathLike[str]"]
@@ -149,7 +146,7 @@ def calcfirst(self, name: str) -> None:
         state = dfa[0]
         totalset: Dict[str, int] = {}
         overlapcheck = {}
-        for label, next in state.arcs.items():
+        for label in state.arcs:
             if label in self.dfas:
                 if label in self.first:
                     fset = self.first[label]
@@ -190,9 +187,9 @@ def parse(self) -> Tuple[Dict[str, List["DFAState"]], str]:
             # self.dump_nfa(name, a, z)
             dfa = self.make_dfa(a, z)
             # self.dump_dfa(name, dfa)
-            oldlen = len(dfa)
+            # oldlen = len(dfa)
             self.simplify_dfa(dfa)
-            newlen = len(dfa)
+            # newlen = len(dfa)
             dfas[name] = dfa
             # print name, oldlen, newlen
             if startsymbol is None:
@@ -346,7 +343,7 @@ def parse_atom(self) -> Tuple["NFAState", "NFAState"]:
             self.raise_error(
                 "expected (...) or NAME or STRING, got %s/%s", self.type, self.value
             )
-            assert False
+            raise AssertionError
 
     def expect(self, type: int, value: Optional[Any] = None) -> str:
         if self.type != type or (value is not None and self.value != value):
@@ -368,7 +365,7 @@ def raise_error(self, msg: str, *args: Any) -> NoReturn:
         if args:
             try:
                 msg = msg % args
-            except:
+            except Exception:
                 msg = " ".join([msg] + list(map(str, args)))
         raise SyntaxError(msg, (self.filename, self.end[0], self.end[1], self.line))
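
Note: three of the pgen.py edits are flake8 fixes rather than reformatting. The unused loop value, which also shadowed the builtin `next`, is dropped (B007); `assert False` becomes `raise AssertionError` because assertions vanish under `python -O` (B011); and the bare `except:` is narrowed so it can no longer swallow `KeyboardInterrupt` or `SystemExit` (E722). A self-contained sketch of all three patterns, with illustrative names:

    def print_labels(arcs: dict) -> None:
        # B007: iterate keys directly instead of unpacking an unused value
        # into a name that shadows the builtin `next`.
        for label in arcs:
            print(label)


    def unreachable() -> None:
        # B011: `assert False` is stripped when Python runs with -O;
        # raising explicitly keeps the guard in optimized mode.
        raise AssertionError("should never get here")


    def format_message(msg: str, *args: object) -> str:
        try:
            return msg % args
        # E722: a bare `except:` would also catch KeyboardInterrupt
        # and SystemExit; Exception excludes those.
        except Exception:
            return " ".join([msg, *map(str, args)])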

src/blib2to3/pgen2/token.py

@@ -1,8 +1,6 @@
 """Token constants (from "token.h")."""
 
-from typing import Dict
-from typing import Final
+from typing import Dict, Final
 
 # Taken from Python (r53757) and modified to include some tokens
 # originally monkeypatched in by pgen2.tokenize

src/blib2to3/pgen2/tokenize.py

@@ -30,28 +30,41 @@
 import sys
 from typing import (
     Callable,
+    Final,
     Iterable,
     Iterator,
     List,
     Optional,
+    Pattern,
     Set,
     Tuple,
-    Pattern,
     Union,
     cast,
 )
-from typing import Final
 
-from blib2to3.pgen2.token import *
 from blib2to3.pgen2.grammar import Grammar
+from blib2to3.pgen2.token import (
+    ASYNC,
+    AWAIT,
+    COMMENT,
+    DEDENT,
+    ENDMARKER,
+    ERRORTOKEN,
+    INDENT,
+    NAME,
+    NEWLINE,
+    NL,
+    NUMBER,
+    OP,
+    STRING,
+    tok_name,
+)
 
 __author__ = "Ka-Ping Yee <ping@lfw.org>"
 __credits__ = "GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro"
 
 import re
 from codecs import BOM_UTF8, lookup
-from blib2to3.pgen2.token import *
 
 from . import token
@@ -334,7 +347,7 @@ def read_or_stop() -> bytes:
         try:
             return readline()
         except StopIteration:
-            return b''
+            return b""
 
     def find_cookie(line: bytes) -> Optional[str]:
         try:
@@ -676,14 +689,12 @@ def generate_tokens(
                 yield stashed
                 stashed = None
 
-    for indent in indents[1:]:  # pop remaining indent levels
+    for _indent in indents[1:]:  # pop remaining indent levels
         yield (DEDENT, "", (lnum, 0), (lnum, 0), "")
     yield (ENDMARKER, "", (lnum, 0), (lnum, 0), "")
 
 
 if __name__ == "__main__":  # testing
-    import sys
-
     if len(sys.argv) > 1:
         tokenize(open(sys.argv[1]).readline)
     else:
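
Note: the biggest tokenize.py change replaces `from blib2to3.pgen2.token import *`, which the old module did twice, with a single explicit import list. That fixes flake8's F403/F405: under a star import the linter cannot prove that names like `DEDENT` or `tok_name` are defined anywhere. A minimal sketch of the explicit style, assuming blib2to3 is importable (it ships inside the black distribution):

    # Explicit imports make every name traceable and let linters catch
    # typos; a star import here would raise F403/F405 instead.
    from blib2to3.pgen2.token import NEWLINE, tok_name

    print(tok_name[NEWLINE])  # -> "NEWLINE"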

src/blib2to3/pygram.py

@@ -5,12 +5,10 @@
 # Python imports
 import os
-
 from typing import Union
 
 # Local imports
 from .pgen2 import driver
-
 from .pgen2.grammar import Grammar
 
 # Moved into initialize because mypyc can't handle __file__ (XXX bug)

src/blib2to3/pytree.py

@@ -15,15 +15,16 @@
 from typing import (
     Any,
     Dict,
+    Iterable,
     Iterator,
     List,
     Optional,
+    Set,
     Tuple,
     TypeVar,
     Union,
-    Set,
-    Iterable,
 )
+
 from blib2to3.pgen2.grammar import Grammar
 
 __author__ = "Guido van Rossum <guido@python.org>"
@@ -58,7 +59,6 @@ def type_repr(type_num: int) -> Union[str, int]:
 
 
 class Base:
-
     """
     Abstract base class for Node and Leaf.
@@ -237,7+237,6 @@ def get_suffix(self) -> str:
 
 
 class Node(Base):
-
     """Concrete implementation for interior nodes."""
 
     fixers_applied: Optional[List[Any]]
@@ -378,7 +377,6 @@ def update_sibling_maps(self) -> None:
 
 
 class Leaf(Base):
-
     """Concrete implementation for leaf nodes."""
 
     # Default values for instance variables
@@ -506,7 +504,6 @@ def convert(gr: Grammar, raw_node: RawNode) -> NL:
 
 
 class BasePattern:
-
     """
     A pattern is a tree matching pattern.
@@ -646,7 +643,6 @@ def _submatch(self, node, results=None):
 
 
 class NodePattern(BasePattern):
-
     wildcards: bool = False
 
     def __init__(
@@ -715,7 +711,6 @@ def _submatch(self, node, results=None) -> bool:
 
 
 class WildcardPattern(BasePattern):
-
     """
     A wildcard pattern can match zero or more nodes.