Fix most blib2to3 lint (#3794)

Shantanu authored 2023-07-16 21:33:58 -07:00 · committed by GitHub
parent 8d80aecd50 · commit c1e30d97fe
11 changed files with 72 additions and 80 deletions

.pre-commit-config.yaml

@@ -1,6 +1,6 @@
 # Note: don't use this config for your own repositories. Instead, see
 # "Version control integration" in docs/integrations/source_version_control.md
-exclude: ^(src/blib2to3/|profiling/|tests/data/)
+exclude: ^(profiling/|tests/data/)
 repos:
   - repo: local
     hooks:
@@ -36,6 +36,7 @@ repos:
           - flake8-bugbear
           - flake8-comprehensions
           - flake8-simplify
+        exclude: ^src/blib2to3/
   - repo: https://github.com/pre-commit/mirrors-mypy
     rev: v1.4.1

pyproject.toml

@@ -12,8 +12,7 @@ include = '\.pyi?$'
 extend-exclude = '''
 /(
   # The following are specific to Black, you probably don't want those.
-  | blib2to3
-  | tests/data
+  tests/data
   | profiling
 )/
 '''
@@ -183,7 +182,7 @@ atomic = true
 profile = "black"
 line_length = 88
 skip_gitignore = true
-skip_glob = ["src/blib2to3", "tests/data", "profiling"]
+skip_glob = ["tests/data", "profiling"]
 known_first_party = ["black", "blib2to3", "blackd", "_black_version"]

 [tool.pytest.ini_options]

src/blib2to3/README

@@ -1,18 +1,19 @@
-A subset of lib2to3 taken from Python 3.7.0b2.
-Commit hash: 9c17e3a1987004b8bcfbe423953aad84493a7984
+A subset of lib2to3 taken from Python 3.7.0b2. Commit hash:
+9c17e3a1987004b8bcfbe423953aad84493a7984

 Reasons for forking:
+
 - consistent handling of f-strings for users of Python < 3.6.2
-- backport of BPO-33064 that fixes parsing files with trailing commas after
-  *args and **kwargs
-- backport of GH-6143 that restores the ability to reformat legacy usage of
-  `async`
+- backport of BPO-33064 that fixes parsing files with trailing commas after \*args and
+  \*\*kwargs
+- backport of GH-6143 that restores the ability to reformat legacy usage of `async`
 - support all types of string literals
 - better ability to debug (better reprs)
 - INDENT and DEDENT don't hold whitespace and comment prefixes
 - ability to Cythonize

 Change Log:
+
 - Changes default logger used by Driver
 - Backported the following upstream parser changes:
   - "bpo-42381: Allow walrus in set literals and set comprehensions (GH-23332)"

src/blib2to3/pgen2/driver.py

@@ -17,30 +17,21 @@
 # Python imports
 import io
-import os
 import logging
+import os
 import pkgutil
 import sys
-from typing import (
-    Any,
-    cast,
-    IO,
-    Iterable,
-    List,
-    Optional,
-    Iterator,
-    Tuple,
-    Union,
-)
 from contextlib import contextmanager
 from dataclasses import dataclass, field
-
-# Pgen imports
-from . import grammar, parse, token, tokenize, pgen
 from logging import Logger
-from blib2to3.pytree import NL
+from typing import IO, Any, Iterable, Iterator, List, Optional, Tuple, Union, cast
+
 from blib2to3.pgen2.grammar import Grammar
 from blib2to3.pgen2.tokenize import GoodTokenInfo
+from blib2to3.pytree import NL
+
+# Pgen imports
+from . import grammar, parse, pgen, token, tokenize

 Path = Union[str, "os.PathLike[str]"]
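
Note: the reordering above matches what isort produces under the settings shown earlier in pyproject.toml (profile = "black", known_first_party including blib2to3): within the stdlib group, straight "import x" lines sort before "from x import y" lines and each module's from-import is consolidated onto one line; first-party imports follow after a blank line; relative imports come last. A minimal sketch of that layout, not taken from the diff:

    import logging
    import os
    from typing import Any, Optional  # straight imports sort before from-imports

    from blib2to3.pgen2.grammar import Grammar  # first-party group, after a blank line

    # Relative imports (e.g. "from . import token") form the final group.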

src/blib2to3/pgen2/literals.py

@@ -4,10 +4,8 @@
 """Safely evaluate Python string literals without using eval()."""

 import re
-
 from typing import Dict, Match

-
 simple_escapes: Dict[str, str] = {
     "a": "\a",
     "b": "\b",

src/blib2to3/pgen2/parse.py

@@ -10,24 +10,25 @@
 """
 from contextlib import contextmanager
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Iterator,
+    List,
+    Optional,
+    Set,
+    Tuple,
+    Union,
+    cast,
+)
+
+from blib2to3.pgen2.grammar import Grammar
+from blib2to3.pytree import NL, Context, Leaf, Node, RawNode, convert

 # Local imports
 from . import grammar, token, tokenize
-from typing import (
-    cast,
-    Any,
-    Optional,
-    Union,
-    Tuple,
-    Dict,
-    List,
-    Iterator,
-    Callable,
-    Set,
-    TYPE_CHECKING,
-)
-from blib2to3.pgen2.grammar import Grammar
-from blib2to3.pytree import convert, NL, Context, RawNode, Leaf, Node

 if TYPE_CHECKING:
     from blib2to3.pgen2.driver import TokenProxy
@@ -112,7 +113,9 @@ def add_token(self, tok_type: int, tok_val: str, raw: bool = False) -> None:
             args.insert(0, ilabel)
         func(*args)

-    def determine_route(self, value: Optional[str] = None, force: bool = False) -> Optional[int]:
+    def determine_route(
+        self, value: Optional[str] = None, force: bool = False
+    ) -> Optional[int]:
         alive_ilabels = self.ilabels
         if len(alive_ilabels) == 0:
             *_, most_successful_ilabel = self._dead_ilabels

src/blib2to3/pgen2/pgen.py

@@ -1,25 +1,22 @@
 # Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
 # Licensed to PSF under a Contributor Agreement.

-# Pgen imports
-from . import grammar, token, tokenize
-
+import os
 from typing import (
+    IO,
     Any,
     Dict,
-    IO,
     Iterator,
     List,
+    NoReturn,
     Optional,
+    Sequence,
     Tuple,
     Union,
-    Sequence,
-    NoReturn,
 )
-from blib2to3.pgen2 import grammar
-from blib2to3.pgen2.tokenize import GoodTokenInfo
-import os
+
+from blib2to3.pgen2 import grammar, token, tokenize
+from blib2to3.pgen2.tokenize import GoodTokenInfo

 Path = Union[str, "os.PathLike[str]"]
@@ -149,7 +146,7 @@ def calcfirst(self, name: str) -> None:
         state = dfa[0]
         totalset: Dict[str, int] = {}
         overlapcheck = {}
-        for label, next in state.arcs.items():
+        for label in state.arcs:
             if label in self.dfas:
                 if label in self.first:
                     fset = self.first[label]
@@ -190,9 +187,9 @@ def parse(self) -> Tuple[Dict[str, List["DFAState"]], str]:
             # self.dump_nfa(name, a, z)
             dfa = self.make_dfa(a, z)
             # self.dump_dfa(name, dfa)
-            oldlen = len(dfa)
+            # oldlen = len(dfa)
             self.simplify_dfa(dfa)
-            newlen = len(dfa)
+            # newlen = len(dfa)
             dfas[name] = dfa
             # print name, oldlen, newlen
             if startsymbol is None:
@@ -346,7 +343,7 @@ def parse_atom(self) -> Tuple["NFAState", "NFAState"]:
             self.raise_error(
                 "expected (...) or NAME or STRING, got %s/%s", self.type, self.value
             )
-            assert False
+            raise AssertionError

     def expect(self, type: int, value: Optional[Any] = None) -> str:
         if self.type != type or (value is not None and self.value != value):
@@ -368,7 +365,7 @@ def raise_error(self, msg: str, *args: Any) -> NoReturn:
         if args:
             try:
                 msg = msg % args
-            except:
+            except Exception:
                 msg = " ".join([msg] + list(map(str, args)))
         raise SyntaxError(msg, (self.filename, self.end[0], self.end[1], self.line))
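
Note: two of the pgen.py fixes above change behavior subtly rather than just style. A bare "except:" catches BaseException, including KeyboardInterrupt and SystemExit, while "except Exception:" lets those propagate; and "assert False" is compiled away under "python -O", whereas "raise AssertionError" fires unconditionally. A minimal sketch (not taken from the diff) illustrating both points:

    def format_message(msg: str, *args: object) -> str:
        try:
            return msg % args
        except Exception:  # a bare "except:" would also swallow KeyboardInterrupt
            return " ".join([msg] + [str(a) for a in args])

    def unreachable_branch() -> None:
        # "assert False" vanishes when Python runs with -O, silently
        # disabling the guard; raising directly keeps it unconditional.
        raise AssertionError("expected (...) or NAME or STRING")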

src/blib2to3/pgen2/token.py

@@ -1,8 +1,6 @@
 """Token constants (from "token.h")."""

-from typing import Dict
-from typing import Final
+from typing import Dict, Final

 # Taken from Python (r53757) and modified to include some tokens
 # originally monkeypatched in by pgen2.tokenize

src/blib2to3/pgen2/tokenize.py

@@ -30,28 +30,41 @@
 import sys
 from typing import (
     Callable,
+    Final,
     Iterable,
     Iterator,
     List,
     Optional,
+    Pattern,
     Set,
     Tuple,
-    Pattern,
     Union,
     cast,
 )
-from typing import Final

-from blib2to3.pgen2.token import *
 from blib2to3.pgen2.grammar import Grammar
+from blib2to3.pgen2.token import (
+    ASYNC,
+    AWAIT,
+    COMMENT,
+    DEDENT,
+    ENDMARKER,
+    ERRORTOKEN,
+    INDENT,
+    NAME,
+    NEWLINE,
+    NL,
+    NUMBER,
+    OP,
+    STRING,
+    tok_name,
+)

 __author__ = "Ka-Ping Yee <ping@lfw.org>"
 __credits__ = "GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro"

 import re
 from codecs import BOM_UTF8, lookup
-from blib2to3.pgen2.token import *

 from . import token
@@ -334,7 +347,7 @@ def read_or_stop() -> bytes:
         try:
             return readline()
         except StopIteration:
-            return b''
+            return b""

     def find_cookie(line: bytes) -> Optional[str]:
         try:
@@ -676,14 +689,12 @@ def generate_tokens(
                 yield stashed
                 stashed = None

-    for indent in indents[1:]:  # pop remaining indent levels
+    for _indent in indents[1:]:  # pop remaining indent levels
         yield (DEDENT, "", (lnum, 0), (lnum, 0), "")
     yield (ENDMARKER, "", (lnum, 0), (lnum, 0), "")


 if __name__ == "__main__":  # testing
     import sys

     if len(sys.argv) > 1:
         tokenize(open(sys.argv[1]).readline)
     else:
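
Note: the star-import replacement above is the core of the tokenize.py cleanup. With "from blib2to3.pgen2.token import *", flake8 cannot tell which names the module actually defines (F403) or whether a reference is undefined (F405); listing the fourteen needed names explicitly makes every reference checkable. The "_indent" rename follows the same idea: an underscore prefix marks a loop variable as intentionally unused. A small sketch of both patterns, using the stdlib token module rather than blib2to3:

    from token import NAME, NUMBER, OP  # explicit: each name is traceable and lintable

    def count_levels(indents: list) -> int:
        count = 0
        for _indent in indents:  # underscore prefix: value deliberately unused
            count += 1
        return count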

src/blib2to3/pygram.py

@@ -5,12 +5,10 @@
 # Python imports
 import os
-
 from typing import Union

 # Local imports
 from .pgen2 import driver
-
 from .pgen2.grammar import Grammar

 # Moved into initialize because mypyc can't handle __file__ (XXX bug)

src/blib2to3/pytree.py

@@ -15,15 +15,16 @@
 from typing import (
     Any,
     Dict,
+    Iterable,
     Iterator,
     List,
     Optional,
+    Set,
     Tuple,
     TypeVar,
     Union,
-    Set,
-    Iterable,
 )
+
 from blib2to3.pgen2.grammar import Grammar

 __author__ = "Guido van Rossum <guido@python.org>"
@@ -58,7 +59,6 @@ def type_repr(type_num: int) -> Union[str, int]:

-
 class Base:
     """
     Abstract base class for Node and Leaf.
@@ -237,7 +237,6 @@ def get_suffix(self) -> str:

-
 class Node(Base):
     """Concrete implementation for interior nodes."""

     fixers_applied: Optional[List[Any]]
@@ -378,7 +377,6 @@ def update_sibling_maps(self) -> None:

-
 class Leaf(Base):
     """Concrete implementation for leaf nodes."""

     # Default values for instance variables
@@ -506,7 +504,6 @@ def convert(gr: Grammar, raw_node: RawNode) -> NL:

-
 class BasePattern:
     """
     A pattern is a tree matching pattern.
@@ -646,7 +643,6 @@ def _submatch(self, node, results=None):

-
 class NodePattern(BasePattern):
     wildcards: bool = False

     def __init__(
@@ -715,7 +711,6 @@ def _submatch(self, node, results=None) -> bool:

-
 class WildcardPattern(BasePattern):
     """
     A wildcard pattern can match zero or more nodes.