Fix standalone comments inside complex blocks crashing Black (#4016)
Bracket depth is not an accurate indicator of a standalone comment's position inside more complex blocks, because the depth can be virtual (inside for loops' and lambdas' parameter blocks) or come from optional parentheses. Here we stop accumulating lines when a standalone comment appears in a complex block, and make standalone-comment processing simpler. The fundamental idea is that if we have a standalone comment, it needs to go on its own line, so we always have to split. This is not perfect, but it is at least a first step.
parent 50ed6221d9
commit 66008fda5d
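
For context, a minimal illustration of the kind of input this change addresses, adapted from the test cases added below (names are illustrative only): a standalone comment inside a comprehension's for/in clause, where bracket depth alone does not describe its position.

    [
        x
        # a standalone comment at the loop's "virtual" depth
        for x
        # another one between `for` and `in`
        in range(10)
    ]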
@@ -13,6 +13,7 @@
 - Fix crash on formatting bytes strings that look like docstrings (#4003)
 - Fix crash when whitespace followed a backslash before newline in a docstring (#4008)
+- Fix standalone comments inside complex blocks crashing Black (#4016)
 - Fix crash on formatting code like `await (a ** b)` (#3994)
@@ -127,6 +127,13 @@ def mark(self, leaf: Leaf) -> None:
         self.maybe_increment_lambda_arguments(leaf)
         self.maybe_increment_for_loop_variable(leaf)
 
+    def any_open_for_or_lambda(self) -> bool:
+        """Return True if there is an open for or lambda expression on the line.
+
+        See maybe_increment_for_loop_variable and maybe_increment_lambda_arguments
+        for details."""
+        return bool(self._for_loop_depths or self._lambda_argument_depths)
+
     def any_open_brackets(self) -> bool:
         """Return True if there is an yet unmatched open bracket on the line."""
         return bool(self.bracket_match)
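
As a rough usage sketch (hypothetical snippet, not part of the diff): the new helper reports whether the tracker currently sits inside a for clause or a lambda's parameter list, i.e. at a depth that is "virtual" rather than produced by a real open bracket.

    tracker = BracketTracker()
    for leaf in line.leaves:
        tracker.mark(leaf)
        if tracker.any_open_for_or_lambda():
            # Between a for loop's (or comprehension's) `for` and `in`, or
            # between `lambda` and its `:` -- no actual bracket is open here.
            ...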
@@ -861,8 +861,6 @@ def _maybe_split_omitting_optional_parens(
             # it's not an import (optional parens are the only thing we can split on
             # in this case; attempting a split without them is a waste of time)
             and not line.is_import
-            # there are no standalone comments in the body
-            and not rhs.body.contains_standalone_comments(0)
             # and we can actually remove the parens
             and can_omit_invisible_parens(rhs, mode.line_length)
         ):
@@ -1181,7 +1179,7 @@ def standalone_comment_split(
     line: Line, features: Collection[Feature], mode: Mode
 ) -> Iterator[Line]:
     """Split standalone comments from the rest of the line."""
-    if not line.contains_standalone_comments(0):
+    if not line.contains_standalone_comments():
         raise CannotSplit("Line does not have any standalone comments")
 
     current_line = Line(
@@ -1,6 +1,5 @@
 import itertools
 import math
-import sys
 from dataclasses import dataclass, field
 from typing import (
     Callable,
@@ -103,7 +102,10 @@ def append_safe(self, leaf: Leaf, preformatted: bool = False) -> None:
         Raises ValueError when any `leaf` is appended after a standalone comment
         or when a standalone comment is not the first leaf on the line.
         """
-        if self.bracket_tracker.depth == 0:
+        if (
+            self.bracket_tracker.depth == 0
+            or self.bracket_tracker.any_open_for_or_lambda()
+        ):
             if self.is_comment:
                 raise ValueError("cannot append to standalone comments")
 
@@ -233,10 +235,10 @@ def is_fmt_pass_converted(
             leaf.fmt_pass_converted_first_leaf
         )
 
-    def contains_standalone_comments(self, depth_limit: int = sys.maxsize) -> bool:
+    def contains_standalone_comments(self) -> bool:
         """If so, needs to be split before emitting."""
        for leaf in self.leaves:
-            if leaf.type == STANDALONE_COMMENT and leaf.bracket_depth <= depth_limit:
+            if leaf.type == STANDALONE_COMMENT:
                 return True
 
         return False
@@ -982,6 +984,23 @@ def can_omit_invisible_parens(
     are too long.
     """
     line = rhs.body
+
+    # We need optional parens in order to split standalone comments to their own lines
+    # if there are no nested parens around the standalone comments
+    closing_bracket: Optional[Leaf] = None
+    for leaf in reversed(line.leaves):
+        if closing_bracket and leaf is closing_bracket.opening_bracket:
+            closing_bracket = None
+        if leaf.type == STANDALONE_COMMENT and not closing_bracket:
+            return False
+        if (
+            not closing_bracket
+            and leaf.type in CLOSING_BRACKETS
+            and leaf.opening_bracket in line.leaves
+            and leaf.value
+        ):
+            closing_bracket = leaf
+
     bt = line.bracket_tracker
     if not bt.delimiters:
         # Without delimiters the optional parentheses are useless.
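
For intuition, a simplified version of one of the inputs added below that exercises this scan: the standalone comments sit directly inside the outer, optional parentheses with no nested brackets around them, so those parentheses are the only split point that can keep each comment on its own line and cannot be omitted.

    if (
        # comment1
        a
        # comment2
        or b
    ):
        print("Foo")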
tests/data/cases/comments_in_blocks.py (new file)
@@ -0,0 +1,111 @@
+# Test cases from:
+# - https://github.com/psf/black/issues/1798
+# - https://github.com/psf/black/issues/1499
+# - https://github.com/psf/black/issues/1211
+# - https://github.com/psf/black/issues/563
+
+(
+    lambda
+    # a comment
+    : None
+)
+
+(
+    lambda:
+    # b comment
+    None
+)
+
+(
+    lambda
+    # a comment
+    :
+    # b comment
+    None
+)
+
+[
+    x
+    # Let's do this
+    for
+    # OK?
+    x
+    # Some comment
+    # And another
+    in
+    # One more
+    y
+]
+
+return [
+    (offers[offer_index], 1.0)
+    for offer_index, _
+    # avoid returning any offers that don't match the grammar so
+    # that the return values here are consistent with what would be
+    # returned in AcceptValidHeader
+    in self._parse_and_normalize_offers(offers)
+]
+
+from foo import (
+    bar,
+    # qux
+)
+
+
+def convert(collection):
+    # replace all variables by integers
+    replacement_dict = {
+        variable: f"{index}"
+        for index, variable
+        # 0 is reserved as line terminator
+        in enumerate(collection.variables(), start=1)
+    }
+
+
+{
+    i: i
+    for i
+    # a comment
+    in range(5)
+}
+
+
+def get_subtree_proof_nodes(
+    chunk_index_groups: Sequence[Tuple[int, ...], ...],
+) -> Tuple[int, ...]:
+    subtree_node_paths = (
+        # We take a candidate element from each group and shift it to
+        # remove the bits that are not common to other group members, then
+        # we convert it to a tree path that all elements from this group
+        # have in common.
+        chunk_index
+        for chunk_index, bits_to_truncate
+        # Each group will contain an even "power-of-two" number of# elements.
+        # This tells us how many tailing bits each element has# which need to
+        # be truncated to get the group's common prefix.
+        in ((group[0], (len(group) - 1).bit_length()) for group in chunk_index_groups)
+    )
+    return subtree_node_paths
+
+
+if (
+    # comment1
+    a
+    # comment2
+    or (
+        # comment3
+        (
+            # comment4
+            b
+        )
+        # comment5
+        and
+        # comment6
+        c
+        or (
+            # comment7
+            d
+        )
+    )
+):
+    print("Foo")
@@ -152,6 +152,16 @@ def foo_square_brackets(request):
 
 foo(**{x: y for x, y in enumerate(["long long long long line","long long long long line"])})
 
+for foo in ["a", "b"]:
+    output.extend([
+        individual
+        for
+        # Foobar
+        container in xs_by_y[foo]
+        # Foobar
+        for individual in container["nested"]
+    ])
+
 # output
 def foo_brackets(request):
     return JsonResponse({
@@ -323,3 +333,13 @@ def foo_square_brackets(request):
 foo(**{
     x: y for x, y in enumerate(["long long long long line", "long long long long line"])
 })
+
+for foo in ["a", "b"]:
+    output.extend([
+        individual
+        for
+        # Foobar
+        container in xs_by_y[foo]
+        # Foobar
+        for individual in container["nested"]
+    ])