Speed up blib2to3 tokenization by using `str.startswith` with a tuple of prefixes (#4541)
This commit is contained in:
parent
9431e98522
commit
fdabd424e2
@ -43,6 +43,7 @@
|
||||
### Performance
|
||||
|
||||
<!-- Changes that improve Black's performance. -->
|
||||
- Speed up the `is_fstring_start` function in Black's tokenizer (#4541)
|
||||
|
||||
### Output
|
||||
|
||||
|
@ -221,7 +221,7 @@ def _combinations(*l: str) -> set[str]:
|
||||
| {f"{prefix}'" for prefix in _strprefixes | _fstring_prefixes}
|
||||
| {f'{prefix}"' for prefix in _strprefixes | _fstring_prefixes}
|
||||
)
|
||||
fstring_prefix: Final = (
|
||||
fstring_prefix: Final = tuple(
|
||||
{f"{prefix}'" for prefix in _fstring_prefixes}
|
||||
| {f'{prefix}"' for prefix in _fstring_prefixes}
|
||||
| {f"{prefix}'''" for prefix in _fstring_prefixes}
|
||||
@ -459,7 +459,7 @@ def untokenize(iterable: Iterable[TokenInfo]) -> str:
|
||||
|
||||
|
||||
def is_fstring_start(token: str) -> bool:
    """Return True if `token` begins with an f-string prefix.

    `fstring_prefix` is a module-level tuple of all recognized f-string
    prefix spellings (built elsewhere in this file). Passing the whole
    tuple to `str.startswith` performs the check in a single C-level call,
    which is faster than looping over the prefixes in Python
    (the previous `builtins.any(token.startswith(p) for p in ...)` form).
    """
    return token.startswith(fstring_prefix)
|
||||
|
||||
|
||||
def _split_fstring_start_and_middle(token: str) -> tuple[str, str]:
|
||||
|
Loading…
Reference in New Issue
Block a user