Compare commits
451 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
7987951e24 | ||
![]() |
e5e5dad792 | ||
![]() |
24e4cb20ab | ||
![]() |
e7bf7b4619 | ||
![]() |
71e380aedf | ||
![]() |
2630801f95 | ||
![]() |
b0f36f5b42 | ||
![]() |
314f8cf92b | ||
![]() |
d0ff3bd6cb | ||
![]() |
a41dc89f1f | ||
![]() |
950ec38c11 | ||
![]() |
2c135edf37 | ||
![]() |
6144c46c6a | ||
![]() |
dd278cb316 | ||
![]() |
dbb14eac93 | ||
![]() |
5342d2eeda | ||
![]() |
9f38928414 | ||
![]() |
3e9dd25dad | ||
![]() |
bb802cf19a | ||
![]() |
5ae38dd370 | ||
![]() |
45cbe572ee | ||
![]() |
fccd70cff1 | ||
![]() |
00c0d6d91a | ||
![]() |
0580ecbef3 | ||
![]() |
ed64d89faa | ||
![]() |
452d3b68f4 | ||
![]() |
256f3420b1 | ||
![]() |
00cb6d15c5 | ||
![]() |
14e1de805a | ||
![]() |
5f23701708 | ||
![]() |
9c129567e7 | ||
![]() |
c02ca47daa | ||
![]() |
edaf085a18 | ||
![]() |
b844c8a136 | ||
![]() |
d82da0f0e9 | ||
![]() |
8a737e727a | ||
![]() |
d330deea00 | ||
![]() |
3d8129001f | ||
![]() |
459562c71a | ||
![]() |
99dbf3006b | ||
![]() |
c0b92f3888 | ||
![]() |
e58baf15b9 | ||
![]() |
1455ae4731 | ||
![]() |
584d0331c8 | ||
![]() |
6e9654065c | ||
![]() |
8dc912774e | ||
![]() |
40b73f2fb5 | ||
![]() |
e157ba4de5 | ||
![]() |
fdabd424e2 | ||
![]() |
9431e98522 | ||
![]() |
3b00112ac5 | ||
![]() |
0aabac4fe0 | ||
![]() |
ed33205579 | ||
![]() |
6000d37f09 | ||
![]() |
30759ca782 | ||
![]() |
84ac1a947d | ||
![]() |
0db1173bbc | ||
![]() |
3fab5ade71 | ||
![]() |
e54f86bae4 | ||
![]() |
96ca1b6be3 | ||
![]() |
17efac45f9 | ||
![]() |
73f651f02f | ||
![]() |
f6c7c98f34 | ||
![]() |
d670b0439c | ||
![]() |
56896264e4 | ||
![]() |
efd9778873 | ||
![]() |
c472557ba8 | ||
![]() |
53a219056d | ||
![]() |
c98fc0c128 | ||
![]() |
f54f34799b | ||
![]() |
484a669699 | ||
![]() |
fff747d61b | ||
![]() |
9995bffbe4 | ||
![]() |
7452902c77 | ||
![]() |
32ebb93003 | ||
![]() |
1b2427a2b7 | ||
![]() |
a22b1ebbfd | ||
![]() |
b7d0e7212b | ||
![]() |
f1a2f92bba | ||
![]() |
8d9d18c033 | ||
![]() |
bbfdba3a5e | ||
![]() |
8fb2add1f7 | ||
![]() |
2a45cecf29 | ||
![]() |
b4d6d8632d | ||
![]() |
ac018c16ca | ||
![]() |
058da5f81a | ||
![]() |
98a580bbdc | ||
![]() |
f50aba4984 | ||
![]() |
6b27ef53e2 | ||
![]() |
26aeebe9fb | ||
![]() |
9e13708be8 | ||
![]() |
ac28187bf4 | ||
![]() |
823a7b0ff0 | ||
![]() |
699b45aef7 | ||
![]() |
c20423249e | ||
![]() |
5ec91686ff | ||
![]() |
7e3e8f5bd9 | ||
![]() |
b965c2a502 | ||
![]() |
9ccf279a17 | ||
![]() |
14b6e61970 | ||
![]() |
b1c4dd96d7 | ||
![]() |
4b4ae43e8b | ||
![]() |
7fa1faf83a | ||
![]() |
8827accf56 | ||
![]() |
b0da11d370 | ||
![]() |
721dff5493 | ||
![]() |
7e2afc9bfd | ||
![]() |
1ad5263f2f | ||
![]() |
9ff047a957 | ||
![]() |
5e571ccbbe | ||
![]() |
978bc505ac | ||
![]() |
b1f7b9f87d | ||
![]() |
b677a643c5 | ||
![]() |
8447af4d8d | ||
![]() |
9c1fd463e1 | ||
![]() |
b9c63230b4 | ||
![]() |
4af12c499e | ||
![]() |
c827551b23 | ||
![]() |
3be19b306f | ||
![]() |
f71925885c | ||
![]() |
ccfb0db4d5 | ||
![]() |
c801cd60b1 | ||
![]() |
f22b2437d5 | ||
![]() |
75eb55764e | ||
![]() |
f2da85fe7f | ||
![]() |
0ab0b75717 | ||
![]() |
0c033f3eb7 | ||
![]() |
455de7703e | ||
![]() |
dbb956b0d3 | ||
![]() |
3702ba224e | ||
![]() |
e4aaa8a994 | ||
![]() |
ba88fc372e | ||
![]() |
5683242fd4 | ||
![]() |
e7fb048281 | ||
![]() |
3f0f8f1956 | ||
![]() |
2f88085da5 | ||
![]() |
12ce3db077 | ||
![]() |
1354be2525 | ||
![]() |
f4b644b82f | ||
![]() |
551ede2825 | ||
![]() |
944b99aa91 | ||
![]() |
7134754ef4 | ||
![]() |
7669381420 | ||
![]() |
735733b205 | ||
![]() |
9dc9114aef | ||
![]() |
66e2b11571 | ||
![]() |
8fe627072f | ||
![]() |
6b25e7cdab | ||
![]() |
07fe1ca88a | ||
![]() |
3383f531bc | ||
![]() |
c8f1a5542c | ||
![]() |
836acad863 | ||
![]() |
13bd0925eb | ||
![]() |
c9d2635b55 | ||
![]() |
bf1195612c | ||
![]() |
97993f997f | ||
![]() |
f78b15712a | ||
![]() |
8332a75e82 | ||
![]() |
552baf8229 | ||
![]() |
f000936726 | ||
![]() |
7b5a657285 | ||
![]() |
1abcffc818 | ||
![]() |
719e67462c | ||
![]() |
e5510afc06 | ||
![]() |
6af7d11096 | ||
![]() |
f03ee113c9 | ||
![]() |
e4bfedbec2 | ||
![]() |
d0287e1f75 | ||
![]() |
0f18001abf | ||
![]() |
899002399a | ||
![]() |
ea66d40dd7 | ||
![]() |
d1d4fc58d3 | ||
![]() |
6fdf8a4af2 | ||
![]() |
8af439407c | ||
![]() |
35e9776919 | ||
![]() |
23dfc5b2c3 | ||
![]() |
a20100395c | ||
![]() |
dab37a6a11 | ||
![]() |
32230e6f5c | ||
![]() |
7edb50f5a0 | ||
![]() |
3e80de3447 | ||
![]() |
a08b480a2f | ||
![]() |
632f44bd68 | ||
![]() |
9728b8e9b8 | ||
![]() |
2623269dab | ||
![]() |
24c9c01e91 | ||
![]() |
cca3c0fd9f | ||
![]() |
d6e11ca399 | ||
![]() |
8912a33321 | ||
![]() |
2bc5ce8ae1 | ||
![]() |
177e306363 | ||
![]() |
d919746fae | ||
![]() |
0b4364b7e3 | ||
![]() |
e026c93888 | ||
![]() |
79fc1158a9 | ||
![]() |
8bf04549ff | ||
![]() |
1607e9ab20 | ||
![]() |
ed770ba4dd | ||
![]() |
659c29a41c | ||
![]() |
0e6e46b9eb | ||
![]() |
4f47cac192 | ||
![]() |
bccec8adfb | ||
![]() |
7d789469ed | ||
![]() |
17f7f297ef | ||
![]() |
f7d552d9b7 | ||
![]() |
a5196e6f1f | ||
![]() |
59b9d858a3 | ||
![]() |
8fe602b1fa | ||
![]() |
6f3fb78444 | ||
![]() |
995e4ada14 | ||
![]() |
7f60f3dbd7 | ||
![]() |
9a331d606f | ||
![]() |
b7c3a9fedd | ||
![]() |
e11eaf2f44 | ||
![]() |
b1d17ef9a2 | ||
![]() |
4ceed0b958 | ||
![]() |
269190274b | ||
![]() |
8e0a9dee1b | ||
![]() |
b9ad4da2e8 | ||
![]() |
fe3376141c | ||
![]() |
c35924663c | ||
![]() |
db9c592967 | ||
![]() |
bf6cabc804 | ||
![]() |
c80685f361 | ||
![]() |
51786141cc | ||
![]() |
1b831f214a | ||
![]() |
ec91a2be3c | ||
![]() |
8fec1c3085 | ||
![]() |
35ce37ded7 | ||
![]() |
d9ad09a32b | ||
![]() |
ebd543c0ac | ||
![]() |
eb7661f8ab | ||
![]() |
0c9899956d | ||
![]() |
9aea9768cb | ||
![]() |
67b23d7185 | ||
![]() |
ce28be2705 | ||
![]() |
61b529b7d1 | ||
![]() |
e7e122e9ff | ||
![]() |
432d9050c3 | ||
![]() |
50e287cece | ||
![]() |
e4ae213f06 | ||
![]() |
50d5756e8e | ||
![]() |
3416b2c82d | ||
![]() |
66ec056e39 | ||
![]() |
a0e270d0f2 | ||
![]() |
69d49c5a6f | ||
![]() |
fb5e5d2be6 | ||
![]() |
be336bb67f | ||
![]() |
a8062983cd | ||
![]() |
89e28ea66f | ||
![]() |
ec4a1525ee | ||
![]() |
30c6bb3651 | ||
![]() |
f23b845a29 | ||
![]() |
96faa3b469 | ||
![]() |
80a166f2e1 | ||
![]() |
11da02da72 | ||
![]() |
d93a942a79 | ||
![]() |
c4cd200a06 | ||
![]() |
85b1c71a34 | ||
![]() |
5773d5cd2b | ||
![]() |
1b6b0bfcac | ||
![]() |
58f31a70ef | ||
![]() |
2a1c67e0b2 | ||
![]() |
72e7a2e43e | ||
![]() |
1a7d9c2f58 | ||
![]() |
f4c7be5445 | ||
![]() |
2e4fac9d87 | ||
![]() |
66008fda5d | ||
![]() |
50ed6221d9 | ||
![]() |
46be1f8e54 | ||
![]() |
ecbd9e8cf7 | ||
![]() |
e808e61db8 | ||
![]() |
9e3daa1107 | ||
![]() |
448324637d | ||
![]() |
c54c213d6a | ||
![]() |
e2f2bd076f | ||
![]() |
5758da6e3c | ||
![]() |
e50110353a | ||
![]() |
ddfecf06c1 | ||
![]() |
f7cbe4ae1b | ||
![]() |
5515a5ac7a | ||
![]() |
5edd99312a | ||
![]() |
9a90fa2cb7 | ||
![]() |
7686989fc8 | ||
![]() |
53c4278a4c | ||
![]() |
c712d57ca9 | ||
![]() |
caef19689b | ||
![]() |
c369e446f9 | ||
![]() |
7bfa35cca8 | ||
![]() |
de701fe6aa | ||
![]() |
f7174bfc43 | ||
![]() |
878937bcc3 | ||
![]() |
1d4c31aa58 | ||
![]() |
ef1048d5f8 | ||
![]() |
744d23b348 | ||
![]() |
8de4be5168 | ||
![]() |
c0adca321d | ||
![]() |
a7643fac8d | ||
![]() |
d291c2338c | ||
![]() |
7f1c578b89 | ||
![]() |
2db5ab0a7b | ||
![]() |
0a37888e79 | ||
![]() |
882d8795c6 | ||
![]() |
9edba85f71 | ||
![]() |
bb588073ab | ||
![]() |
722735d20e | ||
![]() |
abe57e3d92 | ||
![]() |
1648ac5180 | ||
![]() |
6f84f65285 | ||
![]() |
3bb92146f5 | ||
![]() |
935f303a0a | ||
![]() |
b7717c3f1e | ||
![]() |
7aa37ea0ad | ||
![]() |
5d5bf6e087 | ||
![]() |
a69bda3b9b | ||
![]() |
715f60c11b | ||
![]() |
a8f68f57fe | ||
![]() |
738c2789cc | ||
![]() |
3457ec48af | ||
![]() |
3a2d76c7bc | ||
![]() |
27c05e1e24 | ||
![]() |
6c88e8e46e | ||
![]() |
36078bc83f | ||
![]() |
947bd3825e | ||
![]() |
9e9fdce9a8 | ||
![]() |
1b08cbc634 | ||
![]() |
f99ef6e190 | ||
![]() |
a91eb73064 | ||
![]() |
e7c3368c13 | ||
![]() |
9b82120ddb | ||
![]() |
3dcacdda0d | ||
![]() |
5f6ea5ff20 | ||
![]() |
8c5d96ffd3 | ||
![]() |
e974fc3c52 | ||
![]() |
7316a79318 | ||
![]() |
f5990e8547 | ||
![]() |
edf66baa21 | ||
![]() |
ab92daf408 | ||
![]() |
34ed4cf8fd | ||
![]() |
5a0615a7ed | ||
![]() |
e9356c1ff0 | ||
![]() |
004fb79706 | ||
![]() |
14f60c84c8 | ||
![]() |
b2f03f9132 | ||
![]() |
e73662ca7c | ||
![]() |
213cb65518 | ||
![]() |
e87737140f | ||
![]() |
62dca32dc5 | ||
![]() |
751583a1df | ||
![]() |
f7917453c9 | ||
![]() |
0b62b9c9a4 | ||
![]() |
c83ad6c077 | ||
![]() |
4eebfd1a7a | ||
![]() |
add161b367 | ||
![]() |
4e93f2aa01 | ||
![]() |
716fa08090 | ||
![]() |
b70b2c6196 | ||
![]() |
b40b01ffe3 | ||
![]() |
a20338cf10 | ||
![]() |
74d3009ba4 | ||
![]() |
8daa64a2e1 | ||
![]() |
df50fee7fd | ||
![]() |
58f1bf69d2 | ||
![]() |
47676bf593 | ||
![]() |
d9c249c25a | ||
![]() |
6310a405f6 | ||
![]() |
066aa9210a | ||
![]() |
c6a031e623 | ||
![]() |
793c2b5f9f | ||
![]() |
ade371fd1c | ||
![]() |
7c4fe83bd8 | ||
![]() |
66648c528a | ||
![]() |
c36e468794 | ||
![]() |
77f19944f6 | ||
![]() |
c160e4b7ce | ||
![]() |
59e8936768 | ||
![]() |
1b028cc9d9 | ||
![]() |
8a16b25fb1 | ||
![]() |
1a972e3e11 | ||
![]() |
133af57207 | ||
![]() |
d9d0a02d89 | ||
![]() |
c3235e6da7 | ||
![]() |
13bd4fffae | ||
![]() |
e7e8d6287b | ||
![]() |
0b301f8095 | ||
![]() |
2f68ac850b | ||
![]() |
068f6fb8fa | ||
![]() |
c1e30d97fe | ||
![]() |
8d80aecd50 | ||
![]() |
92e0f5b965 | ||
![]() |
0e26ada66d | ||
![]() |
6123b4ac26 | ||
![]() |
37895f8e50 | ||
![]() |
8d2110320b | ||
![]() |
f4490acfd7 | ||
![]() |
027afda403 | ||
![]() |
a062d5c985 | ||
![]() |
193ee766ca | ||
![]() |
38723bb778 | ||
![]() |
138769aa27 | ||
![]() |
ad3724b7ff | ||
![]() |
b8e2ec728c | ||
![]() |
257d392217 | ||
![]() |
2593af2c5d | ||
![]() |
f3b50e4669 | ||
![]() |
0b4d7d55f7 | ||
![]() |
114e8357e6 | ||
![]() |
4130c65578 | ||
![]() |
b4dca26c7d | ||
![]() |
cf4cc29819 | ||
![]() |
8e618f3869 | ||
![]() |
839ef35dc1 | ||
![]() |
f01aaa63a0 | ||
![]() |
63481bb926 | ||
![]() |
31b3b6701d | ||
![]() |
e1036119f2 | ||
![]() |
93989e995d | ||
![]() |
7be2735318 | ||
![]() |
c732a1f13a | ||
![]() |
453828d17d | ||
![]() |
d1248ca9be | ||
![]() |
e7783e9ab2 | ||
![]() |
01b8d3d409 | ||
![]() |
35722dff62 | ||
![]() |
688f78d380 | ||
![]() |
c76e0b03ec | ||
![]() |
898915d556 | ||
![]() |
3aad6e385b | ||
![]() |
a538ab7663 | ||
![]() |
a4032dce64 | ||
![]() |
c42178690e | ||
![]() |
3decbd6db9 | ||
![]() |
f95b43d6fa | ||
![]() |
c99417ffe8 | ||
![]() |
cd02c2809b | ||
![]() |
eedfc38322 | ||
![]() |
2fd9d8b339 | ||
![]() |
db3668a381 | ||
![]() |
dc188f2060 | ||
![]() |
c97b9c55b4 | ||
![]() |
64887aab03 | ||
![]() |
eb32729ab5 | ||
![]() |
a07871b9cd | ||
![]() |
e712e48e06 | ||
![]() |
de65741b8d | ||
![]() |
4b76a54815 | ||
![]() |
3da7339955 | ||
![]() |
02f81c6995 | ||
![]() |
f265ff5bcd | ||
![]() |
a552f7096a | ||
![]() |
96ee2fef3d |
@ -1,9 +0,0 @@
|
||||
[report]
|
||||
omit =
|
||||
src/blib2to3/*
|
||||
tests/data/*
|
||||
*/site-packages/*
|
||||
.tox/*
|
||||
|
||||
[run]
|
||||
relative_files = True
|
4
.flake8
4
.flake8
@ -1,8 +1,8 @@
|
||||
[flake8]
|
||||
# B905 should be enabled when we drop support for 3.9
|
||||
ignore = E203, E266, E501, W503, B905, B907
|
||||
ignore = E203, E266, E501, E701, E704, W503, B905, B907
|
||||
# line length is intentionally set to 80 here because black uses Bugbear
|
||||
# See https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#line-length for more details
|
||||
# See https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#bugbear for more details
|
||||
max-line-length = 80
|
||||
max-complexity = 18
|
||||
select = B,C,E,F,W,T4,B9
|
||||
|
@ -1,4 +1,3 @@
|
||||
node: $Format:%H$
|
||||
node-date: $Format:%cI$
|
||||
describe-name: $Format:%(describe:tags=true,match=*[0-9]*)$
|
||||
ref-names: $Format:%D$
|
||||
describe-name: $Format:%(describe:tags=true,match=[0-9]*)$
|
||||
|
1
.gitattributes
vendored
1
.gitattributes
vendored
@ -1 +1,2 @@
|
||||
.git_archival.txt export-subst
|
||||
*.py diff=python
|
||||
|
4
.github/ISSUE_TEMPLATE/bug_report.md
vendored
4
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@ -12,7 +12,9 @@ current development version. To confirm this, you have three options:
|
||||
|
||||
1. Update Black's version if a newer release exists: `pip install -U black`
|
||||
2. Use the online formatter at <https://black.vercel.app/?version=main>, which will use
|
||||
the latest main branch.
|
||||
the latest main branch. Note that the online formatter currently runs on
|
||||
an older version of Python and may not support newer syntax, such as the
|
||||
extended f-string syntax added in Python 3.12.
|
||||
3. Or run _Black_ on your machine:
|
||||
- create a new virtualenv (make sure it's the same Python version);
|
||||
- clone this repository;
|
||||
|
1
.github/dependabot.yml
vendored
1
.github/dependabot.yml
vendored
@ -14,4 +14,3 @@ updates:
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
labels: ["skip news", "C: dependencies", "T: documentation"]
|
||||
reviewers: ["ichard26"]
|
||||
|
2
.github/workflows/changelog.yml
vendored
2
.github/workflows/changelog.yml
vendored
@ -14,7 +14,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Grep CHANGES.md for PR number
|
||||
if: contains(github.event.pull_request.labels.*.name, 'skip news') != true
|
||||
|
40
.github/workflows/diff_shades.yml
vendored
40
.github/workflows/diff_shades.yml
vendored
@ -19,14 +19,14 @@ jobs:
|
||||
matrix: ${{ steps.set-config.outputs.matrix }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "*"
|
||||
python-version: "3.11"
|
||||
|
||||
- name: Install diff-shades and support dependencies
|
||||
run: |
|
||||
python -m pip install click packaging urllib3
|
||||
python -m pip install 'click>=8.1.7' packaging urllib3
|
||||
python -m pip install https://github.com/ichard26/diff-shades/archive/stable.zip
|
||||
|
||||
- name: Calculate run configuration & metadata
|
||||
@ -34,7 +34,8 @@ jobs:
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
run: >
|
||||
python scripts/diff_shades_gha_helper.py config ${{ github.event_name }} ${{ matrix.mode }}
|
||||
python scripts/diff_shades_gha_helper.py config ${{ github.event_name }}
|
||||
${{ matrix.mode }}
|
||||
|
||||
analysis:
|
||||
name: analysis / ${{ matrix.mode }}
|
||||
@ -44,27 +45,27 @@ jobs:
|
||||
HATCH_BUILD_HOOKS_ENABLE: "1"
|
||||
# Clang is less picky with the C code it's given than gcc (and may
|
||||
# generate faster binaries too).
|
||||
CC: clang-12
|
||||
CC: clang-18
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include: ${{ fromJson(needs.configure.outputs.matrix )}}
|
||||
include: ${{ fromJson(needs.configure.outputs.matrix) }}
|
||||
|
||||
steps:
|
||||
- name: Checkout this repository (full clone)
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
# The baseline revision could be rather old so a full clone is ideal.
|
||||
fetch-depth: 0
|
||||
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "*"
|
||||
python-version: "3.11"
|
||||
|
||||
- name: Install diff-shades and support dependencies
|
||||
run: |
|
||||
python -m pip install https://github.com/ichard26/diff-shades/archive/stable.zip
|
||||
python -m pip install click packaging urllib3
|
||||
python -m pip install 'click>=8.1.7' packaging urllib3
|
||||
# After checking out old revisions, this might not exist so we'll use a copy.
|
||||
cat scripts/diff_shades_gha_helper.py > helper.py
|
||||
git config user.name "diff-shades-gha"
|
||||
@ -72,7 +73,7 @@ jobs:
|
||||
|
||||
- name: Attempt to use cached baseline analysis
|
||||
id: baseline-cache
|
||||
uses: actions/cache@v3
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ matrix.baseline-analysis }}
|
||||
key: ${{ matrix.baseline-cache-key }}
|
||||
@ -110,19 +111,19 @@ jobs:
|
||||
${{ matrix.baseline-analysis }} ${{ matrix.target-analysis }}
|
||||
|
||||
- name: Upload diff report
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.mode }}-diff.html
|
||||
path: diff.html
|
||||
|
||||
- name: Upload baseline analysis
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.baseline-analysis }}
|
||||
path: ${{ matrix.baseline-analysis }}
|
||||
|
||||
- name: Upload target analysis
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.target-analysis }}
|
||||
path: ${{ matrix.target-analysis }}
|
||||
@ -130,14 +131,13 @@ jobs:
|
||||
- name: Generate summary file (PR only)
|
||||
if: github.event_name == 'pull_request' && matrix.mode == 'preview-changes'
|
||||
run: >
|
||||
python helper.py comment-body
|
||||
${{ matrix.baseline-analysis }} ${{ matrix.target-analysis }}
|
||||
${{ matrix.baseline-sha }} ${{ matrix.target-sha }}
|
||||
${{ github.event.pull_request.number }}
|
||||
python helper.py comment-body ${{ matrix.baseline-analysis }}
|
||||
${{ matrix.target-analysis }} ${{ matrix.baseline-sha }}
|
||||
${{ matrix.target-sha }} ${{ github.event.pull_request.number }}
|
||||
|
||||
- name: Upload summary file (PR only)
|
||||
if: github.event_name == 'pull_request' && matrix.mode == 'preview-changes'
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: .pr-comment.json
|
||||
path: .pr-comment.json
|
||||
|
8
.github/workflows/diff_shades_comment.yml
vendored
8
.github/workflows/diff_shades_comment.yml
vendored
@ -12,8 +12,8 @@ jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "*"
|
||||
|
||||
@ -33,7 +33,7 @@ jobs:
|
||||
- name: Try to find pre-existing PR comment
|
||||
if: steps.metadata.outputs.needs-comment == 'true'
|
||||
id: find-comment
|
||||
uses: peter-evans/find-comment@034abe94d3191f9c89d870519735beae326f2bdb
|
||||
uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e
|
||||
with:
|
||||
issue-number: ${{ steps.metadata.outputs.pr-number }}
|
||||
comment-author: "github-actions[bot]"
|
||||
@ -41,7 +41,7 @@ jobs:
|
||||
|
||||
- name: Create or update PR comment
|
||||
if: steps.metadata.outputs.needs-comment == 'true'
|
||||
uses: peter-evans/create-or-update-comment@67dcc547d311b736a8e6c5c236542148a47adc3d
|
||||
uses: peter-evans/create-or-update-comment@71345be0265236311c031f5c7866368bd1eff043
|
||||
with:
|
||||
comment-id: ${{ steps.find-comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.metadata.outputs.pr-number }}
|
||||
|
14
.github/workflows/doc.yml
vendored
14
.github/workflows/doc.yml
vendored
@ -21,18 +21,20 @@ jobs:
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up latest Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "*"
|
||||
python-version: "3.13"
|
||||
allow-prereleases: true
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip setuptools wheel
|
||||
python -m pip install -e ".[d]"
|
||||
python -m pip install -r "docs/requirements.txt"
|
||||
python -m pip install uv
|
||||
python -m uv venv
|
||||
python -m uv pip install -e ".[d]"
|
||||
python -m uv pip install -r "docs/requirements.txt"
|
||||
|
||||
- name: Build documentation
|
||||
run: sphinx-build -a -b html -W --keep-going docs/ docs/_build
|
||||
|
14
.github/workflows/docker.yml
vendored
14
.github/workflows/docker.yml
vendored
@ -16,16 +16,16 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
@ -36,7 +36,7 @@ jobs:
|
||||
latest_non_release)" >> $GITHUB_ENV
|
||||
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
@ -47,7 +47,7 @@ jobs:
|
||||
if:
|
||||
${{ github.event_name == 'release' && github.event.action == 'published' &&
|
||||
!github.event.release.prerelease }}
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
@ -58,7 +58,7 @@ jobs:
|
||||
if:
|
||||
${{ github.event_name == 'release' && github.event.action == 'published' &&
|
||||
github.event.release.prerelease }}
|
||||
uses: docker/build-push-action@v4
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: .
|
||||
platforms: linux/amd64,linux/arm64
|
||||
|
7
.github/workflows/fuzz.yml
vendored
7
.github/workflows/fuzz.yml
vendored
@ -22,15 +22,16 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11"]
|
||||
python-version: ["3.9", "3.10", "3.11", "3.12.4", "3.13"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
allow-prereleases: true
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
|
18
.github/workflows/lint.yml
vendored
18
.github/workflows/lint.yml
vendored
@ -1,4 +1,4 @@
|
||||
name: Lint
|
||||
name: Lint + format ourselves
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
@ -14,7 +14,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Assert PR target is main
|
||||
if: github.event_name == 'pull_request' && github.repository == 'psf/black'
|
||||
@ -24,19 +24,25 @@ jobs:
|
||||
fi
|
||||
|
||||
- name: Set up latest Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "*"
|
||||
python-version: "3.13"
|
||||
allow-prereleases: true
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
python -m pip install -e '.[d]'
|
||||
python -m pip install -e '.'
|
||||
python -m pip install tox
|
||||
|
||||
- name: Run pre-commit hooks
|
||||
uses: pre-commit/action@v3.0.0
|
||||
uses: pre-commit/action@v3.0.1
|
||||
|
||||
- name: Format ourselves
|
||||
run: |
|
||||
tox -e run_self
|
||||
|
||||
- name: Regenerate schema
|
||||
run: |
|
||||
tox -e generate_schema
|
||||
git diff --exit-code
|
||||
|
99
.github/workflows/pypi_upload.yml
vendored
99
.github/workflows/pypi_upload.yml
vendored
@ -1,8 +1,12 @@
|
||||
name: Publish to PyPI
|
||||
name: Build and publish
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
@ -11,14 +15,16 @@ jobs:
|
||||
main:
|
||||
name: sdist + pure wheel
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'release'
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up latest Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "*"
|
||||
python-version: "3.13"
|
||||
allow-prereleases: true
|
||||
|
||||
- name: Install latest pip, build, twine
|
||||
run: |
|
||||
@ -28,47 +34,76 @@ jobs:
|
||||
- name: Build wheel and source distributions
|
||||
run: python -m build
|
||||
|
||||
- name: Upload to PyPI via Twine
|
||||
- if: github.event_name == 'release'
|
||||
name: Upload to PyPI via Twine
|
||||
env:
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
|
||||
run: twine upload --verbose -u '__token__' dist/*
|
||||
|
||||
generate_wheels_matrix:
|
||||
name: generate wheels matrix
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
include: ${{ steps.set-matrix.outputs.include }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
# Keep cibuildwheel version in sync with below
|
||||
- name: Install cibuildwheel and pypyp
|
||||
run: |
|
||||
pipx install cibuildwheel==2.22.0
|
||||
pipx install pypyp==1.3.0
|
||||
- name: generate matrix
|
||||
if: github.event_name != 'pull_request'
|
||||
run: |
|
||||
{
|
||||
cibuildwheel --print-build-identifiers --platform linux \
|
||||
| pyp 'json.dumps({"only": x, "os": "ubuntu-latest"})' \
|
||||
&& cibuildwheel --print-build-identifiers --platform macos \
|
||||
| pyp 'json.dumps({"only": x, "os": "macos-latest"})' \
|
||||
&& cibuildwheel --print-build-identifiers --platform windows \
|
||||
| pyp 'json.dumps({"only": x, "os": "windows-latest"})'
|
||||
} | pyp 'json.dumps(list(map(json.loads, lines)))' > /tmp/matrix
|
||||
env:
|
||||
CIBW_ARCHS_LINUX: x86_64
|
||||
CIBW_ARCHS_MACOS: x86_64 arm64
|
||||
CIBW_ARCHS_WINDOWS: AMD64
|
||||
- name: generate matrix (PR)
|
||||
if: github.event_name == 'pull_request'
|
||||
run: |
|
||||
{
|
||||
cibuildwheel --print-build-identifiers --platform linux \
|
||||
| pyp 'json.dumps({"only": x, "os": "ubuntu-latest"})'
|
||||
} | pyp 'json.dumps(list(map(json.loads, lines)))' > /tmp/matrix
|
||||
env:
|
||||
CIBW_BUILD: "cp39-* cp313-*"
|
||||
CIBW_ARCHS_LINUX: x86_64
|
||||
- id: set-matrix
|
||||
run: echo "include=$(cat /tmp/matrix)" | tee -a $GITHUB_OUTPUT
|
||||
|
||||
mypyc:
|
||||
name: mypyc wheels (${{ matrix.name }})
|
||||
name: mypyc wheels ${{ matrix.only }}
|
||||
needs: generate_wheels_matrix
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
name: linux-x86_64
|
||||
- os: windows-2019
|
||||
name: windows-amd64
|
||||
- os: macos-11
|
||||
name: macos-x86_64
|
||||
macos_arch: "x86_64"
|
||||
- os: macos-11
|
||||
name: macos-arm64
|
||||
macos_arch: "arm64"
|
||||
- os: macos-11
|
||||
name: macos-universal2
|
||||
macos_arch: "universal2"
|
||||
include: ${{ fromJson(needs.generate_wheels_matrix.outputs.include) }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Build wheels via cibuildwheel
|
||||
uses: pypa/cibuildwheel@v2.12.1
|
||||
env:
|
||||
CIBW_ARCHS_MACOS: "${{ matrix.macos_arch }}"
|
||||
- uses: actions/checkout@v4
|
||||
# Keep cibuildwheel version in sync with above
|
||||
- uses: pypa/cibuildwheel@v2.23.3
|
||||
with:
|
||||
only: ${{ matrix.only }}
|
||||
|
||||
- name: Upload wheels as workflow artifacts
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.name }}-mypyc-wheels
|
||||
name: ${{ matrix.only }}-mypyc-wheels
|
||||
path: ./wheelhouse/*.whl
|
||||
|
||||
- name: Upload wheels to PyPI via Twine
|
||||
- if: github.event_name == 'release'
|
||||
name: Upload wheels to PyPI via Twine
|
||||
env:
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
|
||||
run: pipx run twine upload --verbose -u '__token__' wheelhouse/*.whl
|
||||
@ -77,17 +112,19 @@ jobs:
|
||||
name: Update stable branch
|
||||
needs: [main, mypyc]
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'release'
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
steps:
|
||||
- name: Checkout stable branch
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: stable
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Update stable branch to release tag & push
|
||||
- if: github.event_name == 'release'
|
||||
name: Update stable branch to release tag & push
|
||||
run: |
|
||||
git reset --hard ${{ github.event.release.tag_name }}
|
||||
git push
|
||||
|
56
.github/workflows/release_tests.yml
vendored
Normal file
56
.github/workflows/release_tests.yml
vendored
Normal file
@ -0,0 +1,56 @@
|
||||
name: Release tool CI
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- .github/workflows/release_tests.yml
|
||||
- release.py
|
||||
- release_tests.py
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/release_tests.yml
|
||||
- release.py
|
||||
- release_tests.py
|
||||
|
||||
jobs:
|
||||
build:
|
||||
# We want to run on external PRs, but not on our own internal PRs as they'll be run
|
||||
# by the push to the branch. Without this if check, checks are duplicated since
|
||||
# internal PRs match both the push and pull_request events.
|
||||
if:
|
||||
github.event_name == 'push' || github.event.pull_request.head.repo.full_name !=
|
||||
github.repository
|
||||
|
||||
name: Running python ${{ matrix.python-version }} on ${{matrix.os}}
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["3.13"]
|
||||
os: [macOS-latest, ubuntu-latest, windows-latest]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
# Give us all history, branches and tags
|
||||
fetch-depth: 0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
allow-prereleases: true
|
||||
|
||||
- name: Print Python Version
|
||||
run: python --version --version && which python
|
||||
|
||||
- name: Print Git Version
|
||||
run: git --version && which git
|
||||
|
||||
- name: Update pip, setuptools + wheels
|
||||
run: |
|
||||
python -m pip install --upgrade pip setuptools wheel
|
||||
|
||||
- name: Run unit tests via coverage + print report
|
||||
run: |
|
||||
python -m pip install coverage
|
||||
coverage run scripts/release_tests.py
|
||||
coverage report --show-missing
|
28
.github/workflows/test.yml
vendored
28
.github/workflows/test.yml
vendored
@ -31,16 +31,17 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "pypy-3.7", "pypy-3.8"]
|
||||
python-version: ["3.9", "3.10", "3.11", "3.12.4", "3.13", "pypy-3.9"]
|
||||
os: [ubuntu-latest, macOS-latest, windows-latest]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
allow-prereleases: true
|
||||
|
||||
- name: Install tox
|
||||
run: |
|
||||
@ -49,7 +50,8 @@ jobs:
|
||||
|
||||
- name: Unit tests
|
||||
if: "!startsWith(matrix.python-version, 'pypy')"
|
||||
run: tox -e ci-py -- -v --color=yes
|
||||
run:
|
||||
tox -e ci-py$(echo ${{ matrix.python-version }} | tr -d '.') -- -v --color=yes
|
||||
|
||||
- name: Unit tests (pypy)
|
||||
if: "startsWith(matrix.python-version, 'pypy')"
|
||||
@ -58,8 +60,10 @@ jobs:
|
||||
- name: Upload coverage to Coveralls
|
||||
# Upload coverage if we are on the main repository and
|
||||
# we're running on Linux (this action only supports Linux)
|
||||
if: github.repository == 'psf/black' && matrix.os == 'ubuntu-latest'
|
||||
uses: AndreMiras/coveralls-python-action@v20201129
|
||||
if:
|
||||
github.repository == 'psf/black' && matrix.os == 'ubuntu-latest' &&
|
||||
!startsWith(matrix.python-version, 'pypy')
|
||||
uses: AndreMiras/coveralls-python-action@ac868b9540fad490f7ca82b8ca00480fd751ed19
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
parallel: true
|
||||
@ -72,9 +76,9 @@ jobs:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- name: Send finished signal to Coveralls
|
||||
uses: AndreMiras/coveralls-python-action@v20201129
|
||||
uses: AndreMiras/coveralls-python-action@ac868b9540fad490f7ca82b8ca00480fd751ed19
|
||||
with:
|
||||
parallel-finished: true
|
||||
debug: true
|
||||
@ -90,12 +94,12 @@ jobs:
|
||||
os: [ubuntu-latest, macOS-latest]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up latest Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "*"
|
||||
python-version: "3.12.4"
|
||||
|
||||
- name: Install black with uvloop
|
||||
run: |
|
||||
@ -103,4 +107,4 @@ jobs:
|
||||
python -m pip install -e ".[uvloop]"
|
||||
|
||||
- name: Format ourselves
|
||||
run: python -m black --check src/
|
||||
run: python -m black --check src/ tests/
|
||||
|
10
.github/workflows/upload_binary.yml
vendored
10
.github/workflows/upload_binary.yml
vendored
@ -13,13 +13,13 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [windows-2019, ubuntu-20.04, macos-latest]
|
||||
os: [windows-2019, ubuntu-22.04, macos-latest]
|
||||
include:
|
||||
- os: windows-2019
|
||||
pathsep: ";"
|
||||
asset_name: black_windows.exe
|
||||
executable_mime: "application/vnd.microsoft.portable-executable"
|
||||
- os: ubuntu-20.04
|
||||
- os: ubuntu-22.04
|
||||
pathsep: ":"
|
||||
asset_name: black_linux
|
||||
executable_mime: "application/x-executable"
|
||||
@ -29,12 +29,12 @@ jobs:
|
||||
executable_mime: "application/x-mach-binary"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up latest Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "*"
|
||||
python-version: "3.12.4"
|
||||
|
||||
- name: Install Black and PyInstaller
|
||||
run: |
|
||||
|
2
.gitignore
vendored
2
.gitignore
vendored
@ -4,6 +4,7 @@
|
||||
_build
|
||||
.DS_Store
|
||||
.vscode
|
||||
.python-version
|
||||
docs/_static/pypi.svg
|
||||
.tox
|
||||
__pycache__
|
||||
@ -24,3 +25,4 @@ src/_black_version.py
|
||||
.hypothesis/
|
||||
venv/
|
||||
.ipynb_checkpoints/
|
||||
node_modules/
|
||||
|
@ -1,6 +1,6 @@
|
||||
# Note: don't use this config for your own repositories. Instead, see
|
||||
# "Version control integration" in docs/integrations/source_version_control.md
|
||||
exclude: ^(src/blib2to3/|profiling/|tests/data/)
|
||||
exclude: ^(profiling/|tests/data/)
|
||||
repos:
|
||||
- repo: local
|
||||
hooks:
|
||||
@ -12,7 +12,7 @@ repos:
|
||||
additional_dependencies:
|
||||
&version_check_dependencies [
|
||||
commonmark==0.9.1,
|
||||
pyyaml==5.4.1,
|
||||
pyyaml==6.0.1,
|
||||
beautifulsoup4==4.9.3,
|
||||
]
|
||||
|
||||
@ -24,43 +24,60 @@ repos:
|
||||
additional_dependencies: *version_check_dependencies
|
||||
|
||||
- repo: https://github.com/pycqa/isort
|
||||
rev: 5.12.0
|
||||
rev: 6.0.1
|
||||
hooks:
|
||||
- id: isort
|
||||
|
||||
- repo: https://github.com/pycqa/flake8
|
||||
rev: 4.0.1
|
||||
rev: 7.2.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
additional_dependencies:
|
||||
- flake8-bugbear
|
||||
- flake8-bugbear==24.2.6
|
||||
- flake8-comprehensions
|
||||
- flake8-simplify
|
||||
exclude: ^src/blib2to3/
|
||||
|
||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||
rev: v0.991
|
||||
rev: v1.15.0
|
||||
hooks:
|
||||
- id: mypy
|
||||
exclude: ^docs/conf.py
|
||||
additional_dependencies:
|
||||
- types-dataclasses >= 0.1.3
|
||||
exclude: ^(docs/conf.py|scripts/generate_schema.py)$
|
||||
args: []
|
||||
additional_dependencies: &mypy_deps
|
||||
- types-PyYAML
|
||||
- types-atheris
|
||||
- tomli >= 0.2.6, < 2.0.0
|
||||
- types-typed-ast >= 1.4.1
|
||||
- click >= 8.1.0
|
||||
- click >= 8.2.0
|
||||
# Click is intentionally out-of-sync with pyproject.toml
|
||||
# v8.2 has breaking changes. We work around them at runtime, but we need the newer stubs.
|
||||
- packaging >= 22.0
|
||||
- platformdirs >= 2.1.0
|
||||
- pytokens >= 0.1.10
|
||||
- pytest
|
||||
- hypothesis
|
||||
- aiohttp >= 3.7.4
|
||||
- types-commonmark
|
||||
- urllib3
|
||||
- hypothesmith
|
||||
- id: mypy
|
||||
name: mypy (Python 3.10)
|
||||
files: scripts/generate_schema.py
|
||||
args: ["--python-version=3.10"]
|
||||
additional_dependencies: *mypy_deps
|
||||
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v2.7.1
|
||||
- repo: https://github.com/rbubley/mirrors-prettier
|
||||
rev: v3.5.3
|
||||
hooks:
|
||||
- id: prettier
|
||||
types_or: [markdown, yaml, json]
|
||||
exclude: \.github/workflows/diff_shades\.yml
|
||||
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.3.0
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
- id: end-of-file-fixer
|
||||
- id: trailing-whitespace
|
||||
|
||||
ci:
|
||||
autoupdate_schedule: quarterly
|
||||
|
@ -1,3 +1,5 @@
|
||||
# Note that we recommend using https://github.com/psf/black-pre-commit-mirror instead
|
||||
# This will work about 2x as fast as using the hooks in this repository
|
||||
- id: black
|
||||
name: black
|
||||
description: "Black: The uncompromising Python code formatter"
|
||||
|
@ -6,7 +6,7 @@ formats:
|
||||
build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
python: "3.8"
|
||||
python: "3.11"
|
||||
|
||||
python:
|
||||
install:
|
||||
@ -16,3 +16,6 @@ python:
|
||||
path: .
|
||||
extra_requirements:
|
||||
- d
|
||||
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
|
@ -13,6 +13,7 @@ Maintained with:
|
||||
- [Richard Si](mailto:sichard26@gmail.com)
|
||||
- [Felix Hildén](mailto:felix.hilden@gmail.com)
|
||||
- [Batuhan Taskaya](mailto:batuhan@python.org)
|
||||
- [Shantanu Jain](mailto:hauntsaninja@gmail.com)
|
||||
|
||||
Multiple contributions by:
|
||||
|
||||
@ -180,6 +181,7 @@ Multiple contributions by:
|
||||
- [Tony Narlock](mailto:tony@git-pull.com)
|
||||
- [Tsuyoshi Hombashi](mailto:tsuyoshi.hombashi@gmail.com)
|
||||
- [Tushar Chandra](mailto:tusharchandra2018@u.northwestern.edu)
|
||||
- [Tushar Sadhwani](mailto:tushar.sadhwani000@gmail.com)
|
||||
- [Tzu-ping Chung](mailto:uranusjr@gmail.com)
|
||||
- [Utsav Shah](mailto:ukshah2@illinois.edu)
|
||||
- utsav-dbx
|
||||
|
659
CHANGES.md
659
CHANGES.md
@ -10,10 +10,24 @@
|
||||
|
||||
<!-- Changes that affect Black's stable style -->
|
||||
|
||||
- Fix crash while formatting a long `del` statement containing tuples (#4628)
|
||||
- Fix crash while formatting expressions using the walrus operator in complex `with`
|
||||
statements (#4630)
|
||||
- Handle `# fmt: skip` followed by a comment at the end of file (#4635)
|
||||
- Fix crash when a tuple appears in the `as` clause of a `with` statement (#4634)
|
||||
- Fix crash when tuple is used as a context manager inside a `with` statement (#4646)
|
||||
- Fix crash on a `\\r\n` (#4673)
|
||||
- Fix crash on `await ...` (where `...` is a literal `Ellipsis`) (#4676)
|
||||
- Remove support for pre-python 3.7 `await/async` as soft keywords/variable names
|
||||
(#4676)
|
||||
|
||||
### Preview style
|
||||
|
||||
<!-- Changes that affect Black's preview style -->
|
||||
|
||||
- Fix a bug where one-liner functions/conditionals marked with `# fmt: skip` would still
|
||||
be formatted (#4552)
|
||||
|
||||
### Configuration
|
||||
|
||||
<!-- Changes to how Black can be configured -->
|
||||
@ -26,6 +40,10 @@
|
||||
|
||||
<!-- Changes to the parser or to version autodetection -->
|
||||
|
||||
- Rewrite tokenizer to improve performance and compliance (#4536)
|
||||
- Fix bug where certain unusual expressions (e.g., lambdas) were not accepted in type
|
||||
parameter bounds and defaults. (#4602)
|
||||
|
||||
### Performance
|
||||
|
||||
<!-- Changes that improve Black's performance. -->
|
||||
@ -42,11 +60,627 @@
|
||||
|
||||
<!-- For example, Docker, GitHub Actions, pre-commit, editors -->
|
||||
|
||||
- Fix the version check in the vim file to reject Python 3.8 (#4567)
|
||||
- Enhance GitHub Action `psf/black` to read Black version from an additional section in
|
||||
pyproject.toml: `[project.dependency-groups]` (#4606)
|
||||
|
||||
### Documentation
|
||||
|
||||
<!-- Major changes to documentation and policies. Small docs changes
|
||||
don't need a changelog entry. -->
|
||||
|
||||
## 25.1.0
|
||||
|
||||
### Highlights
|
||||
|
||||
This release introduces the new 2025 stable style (#4558), stabilizing the following
|
||||
changes:
|
||||
|
||||
- Normalize casing of Unicode escape characters in strings to lowercase (#2916)
|
||||
- Fix inconsistencies in whether certain strings are detected as docstrings (#4095)
|
||||
- Consistently add trailing commas to typed function parameters (#4164)
|
||||
- Remove redundant parentheses in if guards for case blocks (#4214)
|
||||
- Add parentheses to if clauses in case blocks when the line is too long (#4269)
|
||||
- Whitespace before `# fmt: skip` comments is no longer normalized (#4146)
|
||||
- Fix line length computation for certain expressions that involve the power operator
|
||||
(#4154)
|
||||
- Check if there is a newline before the terminating quotes of a docstring (#4185)
|
||||
- Fix type annotation spacing between `*` and more complex type variable tuple (#4440)
|
||||
|
||||
The following changes were not in any previous release:
|
||||
|
||||
- Remove parentheses around sole list items (#4312)
|
||||
- Generic function definitions are now formatted more elegantly: parameters are split
|
||||
over multiple lines first instead of type parameter definitions (#4553)
|
||||
|
||||
### Stable style
|
||||
|
||||
- Fix formatting cells in IPython notebooks with magic methods and starting or trailing
|
||||
empty lines (#4484)
|
||||
- Fix crash when formatting `with` statements containing tuple generators/unpacking
|
||||
(#4538)
|
||||
|
||||
### Preview style
|
||||
|
||||
- Fix/remove string merging changing f-string quotes on f-strings with internal quotes
|
||||
(#4498)
|
||||
- Collapse multiple empty lines after an import into one (#4489)
|
||||
- Prevent `string_processing` and `wrap_long_dict_values_in_parens` from removing
|
||||
parentheses around long dictionary values (#4377)
|
||||
- Move `wrap_long_dict_values_in_parens` from the unstable to preview style (#4561)
|
||||
|
||||
### Packaging
|
||||
|
||||
- Store license identifier inside the `License-Expression` metadata field, see
|
||||
[PEP 639](https://peps.python.org/pep-0639/). (#4479)
|
||||
|
||||
### Performance
|
||||
|
||||
- Speed up the `is_fstring_start` function in Black's tokenizer (#4541)
|
||||
|
||||
### Integrations
|
||||
|
||||
- If using stdin with `--stdin-filename` set to a force excluded path, stdin won't be
|
||||
formatted. (#4539)
|
||||
|
||||
## 24.10.0
|
||||
|
||||
### Highlights
|
||||
|
||||
- Black is now officially tested with Python 3.13 and provides Python 3.13
|
||||
mypyc-compiled wheels. (#4436) (#4449)
|
||||
- Black will issue an error when used with Python 3.12.5, due to an upstream memory
|
||||
safety issue in Python 3.12.5 that can cause Black's AST safety checks to fail. Please
|
||||
use Python 3.12.6 or Python 3.12.4 instead. (#4447)
|
||||
- Black no longer supports running with Python 3.8 (#4452)
|
||||
|
||||
### Stable style
|
||||
|
||||
- Fix crashes involving comments in parenthesised return types or `X | Y` style unions.
|
||||
(#4453)
|
||||
- Fix skipping Jupyter cells with unknown `%%` magic (#4462)
|
||||
|
||||
### Preview style
|
||||
|
||||
- Fix type annotation spacing between * and more complex type variable tuple (i.e. `def
|
||||
fn(*args: *tuple[*Ts, T]) -> None: pass`) (#4440)
|
||||
|
||||
### Caching
|
||||
|
||||
- Fix bug where the cache was shared between runs with and without `--unstable` (#4466)
|
||||
|
||||
### Packaging
|
||||
|
||||
- Upgrade version of mypyc used to 1.12 beta (#4450) (#4449)
|
||||
- `blackd` now requires a newer version of aiohttp. (#4451)
|
||||
|
||||
### Output
|
||||
|
||||
- Added Python target version information on parse error (#4378)
|
||||
- Add information about Black version to internal error messages (#4457)
|
||||
|
||||
## 24.8.0
|
||||
|
||||
### Stable style
|
||||
|
||||
- Fix crash when `# fmt: off` is used before a closing parenthesis or bracket. (#4363)
|
||||
|
||||
### Packaging
|
||||
|
||||
- Packaging metadata updated: docs are explictly linked, the issue tracker is now also
|
||||
linked. This improves the PyPI listing for Black. (#4345)
|
||||
|
||||
### Parser
|
||||
|
||||
- Fix regression where Black failed to parse a multiline f-string containing another
|
||||
multiline string (#4339)
|
||||
- Fix regression where Black failed to parse an escaped single quote inside an f-string
|
||||
(#4401)
|
||||
- Fix bug with Black incorrectly parsing empty lines with a backslash (#4343)
|
||||
- Fix bugs with Black's tokenizer not handling `\{` inside f-strings very well (#4422)
|
||||
- Fix incorrect line numbers in the tokenizer for certain tokens within f-strings
|
||||
(#4423)
|
||||
|
||||
### Performance
|
||||
|
||||
- Improve performance when a large directory is listed in `.gitignore` (#4415)
|
||||
|
||||
### _Blackd_
|
||||
|
||||
- Fix blackd (and all extras installs) for docker container (#4357)
|
||||
|
||||
## 24.4.2
|
||||
|
||||
This is a bugfix release to fix two regressions in the new f-string parser introduced in
|
||||
24.4.1.
|
||||
|
||||
### Parser
|
||||
|
||||
- Fix regression where certain complex f-strings failed to parse (#4332)
|
||||
|
||||
### Performance
|
||||
|
||||
- Fix bad performance on certain complex string literals (#4331)
|
||||
|
||||
## 24.4.1
|
||||
|
||||
### Highlights
|
||||
|
||||
- Add support for the new Python 3.12 f-string syntax introduced by PEP 701 (#3822)
|
||||
|
||||
### Stable style
|
||||
|
||||
- Fix crash involving indented dummy functions containing newlines (#4318)
|
||||
|
||||
### Parser
|
||||
|
||||
- Add support for type parameter defaults, a new syntactic feature added to Python 3.13
|
||||
by PEP 696 (#4327)
|
||||
|
||||
### Integrations
|
||||
|
||||
- Github Action now works even when `git archive` is skipped (#4313)
|
||||
|
||||
## 24.4.0
|
||||
|
||||
### Stable style
|
||||
|
||||
- Fix unwanted crashes caused by AST equivalency check (#4290)
|
||||
|
||||
### Preview style
|
||||
|
||||
- `if` guards in `case` blocks are now wrapped in parentheses when the line is too long.
|
||||
(#4269)
|
||||
- Stop moving multiline strings to a new line unless inside brackets (#4289)
|
||||
|
||||
### Integrations
|
||||
|
||||
- Add a new option `use_pyproject` to the GitHub Action `psf/black`. This will read the
|
||||
Black version from `pyproject.toml`. (#4294)
|
||||
|
||||
## 24.3.0
|
||||
|
||||
### Highlights
|
||||
|
||||
This release is a milestone: it fixes Black's first CVE security vulnerability. If you
|
||||
run Black on untrusted input, or if you habitually put thousands of leading tab
|
||||
characters in your docstrings, you are strongly encouraged to upgrade immediately to fix
|
||||
[CVE-2024-21503](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-21503).
|
||||
|
||||
This release also fixes a bug in Black's AST safety check that allowed Black to make
|
||||
incorrect changes to certain f-strings that are valid in Python 3.12 and higher.
|
||||
|
||||
### Stable style
|
||||
|
||||
- Don't move comments along with delimiters, which could cause crashes (#4248)
|
||||
- Strengthen AST safety check to catch more unsafe changes to strings. Previous versions
|
||||
of Black would incorrectly format the contents of certain unusual f-strings containing
|
||||
nested strings with the same quote type. Now, Black will crash on such strings until
|
||||
support for the new f-string syntax is implemented. (#4270)
|
||||
- Fix a bug where line-ranges exceeding the last code line would not work as expected
|
||||
(#4273)
|
||||
|
||||
### Performance
|
||||
|
||||
- Fix catastrophic performance on docstrings that contain large numbers of leading tab
|
||||
characters. This fixes
|
||||
[CVE-2024-21503](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2024-21503).
|
||||
(#4278)
|
||||
|
||||
### Documentation
|
||||
|
||||
- Note what happens when `--check` is used with `--quiet` (#4236)
|
||||
|
||||
## 24.2.0
|
||||
|
||||
### Stable style
|
||||
|
||||
- Fixed a bug where comments where mistakenly removed along with redundant parentheses
|
||||
(#4218)
|
||||
|
||||
### Preview style
|
||||
|
||||
- Move the `hug_parens_with_braces_and_square_brackets` feature to the unstable style
|
||||
due to an outstanding crash and proposed formatting tweaks (#4198)
|
||||
- Fixed a bug where base expressions caused inconsistent formatting of \*\* in tenary
|
||||
expression (#4154)
|
||||
- Checking for newline before adding one on docstring that is almost at the line limit
|
||||
(#4185)
|
||||
- Remove redundant parentheses in `case` statement `if` guards (#4214).
|
||||
|
||||
### Configuration
|
||||
|
||||
- Fix issue where _Black_ would ignore input files in the presence of symlinks (#4222)
|
||||
- _Black_ now ignores `pyproject.toml` that is missing a `tool.black` section when
|
||||
discovering project root and configuration. Since _Black_ continues to use version
|
||||
control as an indicator of project root, this is expected to primarily change behavior
|
||||
for users in a monorepo setup (desirably). If you wish to preserve previous behavior,
|
||||
simply add an empty `[tool.black]` to the previously discovered `pyproject.toml`
|
||||
(#4204)
|
||||
|
||||
### Output
|
||||
|
||||
- Black will swallow any `SyntaxWarning`s or `DeprecationWarning`s produced by the `ast`
|
||||
module when performing equivalence checks (#4189)
|
||||
|
||||
### Integrations
|
||||
|
||||
- Add a JSONSchema and provide a validate-pyproject entry-point (#4181)
|
||||
|
||||
## 24.1.1
|
||||
|
||||
Bugfix release to fix a bug that made Black unusable on certain file systems with strict
|
||||
limits on path length.
|
||||
|
||||
### Preview style
|
||||
|
||||
- Consistently add trailing comma on typed parameters (#4164)
|
||||
|
||||
### Configuration
|
||||
|
||||
- Shorten the length of the name of the cache file to fix crashes on file systems that
|
||||
do not support long paths (#4176)
|
||||
|
||||
## 24.1.0
|
||||
|
||||
### Highlights
|
||||
|
||||
This release introduces the new 2024 stable style (#4106), stabilizing the following
|
||||
changes:
|
||||
|
||||
- Add parentheses around `if`-`else` expressions (#2278)
|
||||
- Dummy class and function implementations consisting only of `...` are formatted more
|
||||
compactly (#3796)
|
||||
- If an assignment statement is too long, we now prefer splitting on the right-hand side
|
||||
(#3368)
|
||||
- Hex codes in Unicode escape sequences are now standardized to lowercase (#2916)
|
||||
- Allow empty first lines at the beginning of most blocks (#3967, #4061)
|
||||
- Add parentheses around long type annotations (#3899)
|
||||
- Enforce newline after module docstrings (#3932, #4028)
|
||||
- Fix incorrect magic trailing comma handling in return types (#3916)
|
||||
- Remove blank lines before class docstrings (#3692)
|
||||
- Wrap multiple context managers in parentheses if combined in a single `with` statement
|
||||
(#3489)
|
||||
- Fix bug in line length calculations for power operations (#3942)
|
||||
- Add trailing commas to collection literals even if there's a comment after the last
|
||||
entry (#3393)
|
||||
- When using `--skip-magic-trailing-comma` or `-C`, trailing commas are stripped from
|
||||
subscript expressions with more than 1 element (#3209)
|
||||
- Add extra blank lines in stubs in a few cases (#3564, #3862)
|
||||
- Accept raw strings as docstrings (#3947)
|
||||
- Split long lines in case blocks (#4024)
|
||||
- Stop removing spaces from walrus operators within subscripts (#3823)
|
||||
- Fix incorrect formatting of certain async statements (#3609)
|
||||
- Allow combining `# fmt: skip` with other comments (#3959)
|
||||
|
||||
There are already a few improvements in the `--preview` style, which are slated for the
|
||||
2025 stable style. Try them out and
|
||||
[share your feedback](https://github.com/psf/black/issues). In the past, the preview
|
||||
style has included some features that we were not able to stabilize. This year, we're
|
||||
adding a separate `--unstable` style for features with known problems. Now, the
|
||||
`--preview` style only includes features that we actually expect to make it into next
|
||||
year's stable style.
|
||||
|
||||
### Stable style
|
||||
|
||||
Several bug fixes were made in features that are moved to the stable style in this
|
||||
release:
|
||||
|
||||
- Fix comment handling when parenthesising conditional expressions (#4134)
|
||||
- Fix bug where spaces were not added around parenthesized walruses in subscripts,
|
||||
unlike other binary operators (#4109)
|
||||
- Remove empty lines before docstrings in async functions (#4132)
|
||||
- Address a missing case in the change to allow empty lines at the beginning of all
|
||||
blocks, except immediately before a docstring (#4130)
|
||||
- For stubs, fix logic to enforce empty line after nested classes with bodies (#4141)
|
||||
|
||||
### Preview style
|
||||
|
||||
- Add `--unstable` style, covering preview features that have known problems that would
|
||||
block them from going into the stable style. Also add the `--enable-unstable-feature`
|
||||
flag; for example, use
|
||||
`--enable-unstable-feature hug_parens_with_braces_and_square_brackets` to apply this
|
||||
preview feature throughout 2024, even if a later Black release downgrades the feature
|
||||
to unstable (#4096)
|
||||
- Format module docstrings the same as class and function docstrings (#4095)
|
||||
- Fix crash when using a walrus in a dictionary (#4155)
|
||||
- Fix unnecessary parentheses when wrapping long dicts (#4135)
|
||||
- Stop normalizing spaces before `# fmt: skip` comments (#4146)
|
||||
|
||||
### Configuration
|
||||
|
||||
- Print warning when configuration in `pyproject.toml` contains an invalid key (#4165)
|
||||
- Fix symlink handling, properly ignoring symlinks that point outside of root (#4161)
|
||||
- Fix cache mtime logic that resulted in false positive cache hits (#4128)
|
||||
- Remove the long-deprecated `--experimental-string-processing` flag. This feature can
|
||||
currently be enabled with `--preview --enable-unstable-feature string_processing`.
|
||||
(#4096)
|
||||
|
||||
### Integrations
|
||||
|
||||
- Revert the change to run Black's pre-commit integration only on specific git hooks
|
||||
(#3940) for better compatibility with older versions of pre-commit (#4137)
|
||||
|
||||
## 23.12.1
|
||||
|
||||
### Packaging
|
||||
|
||||
- Fixed a bug that included dependencies from the `d` extra by default (#4108)
|
||||
|
||||
## 23.12.0
|
||||
|
||||
### Highlights
|
||||
|
||||
It's almost 2024, which means it's time for a new edition of _Black_'s stable style!
|
||||
Together with this release, we'll put out an alpha release 24.1a1 showcasing the draft
|
||||
2024 stable style, which we'll finalize in the January release. Please try it out and
|
||||
[share your feedback](https://github.com/psf/black/issues/4042).
|
||||
|
||||
This release (23.12.0) will still produce the 2023 style. Most but not all of the
|
||||
changes in `--preview` mode will be in the 2024 stable style.
|
||||
|
||||
### Stable style
|
||||
|
||||
- Fix bug where `# fmt: off` automatically dedents when used with the `--line-ranges`
|
||||
option, even when it is not within the specified line range. (#4084)
|
||||
- Fix feature detection for parenthesized context managers (#4104)
|
||||
|
||||
### Preview style
|
||||
|
||||
- Prefer more equal signs before a break when splitting chained assignments (#4010)
|
||||
- Standalone form feed characters at the module level are no longer removed (#4021)
|
||||
- Additional cases of immediately nested tuples, lists, and dictionaries are now
|
||||
indented less (#4012)
|
||||
- Allow empty lines at the beginning of all blocks, except immediately before a
|
||||
docstring (#4060)
|
||||
- Fix crash in preview mode when using a short `--line-length` (#4086)
|
||||
- Keep suites consisting of only an ellipsis on their own lines if they are not
|
||||
functions or class definitions (#4066) (#4103)
|
||||
|
||||
### Configuration
|
||||
|
||||
- `--line-ranges` now skips _Black_'s internal stability check in `--safe` mode. This
|
||||
avoids a crash on rare inputs that have many unformatted same-content lines. (#4034)
|
||||
|
||||
### Packaging
|
||||
|
||||
- Upgrade to mypy 1.7.1 (#4049) (#4069)
|
||||
- Faster compiled wheels are now available for CPython 3.12 (#4070)
|
||||
|
||||
### Integrations
|
||||
|
||||
- Enable 3.12 CI (#4035)
|
||||
- Build docker images in parallel (#4054)
|
||||
- Build docker images with 3.12 (#4055)
|
||||
|
||||
## 23.11.0
|
||||
|
||||
### Highlights
|
||||
|
||||
- Support formatting ranges of lines with the new `--line-ranges` command-line option
|
||||
(#4020)
|
||||
|
||||
### Stable style
|
||||
|
||||
- Fix crash on formatting bytes strings that look like docstrings (#4003)
|
||||
- Fix crash when whitespace followed a backslash before newline in a docstring (#4008)
|
||||
- Fix standalone comments inside complex blocks crashing Black (#4016)
|
||||
- Fix crash on formatting code like `await (a ** b)` (#3994)
|
||||
- No longer treat leading f-strings as docstrings. This matches Python's behaviour and
|
||||
fixes a crash (#4019)
|
||||
|
||||
### Preview style
|
||||
|
||||
- Multiline dicts and lists that are the sole argument to a function are now indented
|
||||
less (#3964)
|
||||
- Multiline unpacked dicts and lists as the sole argument to a function are now also
|
||||
indented less (#3992)
|
||||
- In f-string debug expressions, quote types that are visible in the final string are
|
||||
now preserved (#4005)
|
||||
- Fix a bug where long `case` blocks were not split into multiple lines. Also enable
|
||||
general trailing comma rules on `case` blocks (#4024)
|
||||
- Keep requiring two empty lines between module-level docstring and first function or
|
||||
class definition (#4028)
|
||||
- Add support for single-line format skip with other comments on the same line (#3959)
|
||||
|
||||
### Configuration
|
||||
|
||||
- Consistently apply force exclusion logic before resolving symlinks (#4015)
|
||||
- Fix a bug in the matching of absolute path names in `--include` (#3976)
|
||||
|
||||
### Performance
|
||||
|
||||
- Fix mypyc builds on arm64 on macOS (#4017)
|
||||
|
||||
### Integrations
|
||||
|
||||
- Black's pre-commit integration will now run only on git hooks appropriate for a code
|
||||
formatter (#3940)
|
||||
|
||||
## 23.10.1
|
||||
|
||||
### Highlights
|
||||
|
||||
- Maintenance release to get a fix out for GitHub Action edge case (#3957)
|
||||
|
||||
### Preview style
|
||||
|
||||
- Fix merging implicit multiline strings that have inline comments (#3956)
|
||||
- Allow empty first line after block open before a comment or compound statement (#3967)
|
||||
|
||||
### Packaging
|
||||
|
||||
- Change Dockerfile to hatch + compile black (#3965)
|
||||
|
||||
### Integrations
|
||||
|
||||
- The summary output for GitHub workflows is now suppressible using the `summary`
|
||||
parameter. (#3958)
|
||||
- Fix the action failing when Black check doesn't pass (#3957)
|
||||
|
||||
### Documentation
|
||||
|
||||
- It is known Windows documentation CI is broken
|
||||
https://github.com/psf/black/issues/3968
|
||||
|
||||
## 23.10.0
|
||||
|
||||
### Stable style
|
||||
|
||||
- Fix comments getting removed from inside parenthesized strings (#3909)
|
||||
|
||||
### Preview style
|
||||
|
||||
- Fix long lines with power operators getting split before the line length (#3942)
|
||||
- Long type hints are now wrapped in parentheses and properly indented when split across
|
||||
multiple lines (#3899)
|
||||
- Magic trailing commas are now respected in return types. (#3916)
|
||||
- Require one empty line after module-level docstrings. (#3932)
|
||||
- Treat raw triple-quoted strings as docstrings (#3947)
|
||||
|
||||
### Configuration
|
||||
|
||||
- Fix cache versioning logic when `BLACK_CACHE_DIR` is set (#3937)
|
||||
|
||||
### Parser
|
||||
|
||||
- Fix bug where attributes named `type` were not accepted inside `match` statements
|
||||
(#3950)
|
||||
- Add support for PEP 695 type aliases containing lambdas and other unusual expressions
|
||||
(#3949)
|
||||
|
||||
### Output
|
||||
|
||||
- Black no longer attempts to provide special errors for attempting to format Python 2
|
||||
code (#3933)
|
||||
- Black will more consistently print stacktraces on internal errors in verbose mode
|
||||
(#3938)
|
||||
|
||||
### Integrations
|
||||
|
||||
- The action output displayed in the job summary is now wrapped in Markdown (#3914)
|
||||
|
||||
## 23.9.1
|
||||
|
||||
Due to various issues, the previous release (23.9.0) did not include compiled mypyc
|
||||
wheels, which make Black significantly faster. These issues have now been fixed, and
|
||||
this release should come with compiled wheels once again.
|
||||
|
||||
There will be no wheels for Python 3.12 due to a bug in mypyc. We will provide 3.12
|
||||
wheels in a future release as soon as the mypyc bug is fixed.
|
||||
|
||||
### Packaging
|
||||
|
||||
- Upgrade to mypy 1.5.1 (#3864)
|
||||
|
||||
### Performance
|
||||
|
||||
- Store raw tuples instead of NamedTuples in Black's cache, improving performance and
|
||||
decreasing the size of the cache (#3877)
|
||||
|
||||
## 23.9.0
|
||||
|
||||
### Preview style
|
||||
|
||||
- More concise formatting for dummy implementations (#3796)
|
||||
- In stub files, add a blank line between a statement with a body (e.g an
|
||||
`if sys.version_info > (3, x):`) and a function definition on the same level (#3862)
|
||||
- Fix a bug whereby spaces were removed from walrus operators within subscript(#3823)
|
||||
|
||||
### Configuration
|
||||
|
||||
- Black now applies exclusion and ignore logic before resolving symlinks (#3846)
|
||||
|
||||
### Performance
|
||||
|
||||
- Avoid importing `IPython` if notebook cells do not contain magics (#3782)
|
||||
- Improve caching by comparing file hashes as fallback for mtime and size (#3821)
|
||||
|
||||
### _Blackd_
|
||||
|
||||
- Fix an issue in `blackd` with single character input (#3558)
|
||||
|
||||
### Integrations
|
||||
|
||||
- Black now has an
|
||||
[official pre-commit mirror](https://github.com/psf/black-pre-commit-mirror). Swapping
|
||||
`https://github.com/psf/black` to `https://github.com/psf/black-pre-commit-mirror` in
|
||||
your `.pre-commit-config.yaml` will make Black about 2x faster (#3828)
|
||||
- The `.black.env` folder specified by `ENV_PATH` will now be removed on the completion
|
||||
of the GitHub Action (#3759)
|
||||
|
||||
## 23.7.0
|
||||
|
||||
### Highlights
|
||||
|
||||
- Runtime support for Python 3.7 has been removed. Formatting 3.7 code will still be
|
||||
supported until further notice (#3765)
|
||||
|
||||
### Stable style
|
||||
|
||||
- Fix a bug where an illegal trailing comma was added to return type annotations using
|
||||
PEP 604 unions (#3735)
|
||||
- Fix several bugs and crashes where comments in stub files were removed or mishandled
|
||||
under some circumstances (#3745)
|
||||
- Fix a crash with multi-line magic comments like `type: ignore` within parentheses
|
||||
(#3740)
|
||||
- Fix error in AST validation when _Black_ removes trailing whitespace in a type comment
|
||||
(#3773)
|
||||
|
||||
### Preview style
|
||||
|
||||
- Implicitly concatenated strings used as function args are no longer wrapped inside
|
||||
parentheses (#3640)
|
||||
- Remove blank lines between a class definition and its docstring (#3692)
|
||||
|
||||
### Configuration
|
||||
|
||||
- The `--workers` argument to _Black_ can now be specified via the `BLACK_NUM_WORKERS`
|
||||
environment variable (#3743)
|
||||
- `.pytest_cache`, `.ruff_cache` and `.vscode` are now excluded by default (#3691)
|
||||
- Fix _Black_ not honouring `pyproject.toml` settings when running `--stdin-filename`
|
||||
and the `pyproject.toml` found isn't in the current working directory (#3719)
|
||||
- _Black_ will now error if `exclude` and `extend-exclude` have invalid data types in
|
||||
`pyproject.toml`, instead of silently doing the wrong thing (#3764)
|
||||
|
||||
### Packaging
|
||||
|
||||
- Upgrade mypyc from 0.991 to 1.3 (#3697)
|
||||
- Remove patching of Click that mitigated errors on Python 3.6 with `LANG=C` (#3768)
|
||||
|
||||
### Parser
|
||||
|
||||
- Add support for the new PEP 695 syntax in Python 3.12 (#3703)
|
||||
|
||||
### Performance
|
||||
|
||||
- Speed up _Black_ significantly when the cache is full (#3751)
|
||||
- Avoid importing `IPython` in a case where we wouldn't need it (#3748)
|
||||
|
||||
### Output
|
||||
|
||||
- Use aware UTC datetimes internally, avoids deprecation warning on Python 3.12 (#3728)
|
||||
- Change verbose logging to exactly mirror _Black_'s logic for source discovery (#3749)
|
||||
|
||||
### _Blackd_
|
||||
|
||||
- The `blackd` argument parser now shows the default values for options in their help
|
||||
text (#3712)
|
||||
|
||||
### Integrations
|
||||
|
||||
- Black is now tested with
|
||||
[`PYTHONWARNDEFAULTENCODING = 1`](https://docs.python.org/3/library/io.html#io-encoding-warning)
|
||||
(#3763)
|
||||
- Update GitHub Action to display black output in the job summary (#3688)
|
||||
|
||||
### Documentation
|
||||
|
||||
- Add a CITATION.cff file to the root of the repository, containing metadata on how to
|
||||
cite this software (#3723)
|
||||
- Update the _classes_ and _exceptions_ documentation in Developer reference to match
|
||||
the latest code base (#3755)
|
||||
|
||||
## 23.3.0
|
||||
|
||||
### Highlights
|
||||
@ -117,8 +751,6 @@ versions separately.
|
||||
|
||||
### Stable style
|
||||
|
||||
<!-- Changes that affect Black's stable style -->
|
||||
|
||||
- Introduce the 2023 stable style, which incorporates most aspects of last year's
|
||||
preview style (#3418). Specific changes:
|
||||
- Enforce empty lines before classes and functions with sticky leading comments
|
||||
@ -152,8 +784,6 @@ versions separately.
|
||||
|
||||
### Preview style
|
||||
|
||||
<!-- Changes that affect Black's preview style -->
|
||||
|
||||
- Format hex codes in unicode escape sequences in string literals (#2916)
|
||||
- Add parentheses around `if`-`else` expressions (#2278)
|
||||
- Improve performance on large expressions that contain many strings (#3467)
|
||||
@ -184,15 +814,11 @@ versions separately.
|
||||
|
||||
### Configuration
|
||||
|
||||
<!-- Changes to how Black can be configured -->
|
||||
|
||||
- Black now tries to infer its `--target-version` from the project metadata specified in
|
||||
`pyproject.toml` (#3219)
|
||||
|
||||
### Packaging
|
||||
|
||||
<!-- Changes to how Black is packaged, such as dependency requirements -->
|
||||
|
||||
- Upgrade mypyc from `0.971` to `0.991` so mypycified _Black_ can be built on armv7
|
||||
(#3380)
|
||||
- This also fixes some crashes while using compiled Black with a debug build of
|
||||
@ -205,8 +831,6 @@ versions separately.
|
||||
|
||||
### Output
|
||||
|
||||
<!-- Changes to Black's terminal output and error messages -->
|
||||
|
||||
- Calling `black --help` multiple times will return the same help contents each time
|
||||
(#3516)
|
||||
- Verbose logging now shows the values of `pyproject.toml` configuration variables
|
||||
@ -216,25 +840,18 @@ versions separately.
|
||||
|
||||
### Integrations
|
||||
|
||||
<!-- For example, Docker, GitHub Actions, pre-commit, editors -->
|
||||
|
||||
- Move 3.11 CI to normal flow now that all dependencies support 3.11 (#3446)
|
||||
- Docker: Add new `latest_prerelease` tag automation to follow latest black alpha
|
||||
release on docker images (#3465)
|
||||
|
||||
### Documentation
|
||||
|
||||
<!-- Major changes to documentation and policies. Small docs changes
|
||||
don't need a changelog entry. -->
|
||||
|
||||
- Expand `vim-plug` installation instructions to offer more explicit options (#3468)
|
||||
|
||||
## 22.12.0
|
||||
|
||||
### Preview style
|
||||
|
||||
<!-- Changes that affect Black's preview style -->
|
||||
|
||||
- Enforce empty lines before classes and functions with sticky leading comments (#3302)
|
||||
- Reformat empty and whitespace-only files as either an empty file (if no newline is
|
||||
present) or as a single newline character (if a newline is present) (#3348)
|
||||
@ -247,8 +864,6 @@ versions separately.
|
||||
|
||||
### Configuration
|
||||
|
||||
<!-- Changes to how Black can be configured -->
|
||||
|
||||
- Fix incorrectly applied `.gitignore` rules by considering the `.gitignore` location
|
||||
and the relative path to the target file (#3338)
|
||||
- Fix incorrectly ignoring `.gitignore` presence when more than one source directory is
|
||||
@ -256,16 +871,12 @@ versions separately.
|
||||
|
||||
### Parser
|
||||
|
||||
<!-- Changes to the parser or to version autodetection -->
|
||||
|
||||
- Parsing support has been added for walruses inside generator expression that are
|
||||
passed as function args (for example,
|
||||
`any(match := my_re.match(text) for text in texts)`) (#3327).
|
||||
|
||||
### Integrations
|
||||
|
||||
<!-- For example, Docker, GitHub Actions, pre-commit, editors -->
|
||||
|
||||
- Vim plugin: Optionally allow using the system installation of Black via
|
||||
`let g:black_use_virtualenv = 0`(#3309)
|
||||
|
||||
@ -768,7 +1379,7 @@ and the first release covered by our new
|
||||
[`master`](https://github.com/psf/black/tree/main) branch with the
|
||||
[`main`](https://github.com/psf/black/tree/main) branch. Some additional changes in
|
||||
the source code were also made. (#2210)
|
||||
- Sigificantly reorganized the documentation to make much more sense. Check them out by
|
||||
- Significantly reorganized the documentation to make much more sense. Check them out by
|
||||
heading over to [the stable docs on RTD](https://black.readthedocs.io/en/stable/).
|
||||
(#2174)
|
||||
|
||||
|
22
CITATION.cff
Normal file
22
CITATION.cff
Normal file
@ -0,0 +1,22 @@
|
||||
cff-version: 1.2.0
|
||||
title: "Black: The uncompromising Python code formatter"
|
||||
message: >-
|
||||
If you use this software, please cite it using the metadata from this file.
|
||||
type: software
|
||||
authors:
|
||||
- family-names: Langa
|
||||
given-names: Łukasz
|
||||
- name: "contributors to Black"
|
||||
repository-code: "https://github.com/psf/black"
|
||||
url: "https://black.readthedocs.io/en/stable/"
|
||||
abstract: >-
|
||||
Black is the uncompromising Python code formatter. By using it, you agree to cede
|
||||
control over minutiae of hand-formatting. In return, Black gives you speed,
|
||||
determinism, and freedom from pycodestyle nagging about formatting. You will save time
|
||||
and mental energy for more important matters.
|
||||
|
||||
Blackened code looks the same regardless of the project you're reading. Formatting
|
||||
becomes transparent after a while and you can focus on the content instead.
|
||||
|
||||
Black makes code review faster by producing the smallest diffs possible.
|
||||
license: MIT
|
@ -1,10 +1,13 @@
|
||||
# Contributing to _Black_
|
||||
|
||||
Welcome! Happy to see you willing to make the project better. Have you read the entire
|
||||
[user documentation](https://black.readthedocs.io/en/latest/) yet?
|
||||
Welcome future contributor! We're happy to see you willing to make the project better.
|
||||
|
||||
Our [contributing documentation](https://black.readthedocs.org/en/latest/contributing/)
|
||||
contains details on all you need to know about contributing to _Black_, the basics to
|
||||
the internals of _Black_.
|
||||
If you aren't familiar with _Black_, or are looking for documentation on something
|
||||
specific, the [user documentation](https://black.readthedocs.io/en/latest/) is the best
|
||||
place to look.
|
||||
|
||||
We look forward to your contributions!
|
||||
For getting started on contributing, please read the
|
||||
[contributing documentation](https://black.readthedocs.org/en/latest/contributing/) for
|
||||
all you need to know.
|
||||
|
||||
Thank you, and we look forward to your contributions!
|
||||
|
17
Dockerfile
17
Dockerfile
@ -1,16 +1,19 @@
|
||||
FROM python:3-slim AS builder
|
||||
FROM python:3.12-slim AS builder
|
||||
|
||||
RUN mkdir /src
|
||||
COPY . /src/
|
||||
ENV VIRTUAL_ENV=/opt/venv
|
||||
ENV HATCH_BUILD_HOOKS_ENABLE=1
|
||||
# Install build tools to compile black + dependencies
|
||||
RUN apt update && apt install -y build-essential git python3-dev
|
||||
RUN python -m venv $VIRTUAL_ENV
|
||||
RUN . /opt/venv/bin/activate && pip install --no-cache-dir --upgrade pip setuptools wheel \
|
||||
# Install build tools to compile dependencies that don't have prebuilt wheels
|
||||
&& apt update && apt install -y git build-essential \
|
||||
&& cd /src \
|
||||
&& pip install --no-cache-dir .[colorama,d]
|
||||
RUN python -m pip install --no-cache-dir hatch hatch-fancy-pypi-readme hatch-vcs
|
||||
RUN . /opt/venv/bin/activate && pip install --no-cache-dir --upgrade pip setuptools \
|
||||
&& cd /src && hatch build -t wheel \
|
||||
&& pip install --no-cache-dir dist/*-cp* \
|
||||
&& pip install black[colorama,d,uvloop]
|
||||
|
||||
FROM python:3-slim
|
||||
FROM python:3.12-slim
|
||||
|
||||
# copy only Python packages to limit the image size
|
||||
COPY --from=builder /opt/venv /opt/venv
|
||||
|
10
README.md
10
README.md
@ -8,7 +8,7 @@
|
||||
<a href="https://coveralls.io/github/psf/black?branch=main"><img alt="Coverage Status" src="https://coveralls.io/repos/github/psf/black/badge.svg?branch=main"></a>
|
||||
<a href="https://github.com/psf/black/blob/main/LICENSE"><img alt="License: MIT" src="https://black.readthedocs.io/en/stable/_static/license.svg"></a>
|
||||
<a href="https://pypi.org/project/black/"><img alt="PyPI" src="https://img.shields.io/pypi/v/black"></a>
|
||||
<a href="https://pepy.tech/project/black"><img alt="Downloads" src="https://pepy.tech/badge/black"></a>
|
||||
<a href="https://pepy.tech/project/black"><img alt="Downloads" src="https://static.pepy.tech/badge/black"></a>
|
||||
<a href="https://anaconda.org/conda-forge/black/"><img alt="conda-forge" src="https://img.shields.io/conda/dn/conda-forge/black.svg?label=conda-forge"></a>
|
||||
<a href="https://github.com/psf/black"><img alt="Code style: black" src="https://img.shields.io/badge/code%20style-black-000000.svg"></a>
|
||||
</p>
|
||||
@ -38,7 +38,7 @@ Try it out now using the [Black Playground](https://black.vercel.app). Watch the
|
||||
|
||||
### Installation
|
||||
|
||||
_Black_ can be installed by running `pip install black`. It requires Python 3.7+ to run.
|
||||
_Black_ can be installed by running `pip install black`. It requires Python 3.9+ to run.
|
||||
If you want to format Jupyter Notebooks, install with `pip install "black[jupyter]"`.
|
||||
|
||||
If you can't wait for the latest _hotness_ and want to install from GitHub, use:
|
||||
@ -68,7 +68,7 @@ projects, small and big. _Black_ has a comprehensive test suite, with efficient
|
||||
tests, and our own auto formatting and parallel Continuous Integration runner. Now that
|
||||
we have become stable, you should not expect large formatting changes in the future.
|
||||
Stylistic changes will mostly be responses to bug reports and support for new Python
|
||||
syntax. For more information please refer to the
|
||||
syntax. For more information please refer to
|
||||
[The Black Code Style](https://black.readthedocs.io/en/stable/the_black_code_style/index.html).
|
||||
|
||||
Also, as a safety measure which slows down processing, _Black_ will check that the
|
||||
@ -86,7 +86,7 @@ take previous formatting into account (see
|
||||
for exceptions).
|
||||
|
||||
Our documentation covers the current _Black_ code style, but planned changes to it are
|
||||
also documented. They're both worth taking a look:
|
||||
also documented. They're both worth taking a look at:
|
||||
|
||||
- [The _Black_ Code Style: Current style](https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html)
|
||||
- [The _Black_ Code Style: Future style](https://black.readthedocs.io/en/stable/the_black_code_style/future_style.html)
|
||||
@ -137,7 +137,7 @@ SQLAlchemy, Poetry, PyPA applications (Warehouse, Bandersnatch, Pipenv, virtuale
|
||||
pandas, Pillow, Twisted, LocalStack, every Datadog Agent Integration, Home Assistant,
|
||||
Zulip, Kedro, OpenOA, FLORIS, ORBIT, WOMBAT, and many more.
|
||||
|
||||
The following organizations use _Black_: Facebook, Dropbox, KeepTruckin, Mozilla, Quora,
|
||||
The following organizations use _Black_: Dropbox, KeepTruckin, Lyft, Mozilla, Quora,
|
||||
Duolingo, QuantumBlack, Tesla, Archer Aviation.
|
||||
|
||||
Are we missing anyone? Let us know.
|
||||
|
35
action.yml
35
action.yml
@ -27,18 +27,46 @@ inputs:
|
||||
description: 'Python Version specifier (PEP440) - e.g. "21.5b1"'
|
||||
required: false
|
||||
default: ""
|
||||
use_pyproject:
|
||||
description: Read Black version specifier from pyproject.toml if `true`.
|
||||
required: false
|
||||
default: "false"
|
||||
summary:
|
||||
description: "Whether to add the output to the workflow summary"
|
||||
required: false
|
||||
default: true
|
||||
branding:
|
||||
color: "black"
|
||||
icon: "check-circle"
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- run: |
|
||||
- name: black
|
||||
run: |
|
||||
# Even when black fails, do not close the shell
|
||||
set +e
|
||||
|
||||
if [ "$RUNNER_OS" == "Windows" ]; then
|
||||
python $GITHUB_ACTION_PATH/action/main.py
|
||||
runner="python"
|
||||
else
|
||||
python3 $GITHUB_ACTION_PATH/action/main.py
|
||||
runner="python3"
|
||||
fi
|
||||
|
||||
out=$(${runner} $GITHUB_ACTION_PATH/action/main.py)
|
||||
exit_code=$?
|
||||
|
||||
# Display the raw output in the step
|
||||
echo "${out}"
|
||||
|
||||
if [ "${{ inputs.summary }}" == "true" ]; then
|
||||
# Display the Markdown output in the job summary
|
||||
echo "\`\`\`python" >> $GITHUB_STEP_SUMMARY
|
||||
echo "${out}" >> $GITHUB_STEP_SUMMARY
|
||||
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
|
||||
fi
|
||||
|
||||
# Exit with the exit-code returned by Black
|
||||
exit ${exit_code}
|
||||
env:
|
||||
# TODO: Remove once https://github.com/actions/runner/issues/665 is fixed.
|
||||
INPUT_OPTIONS: ${{ inputs.options }}
|
||||
@ -46,5 +74,6 @@ runs:
|
||||
INPUT_JUPYTER: ${{ inputs.jupyter }}
|
||||
INPUT_BLACK_ARGS: ${{ inputs.black_args }}
|
||||
INPUT_VERSION: ${{ inputs.version }}
|
||||
INPUT_USE_PYPROJECT: ${{ inputs.use_pyproject }}
|
||||
pythonioencoding: utf-8
|
||||
shell: bash
|
||||
|
133
action/main.py
133
action/main.py
@ -1,8 +1,11 @@
|
||||
import os
|
||||
import re
|
||||
import shlex
|
||||
import shutil
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from subprocess import PIPE, STDOUT, run
|
||||
from typing import Union
|
||||
|
||||
ACTION_PATH = Path(os.environ["GITHUB_ACTION_PATH"])
|
||||
ENV_PATH = ACTION_PATH / ".black-env"
|
||||
@ -12,12 +15,109 @@
|
||||
JUPYTER = os.getenv("INPUT_JUPYTER") == "true"
|
||||
BLACK_ARGS = os.getenv("INPUT_BLACK_ARGS", default="")
|
||||
VERSION = os.getenv("INPUT_VERSION", default="")
|
||||
USE_PYPROJECT = os.getenv("INPUT_USE_PYPROJECT") == "true"
|
||||
|
||||
BLACK_VERSION_RE = re.compile(r"^black([^A-Z0-9._-]+.*)$", re.IGNORECASE)
|
||||
EXTRAS_RE = re.compile(r"\[.*\]")
|
||||
EXPORT_SUBST_FAIL_RE = re.compile(r"\$Format:.*\$")
|
||||
|
||||
|
||||
def determine_version_specifier() -> str:
|
||||
"""Determine the version of Black to install.
|
||||
|
||||
The version can be specified either via the `with.version` input or via the
|
||||
pyproject.toml file if `with.use_pyproject` is set to `true`.
|
||||
"""
|
||||
if USE_PYPROJECT and VERSION:
|
||||
print(
|
||||
"::error::'with.version' and 'with.use_pyproject' inputs are "
|
||||
"mutually exclusive.",
|
||||
file=sys.stderr,
|
||||
flush=True,
|
||||
)
|
||||
sys.exit(1)
|
||||
if USE_PYPROJECT:
|
||||
return read_version_specifier_from_pyproject()
|
||||
elif VERSION and VERSION[0] in "0123456789":
|
||||
return f"=={VERSION}"
|
||||
else:
|
||||
return VERSION
|
||||
|
||||
|
||||
def read_version_specifier_from_pyproject() -> str:
|
||||
if sys.version_info < (3, 11):
|
||||
print(
|
||||
"::error::'with.use_pyproject' input requires Python 3.11 or later.",
|
||||
file=sys.stderr,
|
||||
flush=True,
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
import tomllib # type: ignore[import-not-found,unreachable]
|
||||
|
||||
try:
|
||||
with Path("pyproject.toml").open("rb") as fp:
|
||||
pyproject = tomllib.load(fp)
|
||||
except FileNotFoundError:
|
||||
print(
|
||||
"::error::'with.use_pyproject' input requires a pyproject.toml file.",
|
||||
file=sys.stderr,
|
||||
flush=True,
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
version = pyproject.get("tool", {}).get("black", {}).get("required-version")
|
||||
if version is not None:
|
||||
return f"=={version}"
|
||||
|
||||
arrays = [
|
||||
*pyproject.get("dependency-groups", {}).values(),
|
||||
pyproject.get("project", {}).get("dependencies"),
|
||||
*pyproject.get("project", {}).get("optional-dependencies", {}).values(),
|
||||
]
|
||||
for array in arrays:
|
||||
version = find_black_version_in_array(array)
|
||||
if version is not None:
|
||||
break
|
||||
|
||||
if version is None:
|
||||
print(
|
||||
"::error::'black' dependency missing from pyproject.toml.",
|
||||
file=sys.stderr,
|
||||
flush=True,
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
return version
|
||||
|
||||
|
||||
def find_black_version_in_array(array: object) -> Union[str, None]:
|
||||
if not isinstance(array, list):
|
||||
return None
|
||||
try:
|
||||
for item in array:
|
||||
# Rudimentary PEP 508 parsing.
|
||||
item = item.split(";")[0]
|
||||
item = EXTRAS_RE.sub("", item).strip()
|
||||
if item == "black":
|
||||
print(
|
||||
"::error::Version specifier missing for 'black' dependency in "
|
||||
"pyproject.toml.",
|
||||
file=sys.stderr,
|
||||
flush=True,
|
||||
)
|
||||
sys.exit(1)
|
||||
elif m := BLACK_VERSION_RE.match(item):
|
||||
return m.group(1).strip()
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
run([sys.executable, "-m", "venv", str(ENV_PATH)], check=True)
|
||||
|
||||
version_specifier = VERSION
|
||||
if VERSION and VERSION[0] in "0123456789":
|
||||
version_specifier = f"=={VERSION}"
|
||||
version_specifier = determine_version_specifier()
|
||||
if JUPYTER:
|
||||
extra_deps = "[colorama,jupyter]"
|
||||
else:
|
||||
@ -32,12 +132,16 @@
|
||||
describe_name = line[len("describe-name: ") :].rstrip()
|
||||
break
|
||||
if not describe_name:
|
||||
print("::error::Failed to detect action version.", flush=True)
|
||||
print("::error::Failed to detect action version.", file=sys.stderr, flush=True)
|
||||
sys.exit(1)
|
||||
# expected format is one of:
|
||||
# - 23.1.0
|
||||
# - 23.1.0-51-g448bba7
|
||||
if describe_name.count("-") < 2:
|
||||
# - $Format:%(describe:tags=true,match=*[0-9]*)$ (if export-subst fails)
|
||||
if (
|
||||
describe_name.count("-") < 2
|
||||
and EXPORT_SUBST_FAIL_RE.match(describe_name) is None
|
||||
):
|
||||
# the action's commit matches a tag exactly, install exact version from PyPI
|
||||
req = f"black{extra_deps}=={describe_name}"
|
||||
else:
|
||||
@ -53,15 +157,26 @@
|
||||
)
|
||||
if pip_proc.returncode:
|
||||
print(pip_proc.stdout)
|
||||
print("::error::Failed to install Black.", flush=True)
|
||||
print("::error::Failed to install Black.", file=sys.stderr, flush=True)
|
||||
sys.exit(pip_proc.returncode)
|
||||
|
||||
|
||||
base_cmd = [str(ENV_BIN / "black")]
|
||||
if BLACK_ARGS:
|
||||
# TODO: remove after a while since this is deprecated in favour of SRC + OPTIONS.
|
||||
proc = run([*base_cmd, *shlex.split(BLACK_ARGS)])
|
||||
proc = run(
|
||||
[*base_cmd, *shlex.split(BLACK_ARGS)],
|
||||
stdout=PIPE,
|
||||
stderr=STDOUT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
else:
|
||||
proc = run([*base_cmd, *shlex.split(OPTIONS), *shlex.split(SRC)])
|
||||
|
||||
proc = run(
|
||||
[*base_cmd, *shlex.split(OPTIONS), *shlex.split(SRC)],
|
||||
stdout=PIPE,
|
||||
stderr=STDOUT,
|
||||
encoding="utf-8",
|
||||
)
|
||||
shutil.rmtree(ENV_PATH, ignore_errors=True)
|
||||
print(proc.stdout)
|
||||
sys.exit(proc.returncode)
|
||||
|
@ -75,8 +75,8 @@ def _initialize_black_env(upgrade=False):
|
||||
return True
|
||||
|
||||
pyver = sys.version_info[:3]
|
||||
if pyver < (3, 7):
|
||||
print("Sorry, Black requires Python 3.7+ to run.")
|
||||
if pyver < (3, 9):
|
||||
print("Sorry, Black requires Python 3.9+ to run.")
|
||||
return False
|
||||
|
||||
from pathlib import Path
|
||||
|
@ -1,3 +1,3 @@
|
||||
[flake8]
|
||||
max-line-length = 88
|
||||
extend-ignore = E203
|
||||
extend-ignore = E203,E701
|
||||
|
@ -1,3 +1,3 @@
|
||||
[flake8]
|
||||
max-line-length = 88
|
||||
extend-ignore = E203
|
||||
extend-ignore = E203,E701
|
||||
|
@ -1,3 +1,3 @@
|
||||
[flake8]
|
||||
max-line-length = 88
|
||||
extend-ignore = E203
|
||||
extend-ignore = E203,E701
|
||||
|
3
docs/compatible_configs/pycodestyle/.flake8
Normal file
3
docs/compatible_configs/pycodestyle/.flake8
Normal file
@ -0,0 +1,3 @@
|
||||
[pycodestyle]
|
||||
max-line-length = 88
|
||||
ignore = E203,E701
|
3
docs/compatible_configs/pycodestyle/setup.cfg
Normal file
3
docs/compatible_configs/pycodestyle/setup.cfg
Normal file
@ -0,0 +1,3 @@
|
||||
[pycodestyle]
|
||||
max-line-length = 88
|
||||
ignore = E203,E701
|
3
docs/compatible_configs/pycodestyle/tox.ini
Normal file
3
docs/compatible_configs/pycodestyle/tox.ini
Normal file
@ -0,0 +1,3 @@
|
||||
[pycodestyle]
|
||||
max-line-length = 88
|
||||
ignore = E203,E701
|
51
docs/conf.py
51
docs/conf.py
@ -1,4 +1,3 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Configuration file for the Sphinx documentation builder.
|
||||
#
|
||||
@ -14,10 +13,12 @@
|
||||
#
|
||||
|
||||
import os
|
||||
import re
|
||||
import string
|
||||
from importlib.metadata import version
|
||||
from pathlib import Path
|
||||
|
||||
from pkg_resources import get_distribution
|
||||
from sphinx.application import Sphinx
|
||||
|
||||
CURRENT_DIR = Path(__file__).parent
|
||||
|
||||
@ -25,12 +26,33 @@
|
||||
def make_pypi_svg(version: str) -> None:
|
||||
template: Path = CURRENT_DIR / "_static" / "pypi_template.svg"
|
||||
target: Path = CURRENT_DIR / "_static" / "pypi.svg"
|
||||
with open(str(template), "r", encoding="utf8") as f:
|
||||
with open(str(template), encoding="utf8") as f:
|
||||
svg: str = string.Template(f.read()).substitute(version=version)
|
||||
with open(str(target), "w", encoding="utf8") as f:
|
||||
f.write(svg)
|
||||
|
||||
|
||||
def replace_pr_numbers_with_links(content: str) -> str:
|
||||
"""Replaces all PR numbers with the corresponding GitHub link."""
|
||||
return re.sub(r"#(\d+)", r"[#\1](https://github.com/psf/black/pull/\1)", content)
|
||||
|
||||
|
||||
def handle_include_read(
|
||||
app: Sphinx,
|
||||
relative_path: Path,
|
||||
parent_docname: str,
|
||||
content: list[str],
|
||||
) -> None:
|
||||
"""Handler for the include-read sphinx event."""
|
||||
if parent_docname == "change_log":
|
||||
content[0] = replace_pr_numbers_with_links(content[0])
|
||||
|
||||
|
||||
def setup(app: Sphinx) -> None:
|
||||
"""Sets up a minimal sphinx extension."""
|
||||
app.connect("include-read", handle_include_read)
|
||||
|
||||
|
||||
# Necessary so Click doesn't hit an encode error when called by
|
||||
# sphinxcontrib-programoutput on Windows.
|
||||
os.putenv("pythonioencoding", "utf-8")
|
||||
@ -43,7 +65,7 @@ def make_pypi_svg(version: str) -> None:
|
||||
|
||||
# Autopopulate version
|
||||
# The version, including alpha/beta/rc tags, but not commit hash and datestamps
|
||||
release = get_distribution("black").version.split("+")[0]
|
||||
release = version("black").split("+")[0]
|
||||
# The short X.Y version.
|
||||
version = release
|
||||
for sp in "abcfr":
|
||||
@ -149,15 +171,13 @@ def make_pypi_svg(version: str) -> None:
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
(
|
||||
latex_documents = [(
|
||||
master_doc,
|
||||
"black.tex",
|
||||
"Documentation for Black",
|
||||
"Łukasz Langa and contributors to Black",
|
||||
"manual",
|
||||
)
|
||||
]
|
||||
)]
|
||||
|
||||
|
||||
# -- Options for manual page output ------------------------------------------
|
||||
@ -172,8 +192,7 @@ def make_pypi_svg(version: str) -> None:
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(
|
||||
texinfo_documents = [(
|
||||
master_doc,
|
||||
"Black",
|
||||
"Documentation for Black",
|
||||
@ -181,8 +200,7 @@ def make_pypi_svg(version: str) -> None:
|
||||
"Black",
|
||||
"The uncompromising Python code formatter",
|
||||
"Miscellaneous",
|
||||
)
|
||||
]
|
||||
)]
|
||||
|
||||
|
||||
# -- Options for Epub output -------------------------------------------------
|
||||
@ -210,7 +228,14 @@ def make_pypi_svg(version: str) -> None:
|
||||
|
||||
autodoc_member_order = "bysource"
|
||||
|
||||
# -- sphinx-copybutton configuration ----------------------------------------
|
||||
copybutton_prompt_text = (
|
||||
r">>> |\.\.\. |> |\$ |\# | In \[\d*\]: | {2,5}\.\.\.: | {5,8}: "
|
||||
)
|
||||
copybutton_prompt_is_regexp = True
|
||||
copybutton_remove_prompts = True
|
||||
|
||||
# -- Options for intersphinx extension ---------------------------------------
|
||||
|
||||
# Example configuration for intersphinx: refer to the Python standard library.
|
||||
intersphinx_mapping = {"https://docs.python.org/3/": None}
|
||||
intersphinx_mapping = {"<name>": ("https://docs.python.org/3/", None)}
|
||||
|
@ -9,7 +9,6 @@ the_basics
|
||||
gauging_changes
|
||||
issue_triage
|
||||
release_process
|
||||
reference/reference_summary
|
||||
```
|
||||
|
||||
Welcome! Happy to see you willing to make the project better. Have you read the entire
|
||||
@ -24,7 +23,8 @@ not very). This is deliberate. _Black_ aims to provide a consistent style and ta
|
||||
opportunities for arguing about style.
|
||||
|
||||
Bug reports and fixes are always welcome! Please follow the
|
||||
[issue template on GitHub](https://github.com/psf/black/issues/new) for best results.
|
||||
[issue templates on GitHub](https://github.com/psf/black/issues/new/choose) for best
|
||||
results.
|
||||
|
||||
Before you suggest a new feature or configuration knob, ask yourself why you want it. If
|
||||
it enables better integration with some workflow, fixes an inconsistency, speeds things
|
||||
@ -41,9 +41,5 @@ This section covers the following topics:
|
||||
- {doc}`the_basics`
|
||||
- {doc}`gauging_changes`
|
||||
- {doc}`release_process`
|
||||
- {doc}`reference/reference_summary`
|
||||
|
||||
For an overview on contributing to the _Black_, please checkout {doc}`the_basics`.
|
||||
|
||||
If you need a reference of the functions, classes, etc. available to you while
|
||||
developing _Black_, there's the {doc}`reference/reference_summary` docs.
|
||||
|
@ -1,82 +0,0 @@
|
||||
*Black* classes
|
||||
===============
|
||||
|
||||
*Contents are subject to change.*
|
||||
|
||||
.. currentmodule:: black
|
||||
|
||||
:class:`BracketTracker`
|
||||
-----------------------
|
||||
|
||||
.. autoclass:: black.brackets.BracketTracker
|
||||
:members:
|
||||
|
||||
:class:`Line`
|
||||
-------------
|
||||
|
||||
.. autoclass:: black.lines.Line
|
||||
:members:
|
||||
:special-members: __str__, __bool__
|
||||
|
||||
:class:`LinesBlock`
|
||||
-------------------------
|
||||
|
||||
.. autoclass:: black.lines.LinesBlock
|
||||
:members:
|
||||
|
||||
:class:`EmptyLineTracker`
|
||||
-------------------------
|
||||
|
||||
.. autoclass:: black.lines.EmptyLineTracker
|
||||
:members:
|
||||
|
||||
:class:`LineGenerator`
|
||||
----------------------
|
||||
|
||||
.. autoclass:: black.linegen.LineGenerator
|
||||
:show-inheritance:
|
||||
:members:
|
||||
|
||||
:class:`ProtoComment`
|
||||
---------------------
|
||||
|
||||
.. autoclass:: black.comments.ProtoComment
|
||||
:members:
|
||||
|
||||
:class:`Report`
|
||||
---------------
|
||||
|
||||
.. autoclass:: black.report.Report
|
||||
:members:
|
||||
:special-members: __str__
|
||||
|
||||
:class:`Visitor`
|
||||
----------------
|
||||
|
||||
.. autoclass:: black.nodes.Visitor
|
||||
:show-inheritance:
|
||||
:members:
|
||||
|
||||
Enums
|
||||
=====
|
||||
|
||||
:class:`Changed`
|
||||
----------------
|
||||
|
||||
.. autoclass:: black.Changed
|
||||
:show-inheritance:
|
||||
:members:
|
||||
|
||||
:class:`Mode`
|
||||
-----------------
|
||||
|
||||
.. autoclass:: black.Mode
|
||||
:show-inheritance:
|
||||
:members:
|
||||
|
||||
:class:`WriteBack`
|
||||
------------------
|
||||
|
||||
.. autoclass:: black.WriteBack
|
||||
:show-inheritance:
|
||||
:members:
|
@ -1,12 +0,0 @@
|
||||
*Black* exceptions
|
||||
==================
|
||||
|
||||
*Contents are subject to change.*
|
||||
|
||||
.. currentmodule:: black
|
||||
|
||||
.. autoexception:: black.linegen.CannotSplit
|
||||
|
||||
.. autoexception:: black.NothingChanged
|
||||
|
||||
.. autoexception:: black.InvalidInput
|
@ -1,180 +0,0 @@
|
||||
*Black* functions
|
||||
=================
|
||||
|
||||
*Contents are subject to change.*
|
||||
|
||||
.. currentmodule:: black
|
||||
|
||||
Assertions and checks
|
||||
---------------------
|
||||
|
||||
.. autofunction:: black.assert_equivalent
|
||||
|
||||
.. autofunction:: black.assert_stable
|
||||
|
||||
.. autofunction:: black.lines.can_be_split
|
||||
|
||||
.. autofunction:: black.lines.can_omit_invisible_parens
|
||||
|
||||
.. autofunction:: black.nodes.is_empty_tuple
|
||||
|
||||
.. autofunction:: black.nodes.is_import
|
||||
|
||||
.. autofunction:: black.lines.is_line_short_enough
|
||||
|
||||
.. autofunction:: black.nodes.is_multiline_string
|
||||
|
||||
.. autofunction:: black.nodes.is_one_tuple
|
||||
|
||||
.. autofunction:: black.brackets.is_split_after_delimiter
|
||||
|
||||
.. autofunction:: black.brackets.is_split_before_delimiter
|
||||
|
||||
.. autofunction:: black.nodes.is_stub_body
|
||||
|
||||
.. autofunction:: black.nodes.is_stub_suite
|
||||
|
||||
.. autofunction:: black.nodes.is_vararg
|
||||
|
||||
.. autofunction:: black.nodes.is_yield
|
||||
|
||||
|
||||
Formatting
|
||||
----------
|
||||
|
||||
.. autofunction:: black.format_file_contents
|
||||
|
||||
.. autofunction:: black.format_file_in_place
|
||||
|
||||
.. autofunction:: black.format_stdin_to_stdout
|
||||
|
||||
.. autofunction:: black.format_str
|
||||
|
||||
.. autofunction:: black.reformat_one
|
||||
|
||||
.. autofunction:: black.concurrency.schedule_formatting
|
||||
|
||||
File operations
|
||||
---------------
|
||||
|
||||
.. autofunction:: black.dump_to_file
|
||||
|
||||
.. autofunction:: black.find_project_root
|
||||
|
||||
.. autofunction:: black.gen_python_files
|
||||
|
||||
.. autofunction:: black.read_pyproject_toml
|
||||
|
||||
Parsing
|
||||
-------
|
||||
|
||||
.. autofunction:: black.decode_bytes
|
||||
|
||||
.. autofunction:: black.parsing.lib2to3_parse
|
||||
|
||||
.. autofunction:: black.parsing.lib2to3_unparse
|
||||
|
||||
Split functions
|
||||
---------------
|
||||
|
||||
.. autofunction:: black.linegen.bracket_split_build_line
|
||||
|
||||
.. autofunction:: black.linegen.bracket_split_succeeded_or_raise
|
||||
|
||||
.. autofunction:: black.linegen.delimiter_split
|
||||
|
||||
.. autofunction:: black.linegen.left_hand_split
|
||||
|
||||
.. autofunction:: black.linegen.right_hand_split
|
||||
|
||||
.. autofunction:: black.linegen.standalone_comment_split
|
||||
|
||||
.. autofunction:: black.linegen.transform_line
|
||||
|
||||
Caching
|
||||
-------
|
||||
|
||||
.. autofunction:: black.cache.filter_cached
|
||||
|
||||
.. autofunction:: black.cache.get_cache_dir
|
||||
|
||||
.. autofunction:: black.cache.get_cache_file
|
||||
|
||||
.. autofunction:: black.cache.get_cache_info
|
||||
|
||||
.. autofunction:: black.cache.read_cache
|
||||
|
||||
.. autofunction:: black.cache.write_cache
|
||||
|
||||
Utilities
|
||||
---------
|
||||
|
||||
.. py:function:: black.debug.DebugVisitor.show(code: str) -> None
|
||||
|
||||
Pretty-print the lib2to3 AST of a given string of `code`.
|
||||
|
||||
.. autofunction:: black.concurrency.cancel
|
||||
|
||||
.. autofunction:: black.nodes.child_towards
|
||||
|
||||
.. autofunction:: black.nodes.container_of
|
||||
|
||||
.. autofunction:: black.comments.convert_one_fmt_off_pair
|
||||
|
||||
.. autofunction:: black.diff
|
||||
|
||||
.. autofunction:: black.linegen.dont_increase_indentation
|
||||
|
||||
.. autofunction:: black.numerics.format_float_or_int_string
|
||||
|
||||
.. autofunction:: black.nodes.ensure_visible
|
||||
|
||||
.. autofunction:: black.lines.enumerate_reversed
|
||||
|
||||
.. autofunction:: black.comments.generate_comments
|
||||
|
||||
.. autofunction:: black.comments.generate_ignored_nodes
|
||||
|
||||
.. autofunction:: black.comments.is_fmt_on
|
||||
|
||||
.. autofunction:: black.comments.children_contains_fmt_on
|
||||
|
||||
.. autofunction:: black.nodes.first_leaf_of
|
||||
|
||||
.. autofunction:: black.linegen.generate_trailers_to_omit
|
||||
|
||||
.. autofunction:: black.get_future_imports
|
||||
|
||||
.. autofunction:: black.comments.list_comments
|
||||
|
||||
.. autofunction:: black.comments.make_comment
|
||||
|
||||
.. autofunction:: black.linegen.maybe_make_parens_invisible_in_atom
|
||||
|
||||
.. autofunction:: black.brackets.max_delimiter_priority_in_atom
|
||||
|
||||
.. autofunction:: black.normalize_fmt_off
|
||||
|
||||
.. autofunction:: black.numerics.normalize_numeric_literal
|
||||
|
||||
.. autofunction:: black.linegen.normalize_prefix
|
||||
|
||||
.. autofunction:: black.strings.normalize_string_prefix
|
||||
|
||||
.. autofunction:: black.strings.normalize_string_quotes
|
||||
|
||||
.. autofunction:: black.linegen.normalize_invisible_parens
|
||||
|
||||
.. autofunction:: black.patch_click
|
||||
|
||||
.. autofunction:: black.nodes.preceding_leaf
|
||||
|
||||
.. autofunction:: black.re_compile_maybe_verbose
|
||||
|
||||
.. autofunction:: black.linegen.should_split_line
|
||||
|
||||
.. autofunction:: black.concurrency.shutdown
|
||||
|
||||
.. autofunction:: black.strings.sub_twice
|
||||
|
||||
.. autofunction:: black.nodes.whitespace
|
@ -1,16 +0,0 @@
|
||||
Developer reference
|
||||
===================
|
||||
|
||||
.. note::
|
||||
|
||||
The documentation here is quite outdated and has been neglected. Many objects worthy
|
||||
of inclusion aren't documented. Contributions are appreciated!
|
||||
|
||||
*Contents are subject to change.*
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
reference_classes
|
||||
reference_functions
|
||||
reference_exceptions
|
@ -32,19 +32,29 @@ The 10,000 foot view of the release process is that you prepare a release PR and
|
||||
publish a [GitHub Release]. This triggers [release automation](#release-workflows) that
|
||||
builds all release artifacts and publishes them to the various platforms we publish to.
|
||||
|
||||
We now have a `scripts/release.py` script to help with cutting the release PRs.
|
||||
|
||||
- `python3 scripts/release.py --help` is your friend.
|
||||
- `release.py` has only been tested in Python 3.12 (so get with the times :D)
|
||||
|
||||
To cut a release:
|
||||
|
||||
1. Determine the release's version number
|
||||
- **_Black_ follows the [CalVer] versioning standard using the `YY.M.N` format**
|
||||
- So unless there already has been a release during this month, `N` should be `0`
|
||||
- Example: the first release in January, 2022 → `22.1.0`
|
||||
   - `release.py` will calculate this and log to stderr for your copy-paste pleasure
|
||||
1. File a PR editing `CHANGES.md` and the docs to version the latest changes
|
||||
- Run `python3 scripts/release.py [--debug]` to generate most changes
|
||||
   - Sub headings in the template, if they have no bullet points, need manual removal
|
||||
_PR welcome to improve :D_
|
||||
1. If `release.py` fails, edit manually; otherwise, yay, skip this step!
|
||||
1. Replace the `## Unreleased` header with the version number
|
||||
1. Remove any empty sections for the current release
|
||||
1. (_optional_) Read through and copy-edit the changelog (eg. by moving entries,
|
||||
fixing typos, or rephrasing entries)
|
||||
1. Add a new empty template for the next release above
|
||||
([template below](#changelog-template))
|
||||
1. Double-check that no changelog entries since the last release were put in the
|
||||
wrong section (e.g., run `git diff <last release> CHANGES.md`)
|
||||
1. Update references to the latest version in
|
||||
{doc}`/integrations/source_version_control` and
|
||||
{doc}`/usage_and_configuration/the_basics`
|
||||
@ -61,6 +71,11 @@ To cut a release:
|
||||
description box
|
||||
1. Publish the GitHub Release, triggering [release automation](#release-workflows) that
|
||||
will handle the rest
|
||||
1. Once CI is done add + commit (git push - No review) a new empty template for the next
|
||||
release to CHANGES.md _(Template is able to be copy pasted from release.py should we
|
||||
fail)_
|
||||
1. `python3 scripts/release.py --add-changes-template|-a [--debug]`
|
||||
1. Should that fail, please return to copy + paste
|
||||
1. At this point, you're basically done. It's good practice to go and [watch and verify
|
||||
that all the release workflows pass][black-actions], although you will receive a
|
||||
GitHub notification should something fail.
|
||||
@ -79,59 +94,6 @@ release is probably unnecessary.
|
||||
In the end, use your best judgement and ask other maintainers for their thoughts.
|
||||
```
|
||||
|
||||
### Changelog template
|
||||
|
||||
Use the following template for a clean changelog after the release:
|
||||
|
||||
```
|
||||
## Unreleased
|
||||
|
||||
### Highlights
|
||||
|
||||
<!-- Include any especially major or disruptive changes here -->
|
||||
|
||||
### Stable style
|
||||
|
||||
<!-- Changes that affect Black's stable style -->
|
||||
|
||||
### Preview style
|
||||
|
||||
<!-- Changes that affect Black's preview style -->
|
||||
|
||||
### Configuration
|
||||
|
||||
<!-- Changes to how Black can be configured -->
|
||||
|
||||
### Packaging
|
||||
|
||||
<!-- Changes to how Black is packaged, such as dependency requirements -->
|
||||
|
||||
### Parser
|
||||
|
||||
<!-- Changes to the parser or to version autodetection -->
|
||||
|
||||
### Performance
|
||||
|
||||
<!-- Changes that improve Black's performance. -->
|
||||
|
||||
### Output
|
||||
|
||||
<!-- Changes to Black's terminal output and error messages -->
|
||||
|
||||
### _Blackd_
|
||||
|
||||
<!-- Changes to blackd -->
|
||||
|
||||
### Integrations
|
||||
|
||||
<!-- For example, Docker, GitHub Actions, pre-commit, editors -->
|
||||
|
||||
### Documentation
|
||||
|
||||
<!-- Major changes to documentation and policies. Small docs changes
|
||||
don't need a changelog entry. -->
|
||||
```
|
||||
|
||||
## Release workflows
|
||||
|
||||
All of _Black_'s release automation uses [GitHub Actions]. All workflows are therefore
|
||||
|
@ -7,14 +7,23 @@ An overview on contributing to the _Black_ project.
|
||||
Development on the latest version of Python is preferred. You can use any operating
|
||||
system.
|
||||
|
||||
Install development dependencies inside a virtual environment of your choice, for
|
||||
First clone the _Black_ repository:
|
||||
|
||||
```console
|
||||
$ git clone https://github.com/psf/black.git
|
||||
$ cd black
|
||||
```
|
||||
|
||||
Then install development dependencies inside a virtual environment of your choice, for
|
||||
example:
|
||||
|
||||
```console
|
||||
$ python3 -m venv .venv
|
||||
$ source .venv/bin/activate
|
||||
$ source .venv/bin/activate # activation for linux and mac
|
||||
$ .venv\Scripts\activate # activation for windows
|
||||
|
||||
(.venv)$ pip install -r test_requirements.txt
|
||||
(.venv)$ pip install -e .[d]
|
||||
(.venv)$ pip install -e ".[d]"
|
||||
(.venv)$ pre-commit install
|
||||
```
|
||||
|
||||
@ -30,23 +39,92 @@ the root of the black repo:
|
||||
|
||||
# Optional Fuzz testing
|
||||
(.venv)$ tox -e fuzz
|
||||
|
||||
# Format Black itself
|
||||
(.venv)$ tox -e run_self
|
||||
```
|
||||
|
||||
### Development
|
||||
|
||||
Further examples of invoking the tests
|
||||
|
||||
```console
|
||||
# Run all of the above mentioned, in parallel
|
||||
(.venv)$ tox --parallel=auto
|
||||
|
||||
# Run tests on a specific python version
|
||||
(.venv)$ tox -e py39
|
||||
|
||||
# Run an individual test
|
||||
(.venv)$ pytest -k <test name>
|
||||
|
||||
# Pass arguments to pytest
|
||||
(.venv)$ tox -e py -- --no-cov
|
||||
|
||||
# Print full tree diff, see documentation below
|
||||
(.venv)$ tox -e py -- --print-full-tree
|
||||
|
||||
# Disable diff printing, see documentation below
|
||||
(.venv)$ tox -e py -- --print-tree-diff=False
|
||||
```
|
||||
|
||||
### Testing
|
||||
|
||||
All aspects of the _Black_ style should be tested. Normally, tests should be created as
|
||||
files in the `tests/data/cases` directory. These files consist of up to three parts:
|
||||
|
||||
- A line that starts with `# flags: ` followed by a set of command-line options. For
|
||||
example, if the line is `# flags: --preview --skip-magic-trailing-comma`, the test
|
||||
case will be run with preview mode on and the magic trailing comma off. The options
|
||||
accepted are mostly a subset of those of _Black_ itself, except for the
|
||||
`--minimum-version=` flag, which should be used when testing a grammar feature that
|
||||
works only in newer versions of Python. This flag ensures that we don't try to
|
||||
validate the AST on older versions and tests that we autodetect the Python version
|
||||
correctly when the feature is used. For the exact flags accepted, see the function
|
||||
`get_flags_parser` in `tests/util.py`. If this line is omitted, the default options
|
||||
are used.
|
||||
- A block of Python code used as input for the formatter.
|
||||
- The line `# output`, followed by the output of _Black_ when run on the previous block.
|
||||
If this is omitted, the test asserts that _Black_ will leave the input code unchanged.
|
||||
|
||||
_Black_ has two pytest command-line options affecting test files in `tests/data/` that
|
||||
are split into an input part, and an output part, separated by a line with `# output`.
|
||||
These can be passed to `pytest` through `tox`, or directly into pytest if not using
|
||||
`tox`.
|
||||
|
||||
#### `--print-full-tree`
|
||||
|
||||
Upon a failing test, print the full concrete syntax tree (CST) as it is after processing
|
||||
the input ("actual"), and the tree that's yielded after parsing the output ("expected").
|
||||
Note that a test can fail with different output with the same CST. This used to be the
|
||||
default, but now defaults to `False`.
|
||||
|
||||
#### `--print-tree-diff`
|
||||
|
||||
Upon a failing test, print the diff of the trees as described above. This is the
|
||||
default. To turn it off pass `--print-tree-diff=False`.
|
||||
|
||||
### News / Changelog Requirement
|
||||
|
||||
`Black` has CI that will check for an entry corresponding to your PR in `CHANGES.md`. If
|
||||
you feel this PR does not require a changelog entry please state that in a comment and a
|
||||
maintainer can add a `skip news` label to make the CI pass. Otherwise, please ensure you
|
||||
have a line in the following format:
|
||||
have a line in the following format added below the appropriate header:
|
||||
|
||||
```md
|
||||
- `Black` is now more awesome (#X)
|
||||
```
|
||||
|
||||
<!---
|
||||
The Next PR Number link uses HTML because of a bug in MyST-Parser that double-escapes the ampersand, causing the query parameters to not be processed.
|
||||
MyST-Parser issue: https://github.com/executablebooks/MyST-Parser/issues/760
|
||||
MyST-Parser stalled fix PR: https://github.com/executablebooks/MyST-Parser/pull/929
|
||||
-->
|
||||
|
||||
Note that X should be your PR number, not issue number! To work out X, please use
|
||||
[Next PR Number](https://ichard26.github.io/next-pr-number/?owner=psf&name=black). This
|
||||
is not perfect but saves a lot of release overhead as now the releaser does not need to
|
||||
go back and work out what to add to the `CHANGES.md` for each release.
|
||||
<a href="https://ichard26.github.io/next-pr-number/?owner=psf&name=black">Next PR
|
||||
Number</a>. This is not perfect but saves a lot of release overhead as now the releaser
|
||||
does not need to go back and work out what to add to the `CHANGES.md` for each release.
|
||||
|
||||
### Style Changes
|
||||
|
||||
@ -54,7 +132,7 @@ If a change would affect the advertised code style, please modify the documentat
|
||||
_Black_ code style) to reflect that change. Patches that fix unintended bugs in
|
||||
formatting don't need to be mentioned separately though. If the change is implemented
|
||||
with the `--preview` flag, please include the change in the future style document
|
||||
instead and write the changelog entry under a dedicated "Preview changes" heading.
|
||||
instead and write the changelog entry under the dedicated "Preview style" heading.
|
||||
|
||||
### Docs Testing
|
||||
|
||||
@ -62,17 +140,17 @@ If you make changes to docs, you can test they still build locally too.
|
||||
|
||||
```console
|
||||
(.venv)$ pip install -r docs/requirements.txt
|
||||
(.venv)$ pip install [-e] .[d]
|
||||
(.venv)$ pip install -e ".[d]"
|
||||
(.venv)$ sphinx-build -a -b html -W docs/ docs/_build/
|
||||
```
|
||||
|
||||
## Hygiene
|
||||
|
||||
If you're fixing a bug, add a test. Run it first to confirm it fails, then fix the bug,
|
||||
run it again to confirm it's really fixed.
|
||||
and run the test again to confirm it's really fixed.
|
||||
|
||||
If adding a new feature, add a test. In fact, always add a test. But wait, before adding
|
||||
any large feature, first open an issue for us to discuss the idea first.
|
||||
If adding a new feature, add a test. In fact, always add a test. If adding a large
|
||||
feature, please first open an issue to discuss it beforehand.
|
||||
|
||||
## Finally
|
||||
|
||||
|
35
docs/faq.md
35
docs/faq.md
@ -41,9 +41,10 @@ other tools, such as `# noqa`, may be moved by _Black_. See below for more detai
|
||||
Stable. _Black_ aims to enforce one style and one style only, with some room for
|
||||
pragmatism. See [The Black Code Style](the_black_code_style/index.md) for more details.
|
||||
|
||||
Starting in 2022, the formatting output will be stable for the releases made in the same
|
||||
year (other than unintentional bugs). It is possible to opt-in to the latest formatting
|
||||
styles, using the `--preview` flag.
|
||||
Starting in 2022, the formatting output is stable for the releases made in the same year
|
||||
(other than unintentional bugs). At the beginning of every year, the first release will
|
||||
make changes to the stable style. It is possible to opt in to the latest formatting
|
||||
styles using the `--preview` flag.
|
||||
|
||||
## Why is my file not formatted?
|
||||
|
||||
@ -57,8 +58,8 @@ _Black_ is timid about formatting Jupyter Notebooks. Cells containing any of the
|
||||
following will not be formatted:
|
||||
|
||||
- automagics (e.g. `pip install black`)
|
||||
- non-Python cell magics (e.g. `%%writeline`). These can be added with the flag
|
||||
`--python-cell-magics`, e.g. `black --python-cell-magics writeline hello.ipynb`.
|
||||
- non-Python cell magics (e.g. `%%writefile`). These can be added with the flag
|
||||
`--python-cell-magics`, e.g. `black --python-cell-magics writefile hello.ipynb`.
|
||||
- multiline magics, e.g.:
|
||||
|
||||
```python
|
||||
@ -76,26 +77,26 @@ following will not be formatted:
|
||||
- invalid syntax, as it can't be safely distinguished from automagics in the absence of
|
||||
a running `IPython` kernel.
|
||||
|
||||
## Why are Flake8's E203 and W503 violated?
|
||||
## Why does Flake8 report warnings?
|
||||
|
||||
Because they go against PEP 8. E203 falsely triggers on list
|
||||
[slices](the_black_code_style/current_style.md#slices), and adhering to W503 hinders
|
||||
readability because operators are misaligned. Disable W503 and enable the
|
||||
disabled-by-default counterpart W504. E203 should be disabled while changes are still
|
||||
[discussed](https://github.com/PyCQA/pycodestyle/issues/373).
|
||||
Some of Flake8's rules conflict with Black's style. We recommend disabling these rules.
|
||||
See [Using _Black_ with other tools](labels/why-pycodestyle-warnings).
|
||||
|
||||
## Which Python versions does Black support?
|
||||
|
||||
Currently the runtime requires Python 3.7-3.11. Formatting is supported for files
|
||||
containing syntax from Python 3.3 to 3.11. We promise to support at least all Python
|
||||
versions that have not reached their end of life. This is the case for both running
|
||||
_Black_ and formatting code.
|
||||
_Black_ generally supports all Python versions supported by CPython (see
|
||||
[the Python devguide](https://devguide.python.org/versions/) for current information).
|
||||
We promise to support at least all Python versions that have not reached their end of
|
||||
life. This is the case for both running _Black_ and formatting code.
|
||||
|
||||
Support for formatting Python 2 code was removed in version 22.0. While we've made no
|
||||
plans to stop supporting older Python 3 minor versions immediately, their support might
|
||||
also be removed some time in the future without a deprecation period.
|
||||
|
||||
Runtime support for 3.6 was removed in version 22.10.0.
|
||||
`await`/`async` as soft keywords/identifiers are no longer supported as of 25.2.0.
|
||||
|
||||
Runtime support for 3.6 was removed in version 22.10.0, for 3.7 in version 23.7.0, and
|
||||
for 3.8 in version 24.10.0.
|
||||
|
||||
## Why does my linter or typechecker complain after I format my code?
|
||||
|
||||
@ -107,7 +108,7 @@ codebase with _Black_.
|
||||
|
||||
## Can I run Black with PyPy?
|
||||
|
||||
Yes, there is support for PyPy 3.7 and higher.
|
||||
Yes, there is support for PyPy 3.8 and higher.
|
||||
|
||||
## Why does Black not detect syntax errors in my code?
|
||||
|
||||
|
@ -16,9 +16,11 @@ Also, you can try out _Black_ online for minimal fuss on the
|
||||
|
||||
## Installation
|
||||
|
||||
_Black_ can be installed by running `pip install black`. It requires Python 3.7+ to run.
|
||||
_Black_ can be installed by running `pip install black`. It requires Python 3.9+ to run.
|
||||
If you want to format Jupyter Notebooks, install with `pip install "black[jupyter]"`.
|
||||
|
||||
If you use pipx, you can install Black with `pipx install black`.
|
||||
|
||||
If you can't wait for the latest _hotness_ and want to install from GitHub, use:
|
||||
|
||||
`pip install git+https://github.com/psf/black`
|
||||
|
@ -18,7 +18,8 @@ previous revision that modified those lines.
|
||||
|
||||
So when migrating your project's code style to _Black_, reformat everything and commit
|
||||
the changes (preferably in one massive commit). Then put the full 40 characters commit
|
||||
identifier(s) into a file.
|
||||
identifier(s) into a file usually called `.git-blame-ignore-revs` at the root of your
|
||||
project directory.
|
||||
|
||||
```text
|
||||
# Migrate code style to Black
|
||||
@ -46,7 +47,6 @@ $ git config blame.ignoreRevsFile .git-blame-ignore-revs
|
||||
**The one caveat is that some online Git-repositories like GitLab do not yet support
|
||||
ignoring revisions using their native blame UI.** So blame information will be cluttered
|
||||
with a reformatting commit on those platforms. (If you'd like this feature, there's an
|
||||
open issue for [GitLab](https://gitlab.com/gitlab-org/gitlab/-/issues/31423)). This is
|
||||
however supported by
|
||||
[GitHub](https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view),
|
||||
currently in beta.
|
||||
open issue for [GitLab](https://gitlab.com/gitlab-org/gitlab/-/issues/31423)).
|
||||
[GitHub supports `.git-blame-ignore-revs`](https://docs.github.com/en/repositories/working-with-files/using-files/viewing-a-file#ignore-commits-in-the-blame-view)
|
||||
by default in blame views however.
|
||||
|
@ -134,10 +134,10 @@ profile = black
|
||||
|
||||
</details>
|
||||
|
||||
### Flake8
|
||||
### pycodestyle
|
||||
|
||||
[Flake8](https://pypi.org/p/flake8/) is a code linter. It warns you of syntax errors,
|
||||
possible bugs, stylistic errors, etc. For the most part, Flake8 follows
|
||||
[pycodestyle](https://pycodestyle.pycqa.org/) is a code linter. It warns you of syntax
|
||||
errors, possible bugs, stylistic errors, etc. For the most part, pycodestyle follows
|
||||
[PEP 8](https://www.python.org/dev/peps/pep-0008/) when warning about stylistic errors.
|
||||
There are a few deviations that cause incompatibilities with _Black_.
|
||||
|
||||
@ -145,67 +145,115 @@ There are a few deviations that cause incompatibilities with _Black_.
|
||||
|
||||
```
|
||||
max-line-length = 88
|
||||
extend-ignore = E203
|
||||
ignore = E203,E701
|
||||
```
|
||||
|
||||
(labels/why-pycodestyle-warnings)=
|
||||
|
||||
#### Why those options above?
|
||||
|
||||
##### `max-line-length`
|
||||
|
||||
As with isort, pycodestyle should be configured to allow lines up to the length limit of
|
||||
`88`, _Black_'s default.
|
||||
|
||||
##### `E203`
|
||||
|
||||
In some cases, as determined by PEP 8, _Black_ will enforce an equal amount of
|
||||
whitespace around slice operators. Due to this, Flake8 will raise
|
||||
`E203 whitespace before ':'` warnings. Since this warning is not PEP 8 compliant, Flake8
|
||||
should be configured to ignore it via `extend-ignore = E203`.
|
||||
whitespace around slice operators. Due to this, pycodestyle will raise
|
||||
`E203 whitespace before ':'` warnings. Since this warning is not PEP 8 compliant, it
|
||||
should be disabled.
|
||||
|
||||
##### `E701` / `E704`
|
||||
|
||||
_Black_ will collapse implementations of classes and functions consisting solely of `...`
|
||||
to a single line. This matches how such examples are formatted in PEP 8. It remains true
|
||||
that in all other cases Black will prevent multiple statements on the same line, in
|
||||
accordance with PEP 8 generally discouraging this.
|
||||
|
||||
However, `pycodestyle` does not mirror this logic and may raise
|
||||
`E701 multiple statements on one line (colon)` in this situation. Its
|
||||
disabled-by-default `E704 multiple statements on one line (def)` rule may also raise
|
||||
warnings and should not be enabled.
|
||||
|
||||
##### `W503`
|
||||
|
||||
When breaking a line, _Black_ will break it before a binary operator. This is compliant
|
||||
with PEP 8 as of
|
||||
[April 2016](https://github.com/python/peps/commit/c59c4376ad233a62ca4b3a6060c81368bd21e85b#diff-64ec08cc46db7540f18f2af46037f599).
|
||||
There's a disabled-by-default warning in Flake8 which goes against this PEP 8
|
||||
recommendation called `W503 line break before binary operator`. It should not be enabled
|
||||
in your configuration.
|
||||
|
||||
Also, as like with isort, flake8 should be configured to allow lines up to the length
|
||||
limit of `88`, _Black_'s default. This explains `max-line-length = 88`.
|
||||
in your configuration. You can use its counterpart
|
||||
`W504 line break after binary operator` instead.
|
||||
|
||||
#### Formats
|
||||
|
||||
<details>
|
||||
<summary>.flake8</summary>
|
||||
<summary>setup.cfg, .pycodestyle, tox.ini</summary>
|
||||
|
||||
```ini
|
||||
[flake8]
|
||||
[pycodestyle]
|
||||
max-line-length = 88
|
||||
extend-ignore = E203
|
||||
ignore = E203,E701
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>setup.cfg</summary>
|
||||
### Flake8
|
||||
|
||||
```ini
|
||||
[Flake8](https://pypi.org/p/flake8/) is a wrapper around multiple linters, including
|
||||
pycodestyle. As such, it has many of the same issues.
|
||||
|
||||
#### Bugbear
|
||||
|
||||
It's recommended to use [the Bugbear plugin](https://github.com/PyCQA/flake8-bugbear)
|
||||
and enable
|
||||
[its B950 check](https://github.com/PyCQA/flake8-bugbear#opinionated-warnings#:~:text=you%20expect%20it.-,B950,-%3A%20Line%20too%20long)
|
||||
instead of using Flake8's E501, because it aligns with
|
||||
[Black's 10% rule](labels/line-length).
|
||||
|
||||
Install Bugbear and use the following config:
|
||||
|
||||
```
|
||||
[flake8]
|
||||
max-line-length = 88
|
||||
extend-ignore = E203
|
||||
max-line-length = 80
|
||||
extend-select = B950
|
||||
extend-ignore = E203,E501,E701
|
||||
```
|
||||
|
||||
</details>
|
||||
#### Minimal Configuration
|
||||
|
||||
In cases where you can't or don't want to install Bugbear, you can use this minimally
|
||||
compatible config:
|
||||
|
||||
```
|
||||
[flake8]
|
||||
max-line-length = 88
|
||||
extend-ignore = E203,E701
|
||||
```
|
||||
|
||||
#### Why those options above?
|
||||
|
||||
See [the pycodestyle section](labels/why-pycodestyle-warnings) above.
|
||||
|
||||
#### Formats
|
||||
|
||||
<details>
|
||||
<summary>tox.ini</summary>
|
||||
<summary>.flake8, setup.cfg, tox.ini</summary>
|
||||
|
||||
```ini
|
||||
[flake8]
|
||||
max-line-length = 88
|
||||
extend-ignore = E203
|
||||
extend-ignore = E203,E701
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Pylint
|
||||
|
||||
[Pylint](https://pypi.org/p/pylint/) is also a code linter like Flake8. It has the same
|
||||
checks as flake8 and more. In particular, it has more formatting checks regarding style
|
||||
conventions like variable naming. With so many checks, Pylint is bound to have some
|
||||
mixed feelings about _Black_'s formatting style.
|
||||
[Pylint](https://pypi.org/p/pylint/) is also a code linter like Flake8. It has many of
|
||||
the same checks as Flake8 and more. It particularly has more formatting checks regarding
|
||||
style conventions like variable naming.
|
||||
|
||||
#### Configuration
|
||||
|
||||
@ -252,35 +300,3 @@ max-line-length = "88"
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### pycodestyle
|
||||
|
||||
[pycodestyle](https://pycodestyle.pycqa.org/) is also a code linter like Flake8.
|
||||
|
||||
#### Configuration
|
||||
|
||||
```
|
||||
max-line-length = 88
|
||||
ignore = E203
|
||||
```
|
||||
|
||||
#### Why those options above?
|
||||
|
||||
pycodestyle should be configured to only complain about lines that surpass `88`
|
||||
characters via `max_line_length = 88`.
|
||||
|
||||
See
|
||||
[Why are Flake8’s E203 and W503 violated?](https://black.readthedocs.io/en/stable/faq.html#why-are-flake8-s-e203-and-w503-violated)
|
||||
|
||||
#### Formats
|
||||
|
||||
<details>
|
||||
<summary>setup.cfg</summary>
|
||||
|
||||
```cfg
|
||||
[pycodestyle]
|
||||
ignore = E203
|
||||
max_line_length = 88
|
||||
```
|
||||
|
||||
</details>
|
||||
|
@ -21,7 +21,7 @@ Try it out now using the [Black Playground](https://black.vercel.app).
|
||||
*Black* is [successfully used](https://github.com/psf/black#used-by) by
|
||||
many projects, small and big. *Black* has a comprehensive test suite, with efficient
|
||||
parallel tests, our own auto formatting and parallel Continuous Integration runner.
|
||||
Now that we have become stable, you should not expect large formatting to changes in
|
||||
Now that we have become stable, you should not expect large changes to formatting in
|
||||
the future. Stylistic changes will mostly be responses to bug reports and support for new Python
|
||||
syntax.
|
||||
|
||||
|
@ -10,16 +10,26 @@ Options include the following:
|
||||
|
||||
## PyCharm/IntelliJ IDEA
|
||||
|
||||
There are three different ways you can use _Black_ from PyCharm:
|
||||
There are several different ways you can use _Black_ from PyCharm:
|
||||
|
||||
1. As local server using the BlackConnect plugin
|
||||
1. As external tool
|
||||
1. As file watcher
|
||||
1. Using the built-in _Black_ integration (PyCharm 2023.2 and later). This option is the
|
||||
simplest to set up.
|
||||
1. As local server using the BlackConnect plugin. This option formats the fastest. It
|
||||
spins up {doc}`Black's HTTP server </usage_and_configuration/black_as_a_server>`, to
|
||||
avoid the startup cost on subsequent formats.
|
||||
1. As external tool.
|
||||
1. As file watcher.
|
||||
|
||||
The first option is the simplest to set up and formats the fastest (by spinning up
|
||||
{doc}`Black's HTTP server </usage_and_configuration/black_as_a_server>`, avoiding the
|
||||
startup cost on subsequent formats), but if you would prefer to not install a
|
||||
third-party plugin or blackd's extra dependencies, the other two are also great options.
|
||||
### Built-in _Black_ integration
|
||||
|
||||
1. Install `black`.
|
||||
|
||||
```console
|
||||
$ pip install black
|
||||
```
|
||||
|
||||
1. Go to `Preferences or Settings -> Tools -> Black` and configure _Black_ to your
|
||||
liking.
|
||||
|
||||
### As local server
|
||||
|
||||
@ -226,7 +236,7 @@ Configuration:
|
||||
|
||||
#### Installation
|
||||
|
||||
This plugin **requires Vim 7.0+ built with Python 3.7+ support**. It needs Python 3.7 to
|
||||
This plugin **requires Vim 7.0+ built with Python 3.9+ support**. It needs Python 3.9 to
|
||||
be able to run _Black_ inside the Vim process which is much faster than calling an
|
||||
external command.
|
||||
|
||||
@ -278,8 +288,8 @@ $ git checkout origin/stable -b stable
|
||||
##### Arch Linux
|
||||
|
||||
On Arch Linux, the plugin is shipped with the
|
||||
[`python-black`](https://archlinux.org/packages/community/any/python-black/) package, so
|
||||
you can start using it in Vim after install with no additional setup.
|
||||
[`python-black`](https://archlinux.org/packages/extra/any/python-black/) package, so you
|
||||
can start using it in Vim after install with no additional setup.
|
||||
|
||||
##### Vim 8 Native Plugin Management
|
||||
|
||||
@ -334,60 +344,6 @@ To run _Black_ on a key press (e.g. F9 below), add this:
|
||||
nnoremap <F9> :Black<CR>
|
||||
```
|
||||
|
||||
#### Troubleshooting
|
||||
|
||||
**How to get Vim with Python 3.6?** On Ubuntu 17.10 Vim comes with Python 3.6 by
|
||||
default. On macOS with Homebrew run: `brew install vim`. When building Vim from source,
|
||||
use: `./configure --enable-python3interp=yes`. There's many guides online how to do
|
||||
this.
|
||||
|
||||
**I get an import error when using _Black_ from a virtual environment**: If you get an
|
||||
error message like this:
|
||||
|
||||
```text
|
||||
Traceback (most recent call last):
|
||||
File "<string>", line 63, in <module>
|
||||
File "/home/gui/.vim/black/lib/python3.7/site-packages/black.py", line 45, in <module>
|
||||
from typed_ast import ast3, ast27
|
||||
File "/home/gui/.vim/black/lib/python3.7/site-packages/typed_ast/ast3.py", line 40, in <module>
|
||||
from typed_ast import _ast3
|
||||
ImportError: /home/gui/.vim/black/lib/python3.7/site-packages/typed_ast/_ast3.cpython-37m-x86_64-linux-gnu.so: undefined symbol: PyExc_KeyboardInterrupt
|
||||
```
|
||||
|
||||
Then you need to install `typed_ast` directly from the source code. The error happens
|
||||
because `pip` will download [Python wheels](https://pythonwheels.com/) if they are
|
||||
available. Python wheels are a new standard of distributing Python packages and packages
|
||||
that have Cython and extensions written in C are already compiled, so the installation
|
||||
is much faster. The problem here is that somehow the Python environment inside Vim
|
||||
does not match with those already compiled C extensions and these kind of errors are the
|
||||
result. Luckily there is an easy fix: installing the packages from the source code.
|
||||
|
||||
The package that causes problems is:
|
||||
|
||||
- [typed-ast](https://pypi.org/project/typed-ast/)
|
||||
|
||||
Now remove that package:
|
||||
|
||||
```console
|
||||
$ pip uninstall typed-ast -y
|
||||
```
|
||||
|
||||
And now you can install them with:
|
||||
|
||||
```console
|
||||
$ pip install --no-binary :all: typed-ast
|
||||
```
|
||||
|
||||
The C extensions will be compiled and now Vim's Python environment will match. Note that
|
||||
you need to have the GCC compiler and the Python development files installed (on
|
||||
Ubuntu/Debian do `sudo apt-get install build-essential python3-dev`).
|
||||
|
||||
If you later want to update _Black_, you should do it like this:
|
||||
|
||||
```console
|
||||
$ pip install -U black --no-binary typed-ast
|
||||
```
|
||||
|
||||
### With ALE
|
||||
|
||||
1. Install [`ale`](https://github.com/dense-analysis/ale)
|
||||
@ -435,7 +391,7 @@ close and reopen your File, _Black_ will be done with its job.
|
||||
|
||||
- Use the
|
||||
[Python extension](https://marketplace.visualstudio.com/items?itemName=ms-python.python)
|
||||
([instructions](https://code.visualstudio.com/docs/python/editing#_formatting)).
|
||||
([instructions](https://code.visualstudio.com/docs/python/formatting)).
|
||||
|
||||
- Alternatively the pre-release
|
||||
[Black Formatter](https://marketplace.visualstudio.com/items?itemName=ms-python.black-formatter)
|
||||
@ -443,9 +399,10 @@ close and reopen your File, _Black_ will be done with its job.
|
||||
server for Black. Formatting is much more responsive using this extension, **but the
|
||||
minimum supported version of Black is 22.3.0**.
|
||||
|
||||
## SublimeText 3
|
||||
## SublimeText
|
||||
|
||||
Use [sublack plugin](https://github.com/jgirardet/sublack).
|
||||
For SublimeText 3, use [sublack plugin](https://github.com/jgirardet/sublack). For
|
||||
higher versions, it is recommended to use [LSP](#python-lsp-server) as documented below.
|
||||
|
||||
## Python LSP Server
|
||||
|
||||
@ -475,4 +432,4 @@ hook global WinSetOption filetype=python %{
|
||||
|
||||
## Thonny
|
||||
|
||||
Use [Thonny-black-code-format](https://github.com/Franccisco/thonny-black-code-format).
|
||||
Use [Thonny-black-formatter](https://pypi.org/project/thonny-black-formatter/).
|
||||
|
@ -24,7 +24,7 @@ jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: psf/black@stable
|
||||
```
|
||||
|
||||
@ -32,12 +32,15 @@ We recommend the use of the `@stable` tag, but per version tags also exist if yo
|
||||
that. Note that the action's version you select is independent of the version of _Black_
|
||||
the action will use.
|
||||
|
||||
The version of _Black_ the action will use can be configured via `version`. This can be
|
||||
any
|
||||
The version of _Black_ the action will use can be configured via `version` or read from
|
||||
the `pyproject.toml` file. `version` can be any
|
||||
[valid version specifier](https://packaging.python.org/en/latest/glossary/#term-Version-Specifier)
|
||||
or just the version number if you want an exact version. The action defaults to the
|
||||
latest release available on PyPI. Only versions available from PyPI are supported, so no
|
||||
commit SHAs or branch names.
|
||||
or just the version number if you want an exact version. To read the version from the
|
||||
`pyproject.toml` file instead, set `use_pyproject` to `true`. This will first look into
|
||||
the `tool.black.required-version` field, then the `dependency-groups` table, then the
|
||||
`project.dependencies` array and finally the `project.optional-dependencies` table. The
|
||||
action defaults to the latest release available on PyPI. Only versions available from
|
||||
PyPI are supported, so no commit SHAs or branch names.
|
||||
|
||||
If you want to include Jupyter Notebooks, _Black_ must be installed with the `jupyter`
|
||||
extra. Installing the extra and including Jupyter Notebook files can be configured via
|
||||
@ -70,3 +73,18 @@ If you want to match versions covered by Black's
|
||||
src: "./src"
|
||||
version: "~= 22.0"
|
||||
```
|
||||
|
||||
If you want to read the version from `pyproject.toml`, set `use_pyproject` to `true`.
|
||||
Note that this requires Python >= 3.11, so using the setup-python action may be
|
||||
required, for example:
|
||||
|
||||
```yaml
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.13"
|
||||
- uses: psf/black@stable
|
||||
with:
|
||||
options: "--check --verbose"
|
||||
src: "./src"
|
||||
use_pyproject: true
|
||||
```
|
||||
|
@ -6,29 +6,48 @@ Use [pre-commit](https://pre-commit.com/). Once you
|
||||
|
||||
```yaml
|
||||
repos:
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.3.0
|
||||
# Using this mirror lets us use mypyc-compiled black, which is about 2x faster
|
||||
- repo: https://github.com/psf/black-pre-commit-mirror
|
||||
rev: 25.1.0
|
||||
hooks:
|
||||
- id: black
|
||||
# It is recommended to specify the latest version of Python
|
||||
# supported by your project here, or alternatively use
|
||||
# pre-commit's default_language_version, see
|
||||
# https://pre-commit.com/#top_level-default_language_version
|
||||
language_version: python3.9
|
||||
language_version: python3.11
|
||||
```
|
||||
|
||||
Feel free to switch out the `rev` value to something else, like another
|
||||
[tag/version][black-tags] or even a specific commit. Although we discourage the use of
|
||||
Feel free to switch out the `rev` value to a different version of Black.
|
||||
|
||||
Note if you'd like to use a specific commit in `rev`, you'll need to swap the repo
|
||||
specified from the mirror to https://github.com/psf/black. We discourage the use of
|
||||
branches or other mutable refs since the hook [won't auto update as you may
|
||||
expect][pre-commit-mutable-rev].
|
||||
|
||||
If you want support for Jupyter Notebooks as well, then replace `id: black` with
|
||||
`id: black-jupyter`.
|
||||
## Jupyter Notebooks
|
||||
|
||||
```{note}
|
||||
The `black-jupyter` hook is only available from version 21.8b0 and onwards.
|
||||
There is an alternate hook `black-jupyter` that expands the targets of `black` to
|
||||
include Jupyter Notebooks. To use this hook, simply replace the hook's `id: black` with
|
||||
`id: black-jupyter` in the `.pre-commit-config.yaml`:
|
||||
|
||||
```yaml
|
||||
repos:
|
||||
# Using this mirror lets us use mypyc-compiled black, which is about 2x faster
|
||||
- repo: https://github.com/psf/black-pre-commit-mirror
|
||||
rev: 25.1.0
|
||||
hooks:
|
||||
- id: black-jupyter
|
||||
# It is recommended to specify the latest version of Python
|
||||
# supported by your project here, or alternatively use
|
||||
# pre-commit's default_language_version, see
|
||||
# https://pre-commit.com/#top_level-default_language_version
|
||||
language_version: python3.11
|
||||
```
|
||||
|
||||
```{note}
|
||||
The `black-jupyter` hook became available in version 21.8b0.
|
||||
```
|
||||
|
||||
[black-tags]: https://github.com/psf/black/tags
|
||||
[pre-commit-mutable-rev]:
|
||||
https://pre-commit.com/#using-the-latest-version-for-a-repository
|
||||
|
@ -1,9 +1,9 @@
|
||||
# Used by ReadTheDocs; pinned requirements for stability.
|
||||
|
||||
myst-parser==1.0.0
|
||||
Sphinx==6.1.3
|
||||
myst-parser==4.0.1
|
||||
Sphinx==8.2.3
|
||||
# Older versions break Sphinx even though they're declared to be supported.
|
||||
docutils==0.19
|
||||
sphinxcontrib-programoutput==0.17
|
||||
sphinx_copybutton==0.5.1
|
||||
furo==2023.3.23
|
||||
docutils==0.21.2
|
||||
sphinxcontrib-programoutput==0.18
|
||||
sphinx_copybutton==0.5.2
|
||||
furo==2024.8.6
|
||||
|
@ -8,16 +8,9 @@ deliberately limited and rarely added. Previous formatting is taken into account
|
||||
little as possible, with rare exceptions like the magic trailing comma. The coding style
|
||||
used by _Black_ can be viewed as a strict subset of PEP 8.
|
||||
|
||||
_Black_ reformats entire files in place. It doesn't reformat lines that end with
|
||||
`# fmt: skip` or blocks that start with `# fmt: off` and end with `# fmt: on`.
|
||||
`# fmt: on/off` must be on the same level of indentation and in the same block, meaning
|
||||
no unindents beyond the initial indentation level between them. It also recognizes
|
||||
[YAPF](https://github.com/google/yapf)'s block comments to the same effect, as a
|
||||
courtesy for straddling code.
|
||||
|
||||
The rest of this document describes the current formatting style. If you're interested
|
||||
in trying out where the style is heading, see [future style](./future_style.md) and try
|
||||
running `black --preview`.
|
||||
This document describes the current formatting style. If you're interested in trying out
|
||||
where the style is heading, see [future style](./future_style.md) and try running
|
||||
`black --preview`.
|
||||
|
||||
### How _Black_ wraps lines
|
||||
|
||||
@ -140,6 +133,8 @@ If you're reaching for backslashes, that's a clear signal that you can do better
|
||||
slightly refactor your code. I hope some of the examples above show you that there are
|
||||
many ways in which you can do it.
|
||||
|
||||
(labels/line-length)=
|
||||
|
||||
### Line length
|
||||
|
||||
You probably noticed the peculiar default line length. _Black_ defaults to 88 characters
|
||||
@ -148,7 +143,7 @@ significantly shorter files than sticking with 80 (the most popular), or even 79
|
||||
by the standard library). In general,
|
||||
[90-ish seems like the wise choice](https://youtu.be/wf-BqAjZb8M?t=260).
|
||||
|
||||
If you're paid by the line of code you write, you can pass `--line-length` with a lower
|
||||
If you're paid by the lines of code you write, you can pass `--line-length` with a lower
|
||||
number. _Black_ will try to respect that. However, sometimes it won't be able to without
|
||||
breaking other rules. In those rare cases, auto-formatted code will exceed your allotted
|
||||
limit.
|
||||
@ -158,33 +153,10 @@ harder to work with line lengths exceeding 100 characters. It also adversely aff
|
||||
side-by-side diff review on typical screen resolutions. Long lines also make it harder
|
||||
to present code neatly in documentation or talk slides.
|
||||
|
||||
If you're using Flake8, you can bump `max-line-length` to 88 and mostly forget about it.
|
||||
However, it's better if you use [Bugbear](https://github.com/PyCQA/flake8-bugbear)'s
|
||||
B950 warning instead of E501, and bump the max line length to 88 (or the `--line-length`
|
||||
you used for black), which will align more with black's _"try to respect
|
||||
`--line-length`, but don't become crazy if you can't"_. You'd do it like this:
|
||||
#### Flake8 and other linters
|
||||
|
||||
```ini
|
||||
[flake8]
|
||||
max-line-length = 88
|
||||
...
|
||||
select = C,E,F,W,B,B950
|
||||
extend-ignore = E203, E501
|
||||
```
|
||||
|
||||
Explanation of why E203 is disabled can be found further in this documentation. And if
|
||||
you're curious about the reasoning behind B950,
|
||||
[Bugbear's documentation](https://github.com/PyCQA/flake8-bugbear#opinionated-warnings)
|
||||
explains it. The tl;dr is "it's like highway speed limits, we won't bother you if you
|
||||
overdo it by a few km/h".
|
||||
|
||||
**If you're looking for a minimal, black-compatible flake8 configuration:**
|
||||
|
||||
```ini
|
||||
[flake8]
|
||||
max-line-length = 88
|
||||
extend-ignore = E203
|
||||
```
|
||||
See [Using _Black_ with other tools](../guides/using_black_with_other_tools.md) about
|
||||
linter compatibility.
|
||||
|
||||
### Empty lines
|
||||
|
||||
@ -194,44 +166,35 @@ that in-function vertical whitespace should only be used sparingly.
|
||||
_Black_ will allow single empty lines inside functions, and single and double empty
|
||||
lines on module level left by the original editors, except when they're within
|
||||
parenthesized expressions. Since such expressions are always reformatted to fit minimal
|
||||
space, this whitespace is lost. The other exception is that it will remove any empty
|
||||
lines immediately following a statement that introduces a new indentation level.
|
||||
space, this whitespace is lost.
|
||||
|
||||
```python
|
||||
# in:
|
||||
|
||||
def foo():
|
||||
def function(
|
||||
some_argument: int,
|
||||
|
||||
print("All the newlines above me should be deleted!")
|
||||
other_argument: int = 5,
|
||||
) -> EmptyLineInParenWillBeDeleted:
|
||||
|
||||
|
||||
if condition:
|
||||
|
||||
print("No newline above me!")
|
||||
|
||||
print("There is a newline above me, and that's OK!")
|
||||
|
||||
|
||||
class Point:
|
||||
|
||||
x: int
|
||||
y: int
|
||||
print("One empty line above me will be kept!")
|
||||
|
||||
def this_is_okay_too():
|
||||
print("No empty line here")
|
||||
# out:
|
||||
|
||||
def foo():
|
||||
print("All the newlines above me should be deleted!")
|
||||
def function(
|
||||
some_argument: int,
|
||||
other_argument: int = 5,
|
||||
) -> EmptyLineInParenWillBeDeleted:
|
||||
|
||||
print("One empty line above me will be kept!")
|
||||
|
||||
|
||||
if condition:
|
||||
print("No newline above me!")
|
||||
|
||||
print("There is a newline above me, and that's OK!")
|
||||
|
||||
|
||||
class Point:
|
||||
x: int
|
||||
y: int
|
||||
def this_is_okay_too():
|
||||
print("No empty line here")
|
||||
```
|
||||
|
||||
It will also insert proper spacing before and after function definitions. It's one line
|
||||
@ -250,11 +213,12 @@ required due to an inner function starting immediately after.
|
||||
|
||||
_Black_ does not format comment contents, but it enforces two spaces between code and a
|
||||
comment on the same line, and a space before the comment text begins. Some types of
|
||||
comments that require specific spacing rules are respected: doc comments (`#: comment`),
|
||||
section comments with long runs of hashes, and Spyder cells. Non-breaking spaces after
|
||||
hashes are also preserved. Comments may sometimes be moved because of formatting
|
||||
changes, which can break tools that assign special meaning to them. See
|
||||
[AST before and after formatting](#ast-before-and-after-formatting) for more discussion.
|
||||
comments that require specific spacing rules are respected: shebangs (`#! comment`), doc
|
||||
comments (`#: comment`), section comments with long runs of hashes, and Spyder cells.
|
||||
Non-breaking spaces after hashes are also preserved. Comments may sometimes be moved
|
||||
because of formatting changes, which can break tools that assign special meaning to
|
||||
them. See [AST before and after formatting](#ast-before-and-after-formatting) for more
|
||||
discussion.
|
||||
|
||||
### Trailing commas
|
||||
|
||||
@ -273,6 +237,8 @@ A pre-existing trailing comma informs _Black_ to always explode contents of the
|
||||
bracket pair into one item per line. Read more about this in the
|
||||
[Pragmatism](#pragmatism) section below.
|
||||
|
||||
(labels/strings)=
|
||||
|
||||
### Strings
|
||||
|
||||
_Black_ prefers double quotes (`"` and `"""`) over single quotes (`'` and `'''`). It
|
||||
@ -284,6 +250,11 @@ exception of [capital "R" prefixes](#rstrings-and-rstrings), unicode literal mar
|
||||
(`u`) are removed because they are meaningless in Python 3, and in the case of multiple
|
||||
characters "r" is put first as in spoken language: "raw f-string".
|
||||
|
||||
Another area where Python allows multiple ways to format a string is escape sequences.
|
||||
For example, `"\uabcd"` and `"\uABCD"` evaluate to the same string. _Black_ normalizes
|
||||
such escape sequences to lowercase, but uses uppercase for `\N` named character escapes,
|
||||
such as `"\N{MEETEI MAYEK LETTER HUK}"`.
|
||||
|
||||
The main reason to standardize on a single form of quotes is aesthetics. Having one kind
|
||||
of quotes everywhere reduces reader distraction. It will also enable a future version of
|
||||
_Black_ to merge consecutive string literals that ended up on the same line (see
|
||||
@ -449,6 +420,12 @@ file that are not enforced yet but might be in a future version of the formatter
|
||||
_Black_ will normalize line endings (`\n` or `\r\n`) based on the first line ending of
|
||||
the file.
|
||||
|
||||
### Form feed characters
|
||||
|
||||
_Black_ will retain form feed characters on otherwise empty lines at the module level.
|
||||
Only one form feed is retained for a group of consecutive empty lines. Where there are
|
||||
two empty lines in a row, the form feed is placed on the second line.
|
||||
|
||||
## Pragmatism
|
||||
|
||||
Early versions of _Black_ used to be absolutist in some respects. They took after its
|
||||
@ -457,6 +434,8 @@ there were not many users anyway. Not many edge cases were reported. As a mature
|
||||
_Black_ does make some exceptions to rules it otherwise holds. This section documents
|
||||
what those exceptions are and why this is the case.
|
||||
|
||||
(labels/magic-trailing-comma)=
|
||||
|
||||
### The magic trailing comma
|
||||
|
||||
_Black_ in general does not take existing formatting into account.
|
||||
@ -493,6 +472,8 @@ default by (among others) GitHub and Visual Studio Code, differentiates between
|
||||
r-strings and R-strings. The former are syntax highlighted as regular expressions while
|
||||
the latter are treated as true raw strings with no special semantics.
|
||||
|
||||
(labels/ast-changes)=
|
||||
|
||||
### AST before and after formatting
|
||||
|
||||
When run with `--safe` (the default), _Black_ checks that the code before and after is
|
||||
|
@ -1,99 +1,55 @@
|
||||
# The (future of the) Black code style
|
||||
|
||||
```{warning}
|
||||
Changes to this document often aren't tied to, and don't correspond with, releases of
|
||||
_Black_. It's recommended that you read the latest version available.
|
||||
```
|
||||
|
||||
## Using backslashes for with statements
|
||||
|
||||
[Backslashes are bad and should never be used](labels/why-no-backslashes), however
|
||||
there is one exception: `with` statements using multiple context managers. Before Python
|
||||
3.9 Python's grammar does not allow organizing parentheses around the series of context
|
||||
managers.
|
||||
|
||||
We don't want formatting like:
|
||||
|
||||
```py3
|
||||
with make_context_manager1() as cm1, make_context_manager2() as cm2, make_context_manager3() as cm3, make_context_manager4() as cm4:
|
||||
... # nothing to split on - line too long
|
||||
```
|
||||
|
||||
So _Black_ will, when we implement this, format it like this:
|
||||
|
||||
```py3
|
||||
with \
|
||||
make_context_manager1() as cm1, \
|
||||
make_context_manager2() as cm2, \
|
||||
make_context_manager3() as cm3, \
|
||||
make_context_manager4() as cm4 \
|
||||
:
|
||||
... # backslashes and an ugly stranded colon
|
||||
```
|
||||
|
||||
Although when the target version is Python 3.9 or higher, _Black_ uses parentheses
|
||||
instead in `--preview` mode (see below) since they're allowed in Python 3.9 and higher.
|
||||
|
||||
An alternative to consider if the backslashes in the above formatting are undesirable is
|
||||
to use {external:py:obj}`contextlib.ExitStack` to combine context managers in the
|
||||
following way:
|
||||
|
||||
```python
|
||||
with contextlib.ExitStack() as exit_stack:
|
||||
cm1 = exit_stack.enter_context(make_context_manager1())
|
||||
cm2 = exit_stack.enter_context(make_context_manager2())
|
||||
cm3 = exit_stack.enter_context(make_context_manager3())
|
||||
cm4 = exit_stack.enter_context(make_context_manager4())
|
||||
...
|
||||
```
|
||||
|
||||
## Preview style
|
||||
|
||||
(labels/preview-style)=
|
||||
|
||||
Experimental, potentially disruptive style changes are gathered under the `--preview`
|
||||
CLI flag. At the end of each year, these changes may be adopted into the default style,
|
||||
as described in [The Black Code Style](index.md). Because the functionality is
|
||||
experimental, feedback and issue reports are highly encouraged!
|
||||
|
||||
### Improved string processing
|
||||
In the past, the preview style included some features with known bugs, so that we were
|
||||
unable to move these features to the stable style. Therefore, such features are now
|
||||
moved to the `--unstable` style. All features in the `--preview` style are expected to
|
||||
make it to next year's stable style; features in the `--unstable` style will be
|
||||
stabilized only if issues with them are fixed. If bugs are discovered in a `--preview`
|
||||
feature, it is demoted to the `--unstable` style. To avoid thrash when a feature is
|
||||
demoted from the `--preview` to the `--unstable` style, users can use the
|
||||
`--enable-unstable-feature` flag to enable specific unstable features.
|
||||
|
||||
_Black_ will split long string literals and merge short ones. Parentheses are used where
|
||||
appropriate. When split, parts of f-strings that don't need formatting are converted to
|
||||
plain strings. User-made splits are respected when they do not exceed the line length
|
||||
limit. Line continuation backslashes are converted into parenthesized strings.
|
||||
Unnecessary parentheses are stripped. The stability and status of this feature is
|
||||
tracked in [this issue](https://github.com/psf/black/issues/2188).
|
||||
(labels/preview-features)=
|
||||
|
||||
### Improved line breaks
|
||||
Currently, the following features are included in the preview style:
|
||||
|
||||
For assignment expressions, _Black_ now prefers to split and wrap the right side of the
|
||||
assignment instead of left side. For example:
|
||||
- `always_one_newline_after_import`: Always force one blank line after import
|
||||
statements, except when the line after the import is a comment or an import statement
|
||||
- `wrap_long_dict_values_in_parens`: Add parentheses around long values in dictionaries
|
||||
([see below](labels/wrap-long-dict-values))
|
||||
- `fix_fmt_skip_in_one_liners`: Fix `# fmt: skip` behaviour on one-liner declarations,
|
||||
such as `def foo(): return "mock" # fmt: skip`, where previously the declaration
|
||||
would have been incorrectly collapsed.
|
||||
|
||||
```python
|
||||
some_dict[
|
||||
"with_a_long_key"
|
||||
] = some_looooooooong_module.some_looooooooooooooong_function_name(
|
||||
first_argument, second_argument, third_argument
|
||||
)
|
||||
```
|
||||
(labels/unstable-features)=
|
||||
|
||||
will be changed to:
|
||||
The unstable style additionally includes the following features:
|
||||
|
||||
```python
|
||||
some_dict["with_a_long_key"] = (
|
||||
some_looooooooong_module.some_looooooooooooooong_function_name(
|
||||
first_argument, second_argument, third_argument
|
||||
)
|
||||
)
|
||||
```
|
||||
- `string_processing`: split long string literals and related changes
|
||||
([see below](labels/string-processing))
|
||||
- `multiline_string_handling`: more compact formatting of expressions involving
|
||||
multiline strings ([see below](labels/multiline-string-handling))
|
||||
- `hug_parens_with_braces_and_square_brackets`: more compact formatting of nested
|
||||
brackets ([see below](labels/hug-parens))
|
||||
|
||||
### Improved parentheses management
|
||||
(labels/wrap-long-dict-values)=
|
||||
|
||||
### Improved parentheses management in dicts
|
||||
|
||||
For dict literals with long values, they are now wrapped in parentheses. Unnecessary
|
||||
parentheses are now removed. For example:
|
||||
|
||||
```python
|
||||
my_dict = {
|
||||
my_dict = {
|
||||
"a key in my dict": a_very_long_variable
|
||||
* and_a_very_long_function_call()
|
||||
/ 100000.0,
|
||||
@ -112,6 +68,94 @@ my_dict = {
|
||||
}
|
||||
```
|
||||
|
||||
(labels/hug-parens)=
|
||||
|
||||
### Improved multiline dictionary and list indentation for sole function parameter
|
||||
|
||||
For better readability and less verticality, _Black_ now pairs parentheses ("(", ")")
|
||||
with braces ("{", "}") and square brackets ("[", "]") on the same line. For example:
|
||||
|
||||
```python
|
||||
foo(
|
||||
[
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
]
|
||||
)
|
||||
|
||||
nested_array = [
|
||||
[
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
]
|
||||
]
|
||||
```
|
||||
|
||||
will be changed to:
|
||||
|
||||
```python
|
||||
foo([
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
])
|
||||
|
||||
nested_array = [[
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
]]
|
||||
```
|
||||
|
||||
This also applies to list and dictionary unpacking:
|
||||
|
||||
```python
|
||||
foo(
|
||||
*[
|
||||
a_long_function_name(a_long_variable_name)
|
||||
for a_long_variable_name in some_generator
|
||||
]
|
||||
)
|
||||
```
|
||||
|
||||
will become:
|
||||
|
||||
```python
|
||||
foo(*[
|
||||
a_long_function_name(a_long_variable_name)
|
||||
for a_long_variable_name in some_generator
|
||||
])
|
||||
```
|
||||
|
||||
You can use a magic trailing comma to avoid this compacting behavior; by default,
|
||||
_Black_ will not reformat the following code:
|
||||
|
||||
```python
|
||||
foo(
|
||||
[
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
],
|
||||
)
|
||||
```
|
||||
|
||||
(labels/string-processing)=
|
||||
|
||||
### Improved string processing
|
||||
|
||||
_Black_ will split long string literals and merge short ones. Parentheses are used where
|
||||
appropriate. When split, parts of f-strings that don't need formatting are converted to
|
||||
plain strings. f-strings will not be merged if they contain internal quotes and it would
|
||||
change their quotation mark style. User-made splits are respected when they do not
|
||||
exceed the line length limit. Line continuation backslashes are converted into
|
||||
parenthesized strings. Unnecessary parentheses are stripped. The stability and status of
|
||||
this feature is tracked in [this issue](https://github.com/psf/black/issues/2188).
|
||||
|
||||
(labels/multiline-string-handling)=
|
||||
|
||||
### Improved multiline string handling
|
||||
|
||||
_Black_ is smarter when formatting multiline strings, especially in function arguments,
|
||||
@ -159,3 +203,67 @@ MULTILINE = """
|
||||
foobar
|
||||
""".replace("\n", "")
|
||||
```
|
||||
|
||||
Implicit multiline strings are special, because they can have inline comments. Strings
|
||||
without comments are merged, for example
|
||||
|
||||
```python
|
||||
s = (
|
||||
"An "
|
||||
"implicit "
|
||||
"multiline "
|
||||
"string"
|
||||
)
|
||||
```
|
||||
|
||||
becomes
|
||||
|
||||
```python
|
||||
s = "An implicit multiline string"
|
||||
```
|
||||
|
||||
A comment on any line of the string (or between two string lines) will block the
|
||||
merging, so
|
||||
|
||||
```python
|
||||
s = (
|
||||
"An " # Important comment concerning just this line
|
||||
"implicit "
|
||||
"multiline "
|
||||
"string"
|
||||
)
|
||||
```
|
||||
|
||||
and
|
||||
|
||||
```python
|
||||
s = (
|
||||
"An "
|
||||
"implicit "
|
||||
# Comment in between
|
||||
"multiline "
|
||||
"string"
|
||||
)
|
||||
```
|
||||
|
||||
will not be merged. Having the comment after or before the string lines (but still
|
||||
inside the parens) will merge the string. For example
|
||||
|
||||
```python
|
||||
s = ( # Top comment
|
||||
"An "
|
||||
"implicit "
|
||||
"multiline "
|
||||
"string"
|
||||
# Bottom comment
|
||||
)
|
||||
```
|
||||
|
||||
becomes
|
||||
|
||||
```python
|
||||
s = ( # Top comment
|
||||
"An implicit multiline string"
|
||||
# Bottom comment
|
||||
)
|
||||
```
|
||||
|
@ -42,9 +42,11 @@ _Black_:
|
||||
enabled by newer Python language syntax as well as due to improvements in the
|
||||
formatting logic.
|
||||
|
||||
- The `--preview` flag is exempt from this policy. There are no guarantees around the
|
||||
stability of the output with that flag passed into _Black_. This flag is intended for
|
||||
allowing experimentation with the proposed changes to the _Black_ code style.
|
||||
- The `--preview` and `--unstable` flags are exempt from this policy. There are no
|
||||
guarantees around the stability of the output with these flags passed into _Black_.
|
||||
They are intended for allowing experimentation with proposed changes to the _Black_
|
||||
code style. The `--preview` style at the end of a year should closely match the stable
|
||||
style for the next year, but we may always make changes.
|
||||
|
||||
Documentation for both the current and future styles can be found:
|
||||
|
||||
|
@ -62,6 +62,12 @@ The headers controlling how source code is formatted are:
|
||||
- `X-Preview`: corresponds to the `--preview` command line flag. If present and its
|
||||
value is not an empty string, experimental and potentially disruptive style changes
|
||||
will be used.
|
||||
- `X-Unstable`: corresponds to the `--unstable` command line flag. If present and its
|
||||
value is not an empty string, experimental style changes that are known to be buggy
|
||||
will be used.
|
||||
- `X-Enable-Unstable-Feature`: corresponds to the `--enable-unstable-feature` flag. The
|
||||
contents of the flag must be a comma-separated list of unstable features to be
|
||||
enabled. Example: `X-Enable-Unstable-Feature: feature1, feature2`.
|
||||
- `X-Fast-Or-Safe`: if set to `fast`, `blackd` will act as _Black_ does when passed the
|
||||
`--fast` command line flag.
|
||||
- `X-Python-Variant`: if set to `pyi`, `blackd` will act as _Black_ does when passed the
|
||||
|
@ -8,22 +8,24 @@ _Black_ images with the following tags are available:
|
||||
- release numbers, e.g. `21.5b2`, `21.6b0`, `21.7b0` etc.\
|
||||
ℹ Recommended for users who want to use a particular version of _Black_.
|
||||
- `latest_release` - tag created when a new version of _Black_ is released.\
|
||||
ℹ Recommended for users who want to use released versions of _Black_. It maps to [the latest release](https://github.com/psf/black/releases/latest)
|
||||
of _Black_.
|
||||
ℹ Recommended for users who want to use released versions of _Black_. It maps to
|
||||
[the latest release](https://github.com/psf/black/releases/latest) of _Black_.
|
||||
- `latest_prerelease` - tag created when a new alpha (prerelease) version of _Black_ is
|
||||
released.\
|
||||
ℹ Recommended for users who want to preview or test alpha versions of _Black_. Note that
|
||||
the most recent release may be newer than any prerelease, because no prereleases are created
|
||||
before most releases.
|
||||
ℹ Recommended for users who want to preview or test alpha versions of _Black_. Note
|
||||
that the most recent release may be newer than any prerelease, because no prereleases
|
||||
are created before most releases.
|
||||
- `latest` - tag used for the newest image of _Black_.\
|
||||
ℹ Recommended for users who always want to use the latest version of _Black_, even before
|
||||
it is released.
|
||||
ℹ Recommended for users who always want to use the latest version of _Black_, even
|
||||
before it is released.
|
||||
|
||||
There is one more tag used for _Black_ Docker images - `latest_non_release`. It is
|
||||
created for all unreleased
|
||||
[commits on the `main` branch](https://github.com/psf/black/commits/main). This tag is
|
||||
not meant to be used by external users.
|
||||
|
||||
From version 23.11.0 the Docker image installs a compiled black into the image.
|
||||
|
||||
## Usage
|
||||
|
||||
A permanent container doesn't have to be created to use _Black_ as a Docker image. It's
|
||||
|
@ -12,7 +12,8 @@ _Black_ is a well-behaved Unix-style command-line tool:
|
||||
|
||||
## Usage
|
||||
|
||||
To get started right away with sensible defaults:
|
||||
_Black_ will reformat entire files in place. To get started right away with sensible
|
||||
defaults:
|
||||
|
||||
```sh
|
||||
black {source_file_or_directory}
|
||||
@ -24,71 +25,167 @@ You can run _Black_ as a package if running it as a script doesn't work:
|
||||
python -m black {source_file_or_directory}
|
||||
```
|
||||
|
||||
### Ignoring sections
|
||||
|
||||
Black will not reformat lines that contain `# fmt: skip` or blocks that start with
|
||||
`# fmt: off` and end with `# fmt: on`. `# fmt: skip` can be mixed with other
|
||||
pragmas/comments either with multiple comments (e.g. `# fmt: skip # pylint # noqa`) or
|
||||
as a semicolon separated list (e.g. `# fmt: skip; pylint; noqa`). `# fmt: on/off` must
|
||||
be on the same level of indentation and in the same block, meaning no unindents beyond
|
||||
the initial indentation level between them. Black also recognizes
|
||||
[YAPF](https://github.com/google/yapf)'s block comments to the same effect, as a
|
||||
courtesy for straddling code.
|
||||
|
||||
### Command line options
|
||||
|
||||
The CLI options of _Black_ can be displayed by expanding the view below or by running
|
||||
`black --help`. While _Black_ has quite a few knobs these days, it is still opinionated
|
||||
so style options are deliberately limited and rarely added.
|
||||
The CLI options of _Black_ can be displayed by running `black --help`. All options are
|
||||
also covered in more detail below.
|
||||
|
||||
<details>
|
||||
|
||||
<summary>CLI reference</summary>
|
||||
|
||||
```{program-output} black --help
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
While _Black_ has quite a few knobs these days, it is still opinionated so style options
|
||||
are deliberately limited and rarely added.
|
||||
|
||||
Note that all command-line options listed above can also be configured using a
|
||||
`pyproject.toml` file (more on that below).
|
||||
|
||||
### Code input alternatives
|
||||
#### `-h`, `--help`
|
||||
|
||||
#### Standard Input
|
||||
Show available command-line options and exit.
|
||||
|
||||
_Black_ supports formatting code via stdin, with the result being printed to stdout.
|
||||
Just let _Black_ know with `-` as the path.
|
||||
#### `-c`, `--code`
|
||||
|
||||
```console
|
||||
$ echo "print ( 'hello, world' )" | black -
|
||||
print("hello, world")
|
||||
reformatted -
|
||||
All done! ✨ 🍰 ✨
|
||||
1 file reformatted.
|
||||
```
|
||||
|
||||
**Tip:** if you need _Black_ to treat stdin input as a file passed directly via the CLI,
|
||||
use `--stdin-filename`. Useful to make sure _Black_ will respect the `--force-exclude`
|
||||
option on some editors that rely on using stdin.
|
||||
|
||||
#### As a string
|
||||
|
||||
You can also pass code as a string using the `-c` / `--code` option.
|
||||
Format the code passed in as a string.
|
||||
|
||||
```console
|
||||
$ black --code "print ( 'hello, world' )"
|
||||
print("hello, world")
|
||||
```
|
||||
|
||||
### Writeback and reporting
|
||||
#### `-l`, `--line-length`
|
||||
|
||||
By default _Black_ reformats the files given and/or found in place. Sometimes you need
|
||||
_Black_ to just tell you what it _would_ do without actually rewriting the Python files.
|
||||
How many characters per line to allow. The default is 88.
|
||||
|
||||
There are two variations to this mode that are independently enabled by their respective
|
||||
flags. Both variations can be enabled at once.
|
||||
See also [the style documentation](labels/line-length).
|
||||
|
||||
#### `-t`, `--target-version`
|
||||
|
||||
Python versions that should be supported by Black's output. You can run `black --help`
|
||||
and look for the `--target-version` option to see the full list of supported versions.
|
||||
You should include all versions that your code supports. If you support Python 3.11
|
||||
through 3.13, you should write:
|
||||
|
||||
```console
|
||||
$ black -t py311 -t py312 -t py313
|
||||
```
|
||||
|
||||
In a [configuration file](#configuration-via-a-file), you can write:
|
||||
|
||||
```toml
|
||||
target-version = ["py311", "py312", "py313"]
|
||||
```
|
||||
|
||||
By default, Black will infer target versions from the project metadata in
|
||||
`pyproject.toml`, specifically the `project.requires-python` field. If this does not
|
||||
yield conclusive results, Black will use per-file auto-detection.
|
||||
|
||||
_Black_ uses this option to decide what grammar to use to parse your code. In addition,
|
||||
it may use it to decide what style to use. For example, support for a trailing comma
|
||||
after `*args` in a function call was added in Python 3.5, so _Black_ will add this comma
|
||||
only if the target versions are all Python 3.5 or higher:
|
||||
|
||||
```console
|
||||
$ black --line-length=10 --target-version=py35 -c 'f(a, *args)'
|
||||
f(
|
||||
a,
|
||||
*args,
|
||||
)
|
||||
$ black --line-length=10 --target-version=py34 -c 'f(a, *args)'
|
||||
f(
|
||||
a,
|
||||
*args
|
||||
)
|
||||
$ black --line-length=10 --target-version=py34 --target-version=py35 -c 'f(a, *args)'
|
||||
f(
|
||||
a,
|
||||
*args
|
||||
)
|
||||
```
|
||||
|
||||
#### `--pyi`
|
||||
|
||||
Format all input files like typing stubs regardless of file extension. This is useful
|
||||
when piping source on standard input.
|
||||
|
||||
#### `--ipynb`
|
||||
|
||||
Format all input files like Jupyter Notebooks regardless of file extension. This is
|
||||
useful when piping source on standard input.
|
||||
|
||||
#### `--python-cell-magics`
|
||||
|
||||
When processing Jupyter Notebooks, add the given magic to the list of known python-
|
||||
magics. Useful for formatting cells with custom python magics.
|
||||
|
||||
#### `-x, --skip-source-first-line`
|
||||
|
||||
Skip the first line of the source code.
|
||||
|
||||
#### `-S, --skip-string-normalization`
|
||||
|
||||
By default, _Black_ uses double quotes for all strings and normalizes string prefixes,
|
||||
as described in [the style documentation](labels/strings). If this option is given,
|
||||
strings are left unchanged instead.
|
||||
|
||||
#### `-C, --skip-magic-trailing-comma`
|
||||
|
||||
By default, _Black_ uses existing trailing commas as an indication that short lines
|
||||
should be left separate, as described in
|
||||
[the style documentation](labels/magic-trailing-comma). If this option is given, the
|
||||
magic trailing comma is ignored.
|
||||
|
||||
#### `--preview`
|
||||
|
||||
Enable potentially disruptive style changes that we expect to add to Black's main
|
||||
functionality in the next major release. Use this if you want a taste of what next
|
||||
year's style will look like.
|
||||
|
||||
Read more about [our preview style](labels/preview-style).
|
||||
|
||||
There is no guarantee on the code style produced by this flag across releases.
|
||||
|
||||
#### `--unstable`
|
||||
|
||||
Enable all style changes in `--preview`, plus additional changes that we would like to
|
||||
make eventually, but that have known issues that need to be fixed before they can move
|
||||
back to the `--preview` style. Use this if you want to experiment with these changes and
|
||||
help fix issues with them.
|
||||
|
||||
There is no guarantee on the code style produced by this flag across releases.
|
||||
|
||||
#### `--enable-unstable-feature`
|
||||
|
||||
Enable specific features from the `--unstable` style. See
|
||||
[the preview style documentation](labels/unstable-features) for the list of supported
|
||||
features. This flag can only be used when `--preview` is enabled. Users are encouraged
|
||||
to use this flag if they use `--preview` style and a feature that affects their code is
|
||||
moved from the `--preview` to the `--unstable` style, but they want to avoid the thrash
|
||||
from undoing this change.
|
||||
|
||||
There are no guarantees on the behavior of these features, or even their existence,
|
||||
across releases.
|
||||
|
||||
(labels/exit-code)=
|
||||
|
||||
#### Exit code
|
||||
#### `--check`
|
||||
|
||||
Passing `--check` will make _Black_ exit with:
|
||||
Don't write the files back, just return the status. _Black_ will exit with:
|
||||
|
||||
- code 0 if nothing would change;
|
||||
- code 1 if some files would be reformatted; or
|
||||
- code 123 if there was an internal error
|
||||
|
||||
If used in combination with `--quiet` then only the exit code will be returned, unless
|
||||
there was an internal error.
|
||||
|
||||
```console
|
||||
$ black test.py --check
|
||||
All done! ✨ 🍰 ✨
|
||||
@ -111,17 +208,17 @@ $ echo $?
|
||||
123
|
||||
```
|
||||
|
||||
#### Diffs
|
||||
#### `--diff`
|
||||
|
||||
Passing `--diff` will make _Black_ print out diffs that indicate what changes _Black_
|
||||
would've made. They are printed to stdout so capturing them is simple.
|
||||
Don't write the files back, just output a diff to indicate what changes _Black_ would've
|
||||
made. They are printed to stdout so capturing them is simple.
|
||||
|
||||
If you'd like colored diffs, you can enable them with the `--color`.
|
||||
If you'd like colored diffs, you can enable them with `--color`.
|
||||
|
||||
```console
|
||||
$ black test.py --diff
|
||||
--- test.py 2021-03-08 22:23:40.848954 +0000
|
||||
+++ test.py 2021-03-08 22:23:47.126319 +0000
|
||||
--- test.py 2021-03-08 22:23:40.848954+00:00
|
||||
+++ test.py 2021-03-08 22:23:47.126319+00:00
|
||||
@@ -1 +1 @@
|
||||
-print ( 'hello, world' )
|
||||
+print("hello, world")
|
||||
@ -130,6 +227,198 @@ All done! ✨ 🍰 ✨
|
||||
1 file would be reformatted.
|
||||
```
|
||||
|
||||
#### `--color` / `--no-color`
|
||||
|
||||
Show (or do not show) colored diff. Only applies when `--diff` is given.
|
||||
|
||||
#### `--line-ranges`
|
||||
|
||||
When specified, _Black_ will try its best to only format these lines.
|
||||
|
||||
This option can be specified multiple times, and a union of the lines will be formatted.
|
||||
Each range must be specified as two integers connected by a `-`: `<START>-<END>`. The
|
||||
`<START>` and `<END>` integer indices are 1-based and inclusive on both ends.
|
||||
|
||||
_Black_ may still format lines outside of the ranges for multi-line statements.
|
||||
Formatting more than one file or any ipynb files with this option is not supported. This
|
||||
option cannot be specified in the `pyproject.toml` config.
|
||||
|
||||
Example: `black --line-ranges=1-10 --line-ranges=21-30 test.py` will format lines from
|
||||
`1` to `10` and `21` to `30`.
|
||||
|
||||
This option is mainly for editor integrations, such as "Format Selection".
|
||||
|
||||
```{note}
|
||||
Due to [#4052](https://github.com/psf/black/issues/4052), `--line-ranges` might format
|
||||
extra lines outside of the ranges when there are unformatted lines with the exact
|
||||
content. It also disables _Black_'s formatting stability check in `--safe` mode.
|
||||
```
|
||||
|
||||
#### `--fast` / `--safe`
|
||||
|
||||
By default, _Black_ performs [an AST safety check](labels/ast-changes) after formatting
|
||||
your code. The `--fast` flag turns off this check and the `--safe` flag explicitly
|
||||
enables it.
|
||||
|
||||
#### `--required-version`
|
||||
|
||||
Require a specific version of _Black_ to be running. This is useful for ensuring that
|
||||
all contributors to your project are using the same version, because different versions
|
||||
of _Black_ may format code a little differently. This option can be set in a
|
||||
configuration file for consistent results across environments.
|
||||
|
||||
```console
|
||||
$ black --version
|
||||
black, 25.1.0 (compiled: yes)
|
||||
$ black --required-version 25.1.0 -c "format = 'this'"
|
||||
format = "this"
|
||||
$ black --required-version 31.5b2 -c "still = 'beta?!'"
|
||||
Oh no! 💥 💔 💥 The required version does not match the running version!
|
||||
```
|
||||
|
||||
You can also pass just the major version:
|
||||
|
||||
```console
|
||||
$ black --required-version 22 -c "format = 'this'"
|
||||
format = "this"
|
||||
$ black --required-version 31 -c "still = 'beta?!'"
|
||||
Oh no! 💥 💔 💥 The required version does not match the running version!
|
||||
```
|
||||
|
||||
Because of our [stability policy](../the_black_code_style/index.md), this will guarantee
|
||||
stable formatting, but still allow you to take advantage of improvements that do not
|
||||
affect formatting.
|
||||
|
||||
#### `--exclude`
|
||||
|
||||
A regular expression that matches files and directories that should be excluded on
|
||||
recursive searches. An empty value means no paths are excluded. Use forward slashes for
|
||||
directories on all platforms (Windows, too). By default, Black also ignores all paths
|
||||
listed in `.gitignore`. Changing this value will override all default exclusions.
|
||||
|
||||
If the regular expression contains newlines, it is treated as a
|
||||
[verbose regular expression](https://docs.python.org/3/library/re.html#re.VERBOSE). This
|
||||
is typically useful when setting these options in a `pyproject.toml` configuration file;
|
||||
see [Configuration format](#configuration-format) for more information.
|
||||
|
||||
#### `--extend-exclude`
|
||||
|
||||
Like `--exclude`, but adds additional files and directories on top of the default values
|
||||
instead of overriding them.
|
||||
|
||||
#### `--force-exclude`
|
||||
|
||||
Like `--exclude`, but files and directories matching this regex will be excluded even
|
||||
when they are passed explicitly as arguments. This is useful when invoking Black
|
||||
programmatically on changed files, such as in a pre-commit hook or editor plugin.
|
||||
|
||||
#### `--stdin-filename`
|
||||
|
||||
The name of the file when passing it through stdin. Useful to make sure Black will
|
||||
respect the `--force-exclude` option on some editors that rely on using stdin.
|
||||
|
||||
#### `--include`
|
||||
|
||||
A regular expression that matches files and directories that should be included on
|
||||
recursive searches. An empty value means all files are included regardless of the name.
|
||||
Use forward slashes for directories on all platforms (Windows, too). Overrides all
|
||||
exclusions, including from `.gitignore` and command line options.
|
||||
|
||||
#### `-W`, `--workers`
|
||||
|
||||
When _Black_ formats multiple files, it may use a process pool to speed up formatting.
|
||||
This option controls the number of parallel workers. This can also be specified via the
|
||||
`BLACK_NUM_WORKERS` environment variable. Defaults to the number of CPUs in the system.
|
||||
|
||||
#### `-q`, `--quiet`
|
||||
|
||||
Stop emitting all non-critical output. Error messages will still be emitted (which can be
|
||||
silenced by `2>/dev/null`).
|
||||
|
||||
```console
|
||||
$ black src/ -q
|
||||
error: cannot format src/black_primer/cli.py: Cannot parse: 5:6: mport asyncio
|
||||
```
|
||||
|
||||
#### `-v`, `--verbose`
|
||||
|
||||
Emit messages about files that were not changed or were ignored due to exclusion
|
||||
patterns. If _Black_ is using a configuration file, a message detailing which one it is
|
||||
using will be emitted.
|
||||
|
||||
```console
|
||||
$ black src/ -v
|
||||
Using configuration from /tmp/pyproject.toml.
|
||||
src/blib2to3 ignored: matches the --extend-exclude regular expression
|
||||
src/_black_version.py wasn't modified on disk since last run.
|
||||
src/black/__main__.py wasn't modified on disk since last run.
|
||||
error: cannot format src/black_primer/cli.py: Cannot parse: 5:6: mport asyncio
|
||||
reformatted src/black_primer/lib.py
|
||||
reformatted src/blackd/__init__.py
|
||||
reformatted src/black/__init__.py
|
||||
Oh no! 💥 💔 💥
|
||||
3 files reformatted, 2 files left unchanged, 1 file failed to reformat
|
||||
```
|
||||
|
||||
#### `--version`
|
||||
|
||||
You can check the version of _Black_ you have installed using the `--version` flag.
|
||||
|
||||
```console
|
||||
$ black --version
|
||||
black, 25.1.0
|
||||
```
|
||||
|
||||
#### `--config`
|
||||
|
||||
Read configuration options from a configuration file. See
|
||||
[below](#configuration-via-a-file) for more details on the configuration file.
|
||||
|
||||
### Environment variable options
|
||||
|
||||
_Black_ supports the following configuration via environment variables.
|
||||
|
||||
#### `BLACK_CACHE_DIR`
|
||||
|
||||
The directory where _Black_ should store its cache.
|
||||
|
||||
#### `BLACK_NUM_WORKERS`
|
||||
|
||||
The number of parallel workers _Black_ should use. The command line option `-W` /
|
||||
`--workers` takes precedence over this environment variable.
|
||||
|
||||
### Code input alternatives
|
||||
|
||||
_Black_ supports formatting code via stdin, with the result being printed to stdout.
|
||||
Just let _Black_ know with `-` as the path.
|
||||
|
||||
```console
|
||||
$ echo "print ( 'hello, world' )" | black -
|
||||
print("hello, world")
|
||||
reformatted -
|
||||
All done! ✨ 🍰 ✨
|
||||
1 file reformatted.
|
||||
```
|
||||
|
||||
**Tip:** if you need _Black_ to treat stdin input as a file passed directly via the CLI,
|
||||
use `--stdin-filename`. Useful to make sure _Black_ will respect the `--force-exclude`
|
||||
option on some editors that rely on using stdin.
|
||||
|
||||
You can also pass code as a string using the `--code` option.
|
||||
|
||||
### Writeback and reporting
|
||||
|
||||
By default _Black_ reformats the files given and/or found in place. Sometimes you need
|
||||
_Black_ to just tell you what it _would_ do without actually rewriting the Python files.
|
||||
|
||||
There are two variations to this mode that are independently enabled by their respective
|
||||
flags:
|
||||
|
||||
- `--check` (exit with code 1 if any file would be reformatted)
|
||||
- `--diff` (print a diff instead of reformatting files)
|
||||
|
||||
Both variations can be enabled at once.
|
||||
|
||||
### Output verbosity
|
||||
|
||||
_Black_ in general tries to produce the right amount of output, balancing between
|
||||
@ -146,53 +435,7 @@ Oh no! 💥 💔 💥
|
||||
3 files reformatted, 2 files left unchanged, 1 file failed to reformat.
|
||||
```
|
||||
|
||||
Passing `-v` / `--verbose` will cause _Black_ to also emit messages about files that
|
||||
were not changed or were ignored due to exclusion patterns. If _Black_ is using a
|
||||
configuration file, a blue message detailing which one it is using will be emitted.
|
||||
|
||||
```console
|
||||
$ black src/ -v
|
||||
Using configuration from /tmp/pyproject.toml.
|
||||
src/blib2to3 ignored: matches the --extend-exclude regular expression
|
||||
src/_black_version.py wasn't modified on disk since last run.
|
||||
src/black/__main__.py wasn't modified on disk since last run.
|
||||
error: cannot format src/black_primer/cli.py: Cannot parse: 5:6: mport asyncio
|
||||
reformatted src/black_primer/lib.py
|
||||
reformatted src/blackd/__init__.py
|
||||
reformatted src/black/__init__.py
|
||||
Oh no! 💥 💔 💥
|
||||
3 files reformatted, 2 files left unchanged, 1 file failed to reformat
|
||||
```
|
||||
|
||||
Passing `-q` / `--quiet` will cause _Black_ to stop emitting all non-critical output.
|
||||
Error messages will still be emitted (which can be silenced by `2>/dev/null`).
|
||||
|
||||
```console
|
||||
$ black src/ -q
|
||||
error: cannot format src/black_primer/cli.py: Cannot parse: 5:6: mport asyncio
|
||||
```
|
||||
|
||||
### Versions
|
||||
|
||||
You can check the version of _Black_ you have installed using the `--version` flag.
|
||||
|
||||
```console
|
||||
$ black --version
|
||||
black, version 23.3.0
|
||||
```
|
||||
|
||||
An option to require a specific version to be running is also provided.
|
||||
|
||||
```console
|
||||
$ black --required-version 21.9b0 -c "format = 'this'"
|
||||
format = "this"
|
||||
$ black --required-version 31.5b2 -c "still = 'beta?!'"
|
||||
Oh no! 💥 💔 💥 The required version does not match the running version!
|
||||
```
|
||||
|
||||
This is useful for example when running _Black_ in multiple environments that haven't
|
||||
necessarily installed the correct version. This option can be set in a configuration
|
||||
file for consistent results across environments.
|
||||
The `--quiet` and `--verbose` flags control output verbosity.
|
||||
|
||||
## Configuration via a file
|
||||
|
||||
@ -216,10 +459,11 @@ of tools like [Poetry](https://python-poetry.org/),
|
||||
|
||||
### Where _Black_ looks for the file
|
||||
|
||||
By default _Black_ looks for `pyproject.toml` starting from the common base directory of
|
||||
all files and directories passed on the command line. If it's not there, it looks in
|
||||
parent directories. It stops looking when it finds the file, or a `.git` directory, or a
|
||||
`.hg` directory, or the root of the file system, whichever comes first.
|
||||
By default _Black_ looks for `pyproject.toml` containing a `[tool.black]` section
|
||||
starting from the common base directory of all files and directories passed on the
|
||||
command line. If it's not there, it looks in parent directories. It stops looking when
|
||||
it finds the file, or a `.git` directory, or a `.hg` directory, or the root of the file
|
||||
system, whichever comes first.
|
||||
|
||||
If you're formatting standard input, _Black_ will look for configuration starting from
|
||||
the current working directory.
|
||||
@ -234,15 +478,15 @@ operating system, this configuration file should be stored as:
|
||||
`XDG_CONFIG_HOME` environment variable is not set)
|
||||
|
||||
Note that these are paths to the TOML file itself (meaning that they shouldn't be named
|
||||
as `pyproject.toml`), not directories where you store the configuration. Here, `~`
|
||||
refers to the path to your home directory. On Windows, this will be something like
|
||||
`C:\\Users\UserName`.
|
||||
as `pyproject.toml`), not directories where you store the configuration (i.e.,
|
||||
`black`/`.black` is the file to create and add your configuration options to, in the
|
||||
`~/.config/` directory). Here, `~` refers to the path to your home directory. On
|
||||
Windows, this will be something like `C:\\Users\UserName`.
|
||||
|
||||
You can also explicitly specify the path to a particular file that you want with
|
||||
`--config`. In this situation _Black_ will not look for any other file.
|
||||
|
||||
If you're running with `--verbose`, you will see a blue message if a file was found and
|
||||
used.
|
||||
If you're running with `--verbose`, you will see a message if a file was found and used.
|
||||
|
||||
Please note `blackd` will not use `pyproject.toml` configuration.
|
||||
|
||||
|
@ -7,15 +7,16 @@
|
||||
import venv
|
||||
import zipfile
|
||||
from argparse import ArgumentParser, Namespace
|
||||
from collections.abc import Generator
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from functools import lru_cache, partial
|
||||
from pathlib import Path
|
||||
from typing import Generator, List, NamedTuple, Optional, Tuple, Union, cast
|
||||
from typing import NamedTuple, Optional, Union, cast
|
||||
from urllib.request import urlopen, urlretrieve
|
||||
|
||||
PYPI_INSTANCE = "https://pypi.org/pypi"
|
||||
PYPI_TOP_PACKAGES = (
|
||||
"https://hugovk.github.io/top-pypi-packages/top-pypi-packages-30-days.min.json"
|
||||
"https://hugovk.github.io/top-pypi-packages/top-pypi-packages.min.json"
|
||||
)
|
||||
INTERNAL_BLACK_REPO = f"{tempfile.gettempdir()}/__black"
|
||||
|
||||
@ -54,7 +55,7 @@ def get_pypi_download_url(package: str, version: Optional[str]) -> str:
|
||||
return cast(str, source["url"])
|
||||
|
||||
|
||||
def get_top_packages() -> List[str]:
|
||||
def get_top_packages() -> list[str]:
|
||||
with urlopen(PYPI_TOP_PACKAGES) as page:
|
||||
result = json.load(page)
|
||||
|
||||
@ -150,7 +151,7 @@ def git_switch_branch(
|
||||
subprocess.run(args, cwd=repo)
|
||||
|
||||
|
||||
def init_repos(options: Namespace) -> Tuple[Path, ...]:
|
||||
def init_repos(options: Namespace) -> tuple[Path, ...]:
|
||||
options.output.mkdir(exist_ok=True)
|
||||
|
||||
if options.top_packages:
|
||||
@ -206,7 +207,7 @@ def format_repo_with_version(
|
||||
git_switch_branch(black_version.version, repo=black_repo)
|
||||
git_switch_branch(current_branch, repo=repo, new=True, from_branch=from_branch)
|
||||
|
||||
format_cmd: List[Union[Path, str]] = [
|
||||
format_cmd: list[Union[Path, str]] = [
|
||||
black_runner(black_version.version, black_repo),
|
||||
(black_repo / "black.py").resolve(),
|
||||
".",
|
||||
@ -222,7 +223,7 @@ def format_repo_with_version(
|
||||
return current_branch
|
||||
|
||||
|
||||
def format_repos(repos: Tuple[Path, ...], options: Namespace) -> None:
|
||||
def format_repos(repos: tuple[Path, ...], options: Namespace) -> None:
|
||||
black_versions = tuple(
|
||||
BlackVersion(*version.split(":")) for version in options.versions
|
||||
)
|
||||
@ -243,11 +244,9 @@ def format_repos(repos: Tuple[Path, ...], options: Namespace) -> None:
|
||||
|
||||
|
||||
def main() -> None:
|
||||
parser = ArgumentParser(
|
||||
description="""Black Gallery is a script that
|
||||
parser = ArgumentParser(description="""Black Gallery is a script that
|
||||
automates the process of applying different Black versions to a selected
|
||||
PyPI package and seeing the results between versions."""
|
||||
)
|
||||
PyPI package and seeing the results between versions.""")
|
||||
|
||||
group = parser.add_mutually_exclusive_group(required=True)
|
||||
group.add_argument("-p", "--pypi-package", help="PyPI package to download.")
|
||||
|
41
mypy.ini
41
mypy.ini
@ -1,41 +0,0 @@
|
||||
[mypy]
|
||||
# Specify the target platform details in config, so your developers are
|
||||
# free to run mypy on Windows, Linux, or macOS and get consistent
|
||||
# results.
|
||||
python_version=3.7
|
||||
|
||||
mypy_path=src
|
||||
|
||||
show_column_numbers=True
|
||||
show_error_codes=True
|
||||
|
||||
# be strict
|
||||
strict=True
|
||||
|
||||
# except for...
|
||||
no_implicit_reexport = False
|
||||
|
||||
# Unreachable blocks have been an issue when compiling mypyc, let's try
|
||||
# to avoid 'em in the first place.
|
||||
warn_unreachable=True
|
||||
|
||||
[mypy-blib2to3.driver.*]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-IPython.*]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-colorama.*]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-pathspec.*]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-tokenize_rt.*]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-uvloop.*]
|
||||
ignore_missing_imports = True
|
||||
|
||||
[mypy-_black_version.*]
|
||||
ignore_missing_imports = True
|
@ -21,7 +21,7 @@ endif
|
||||
|
||||
if v:version < 700 || !has('python3')
|
||||
func! __BLACK_MISSING()
|
||||
echo "The black.vim plugin requires vim7.0+ with Python 3.6 support."
|
||||
echo "The black.vim plugin requires vim7.0+ with Python 3.9 support."
|
||||
endfunc
|
||||
command! Black :call __BLACK_MISSING()
|
||||
command! BlackUpgrade :call __BLACK_MISSING()
|
||||
@ -72,12 +72,11 @@ endif
|
||||
|
||||
function BlackComplete(ArgLead, CmdLine, CursorPos)
|
||||
return [
|
||||
\ 'target_version=py27',
|
||||
\ 'target_version=py36',
|
||||
\ 'target_version=py37',
|
||||
\ 'target_version=py38',
|
||||
\ 'target_version=py39',
|
||||
\ 'target_version=py310',
|
||||
\ 'target_version=py311',
|
||||
\ 'target_version=py312',
|
||||
\ 'target_version=py313',
|
||||
\ ]
|
||||
endfunction
|
||||
|
||||
|
119
pyproject.toml
119
pyproject.toml
@ -7,33 +7,34 @@
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
target-version = ['py37', 'py38']
|
||||
target-version = ['py39']
|
||||
include = '\.pyi?$'
|
||||
extend-exclude = '''
|
||||
/(
|
||||
# The following are specific to Black, you probably don't want those.
|
||||
| blib2to3
|
||||
| tests/data
|
||||
| profiling
|
||||
)/
|
||||
tests/data/
|
||||
| profiling/
|
||||
| scripts/generate_schema.py # Uses match syntax
|
||||
)
|
||||
'''
|
||||
# We use preview style for formatting Black itself. If you
|
||||
# want stable formatting across releases, you should keep
|
||||
# this off.
|
||||
preview = true
|
||||
# We use the unstable style for formatting Black itself. If you
|
||||
# want bug-free formatting, you should keep this off. If you want
|
||||
# stable formatting across releases, you should also keep `preview = true`
|
||||
# (which is implied by this flag) off.
|
||||
unstable = true
|
||||
|
||||
# Build system information and other project-specific configuration below.
|
||||
# NOTE: You don't need this in your own Black configuration.
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling>=1.8.0", "hatch-vcs", "hatch-fancy-pypi-readme"]
|
||||
requires = ["hatchling>=1.20.0", "hatch-vcs", "hatch-fancy-pypi-readme"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[project]
|
||||
name = "black"
|
||||
description = "The uncompromising code formatter."
|
||||
license = { text = "MIT" }
|
||||
requires-python = ">=3.7"
|
||||
license = "MIT"
|
||||
requires-python = ">=3.9"
|
||||
authors = [
|
||||
{ name = "Łukasz Langa", email = "lukasz@langa.pl" },
|
||||
]
|
||||
@ -54,11 +55,11 @@ classifiers = [
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3 :: Only",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Programming Language :: Python :: 3.9",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: 3.13",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
"Topic :: Software Development :: Quality Assurance",
|
||||
]
|
||||
@ -68,18 +69,16 @@ dependencies = [
|
||||
"packaging>=22.0",
|
||||
"pathspec>=0.9.0",
|
||||
"platformdirs>=2",
|
||||
"pytokens>=0.1.10",
|
||||
"tomli>=1.1.0; python_version < '3.11'",
|
||||
"typed-ast>=1.4.2; python_version < '3.8' and implementation_name == 'cpython'",
|
||||
"typing_extensions>=3.10.0.0; python_version < '3.10'",
|
||||
"typing_extensions>=4.0.1; python_version < '3.11'",
|
||||
]
|
||||
dynamic = ["readme", "version"]
|
||||
|
||||
[project.optional-dependencies]
|
||||
colorama = ["colorama>=0.4.3"]
|
||||
uvloop = ["uvloop>=0.15.2"]
|
||||
d = [
|
||||
"aiohttp>=3.7.4",
|
||||
]
|
||||
d = ["aiohttp>=3.10"]
|
||||
jupyter = [
|
||||
"ipython>=7.8.0",
|
||||
"tokenize-rt>=3.2.0",
|
||||
@ -89,9 +88,14 @@ jupyter = [
|
||||
black = "black:patched_main"
|
||||
blackd = "blackd:patched_main [d]"
|
||||
|
||||
[project.entry-points."validate_pyproject.tool_schema"]
|
||||
black = "black.schema:get_schema"
|
||||
|
||||
[project.urls]
|
||||
Documentation = "https://black.readthedocs.io/"
|
||||
Changelog = "https://github.com/psf/black/blob/main/CHANGES.md"
|
||||
Homepage = "https://github.com/psf/black"
|
||||
Repository = "https://github.com/psf/black"
|
||||
Issues = "https://github.com/psf/black/issues"
|
||||
|
||||
[tool.hatch.metadata.hooks.fancy-pypi-readme]
|
||||
content-type = "text/markdown"
|
||||
@ -115,14 +119,15 @@ exclude = ["/profiling"]
|
||||
[tool.hatch.build.targets.wheel]
|
||||
only-include = ["src"]
|
||||
sources = ["src"]
|
||||
# Note that we change the behaviour of this flag below
|
||||
macos-max-compat = true
|
||||
|
||||
[tool.hatch.build.targets.wheel.hooks.mypyc]
|
||||
enable-by-default = false
|
||||
dependencies = [
|
||||
"hatch-mypyc>=0.13.0",
|
||||
"mypy==0.991",
|
||||
# Required stubs to be removed when the packages support PEP 561 themselves
|
||||
"types-typed-ast>=1.4.2",
|
||||
"hatch-mypyc>=0.16.0",
|
||||
"mypy>=1.12",
|
||||
"click>=8.1.7",
|
||||
]
|
||||
require-runtime-dependencies = true
|
||||
exclude = [
|
||||
@ -140,16 +145,19 @@ exclude = [
|
||||
# Compiled modules can't be run directly and that's a problem here:
|
||||
"/src/black/__main__.py",
|
||||
]
|
||||
mypy-args = ["--ignore-missing-imports"]
|
||||
options = { debug_level = "0" }
|
||||
|
||||
[tool.cibuildwheel]
|
||||
build-verbosity = 1
|
||||
|
||||
# So these are the environments we target:
|
||||
# - Python: CPython 3.7+ only
|
||||
# - Python: CPython 3.9+ only
|
||||
# - Architecture (64-bit only): amd64 / x86_64, universal2, and arm64
|
||||
# - OS: Linux (no musl), Windows, and macOS
|
||||
build = "cp3*-*"
|
||||
skip = ["*-manylinux_i686", "*-musllinux_*", "*-win32", "pp-*"]
|
||||
build = "cp3*"
|
||||
skip = ["*-manylinux_i686", "*-musllinux_*", "*-win32", "pp*"]
|
||||
|
||||
# This is the bare minimum needed to run the test suite. Pulling in the full
|
||||
# test_requirements.txt would download a bunch of other packages not necessary
|
||||
# here and would slow down the testing step a fair bit.
|
||||
@ -164,11 +172,9 @@ test-skip = ["*-macosx_arm64", "*-macosx_universal2:arm64"]
|
||||
HATCH_BUILD_HOOKS_ENABLE = "1"
|
||||
MYPYC_OPT_LEVEL = "3"
|
||||
MYPYC_DEBUG_LEVEL = "0"
|
||||
# CPython 3.11 wheels aren't available for aiohttp and building a Cython extension
|
||||
# from source also doesn't work.
|
||||
AIOHTTP_NO_EXTENSIONS = "1"
|
||||
|
||||
[tool.cibuildwheel.linux]
|
||||
manylinux-x86_64-image = "manylinux_2_28"
|
||||
before-build = [
|
||||
"yum install -y clang gcc",
|
||||
]
|
||||
@ -177,16 +183,16 @@ before-build = [
|
||||
HATCH_BUILD_HOOKS_ENABLE = "1"
|
||||
MYPYC_OPT_LEVEL = "3"
|
||||
MYPYC_DEBUG_LEVEL = "0"
|
||||
|
||||
# Black needs Clang to compile successfully on Linux.
|
||||
CC = "clang"
|
||||
AIOHTTP_NO_EXTENSIONS = "1"
|
||||
|
||||
[tool.isort]
|
||||
atomic = true
|
||||
profile = "black"
|
||||
line_length = 88
|
||||
skip_gitignore = true
|
||||
skip_glob = ["src/blib2to3", "tests/data", "profiling"]
|
||||
skip_glob = ["tests/data", "profiling"]
|
||||
known_first_party = ["black", "blib2to3", "blackd", "_black_version"]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
@ -200,18 +206,39 @@ markers = [
|
||||
"incompatible_with_mypyc: run when testing mypyc compiled black"
|
||||
]
|
||||
xfail_strict = true
|
||||
filterwarnings = [
|
||||
"error",
|
||||
# this is mitigated by a try/catch in https://github.com/psf/black/pull/2974/
|
||||
# this ignore can be removed when support for aiohttp 3.7 is dropped.
|
||||
'''ignore:Decorator `@unittest_run_loop` is no longer needed in aiohttp 3\.8\+:DeprecationWarning''',
|
||||
# this is mitigated by a try/catch in https://github.com/psf/black/pull/3198/
|
||||
# this ignore can be removed when support for aiohttp 3.x is dropped.
|
||||
'''ignore:Middleware decorator is deprecated since 4\.0 and its behaviour is default, you can simply remove this decorator:DeprecationWarning''',
|
||||
# this is mitigated by https://github.com/python/cpython/issues/79071 in python 3.8+
|
||||
# this ignore can be removed when support for 3.7 is dropped.
|
||||
'''ignore:Bare functions are deprecated, use async ones:DeprecationWarning''',
|
||||
# aiohttp is using deprecated cgi modules - Safe to remove when fixed:
|
||||
# https://github.com/aio-libs/aiohttp/issues/6905
|
||||
'''ignore:'cgi' is deprecated and slated for removal in Python 3.13:DeprecationWarning''',
|
||||
filterwarnings = ["error"]
|
||||
[tool.coverage.report]
|
||||
omit = [
|
||||
"src/blib2to3/*",
|
||||
"tests/data/*",
|
||||
"*/site-packages/*",
|
||||
".tox/*"
|
||||
]
|
||||
[tool.coverage.run]
|
||||
relative_files = true
|
||||
branch = true
|
||||
|
||||
[tool.mypy]
|
||||
# Specify the target platform details in config, so your developers are
|
||||
# free to run mypy on Windows, Linux, or macOS and get consistent
|
||||
# results.
|
||||
python_version = "3.9"
|
||||
mypy_path = "src"
|
||||
strict = true
|
||||
strict_bytes = true
|
||||
local_partial_types = true
|
||||
# Unreachable blocks have been an issue when compiling mypyc, let's try to avoid 'em in the first place.
|
||||
warn_unreachable = true
|
||||
implicit_reexport = true
|
||||
show_error_codes = true
|
||||
show_column_numbers = true
|
||||
|
||||
[[tool.mypy.overrides]]
|
||||
module = ["pathspec.*", "IPython.*", "colorama.*", "tokenize_rt.*", "uvloop.*", "_black_version.*"]
|
||||
ignore_missing_imports = true
|
||||
|
||||
# CI only checks src/, but in case users are running LSP or similar we explicitly ignore
|
||||
# errors in test data files.
|
||||
[[tool.mypy.overrides]]
|
||||
module = ["tests.data.*"]
|
||||
ignore_errors = true
|
||||
|
@ -14,7 +14,7 @@
|
||||
|
||||
import commonmark
|
||||
import yaml
|
||||
from bs4 import BeautifulSoup
|
||||
from bs4 import BeautifulSoup # type: ignore[import-untyped]
|
||||
|
||||
|
||||
def main(changes: str, source_version_control: str) -> None:
|
||||
|
@ -8,7 +8,7 @@
|
||||
import sys
|
||||
|
||||
import commonmark
|
||||
from bs4 import BeautifulSoup
|
||||
from bs4 import BeautifulSoup # type: ignore[import-untyped]
|
||||
|
||||
|
||||
def main(changes: str, the_basics: str) -> None:
|
||||
@ -20,17 +20,18 @@ def main(changes: str, the_basics: str) -> None:
|
||||
|
||||
the_basics_html = commonmark.commonmark(the_basics)
|
||||
the_basics_soup = BeautifulSoup(the_basics_html, "html.parser")
|
||||
(version_example,) = [
|
||||
version_examples = [
|
||||
code_block.string
|
||||
for code_block in the_basics_soup.find_all(class_="language-console")
|
||||
if "$ black --version" in code_block.string
|
||||
]
|
||||
|
||||
for tag in tags:
|
||||
for version_example in version_examples:
|
||||
if tag in version_example and tag != latest_tag:
|
||||
print(
|
||||
"Please set the version in the ``black --version`` "
|
||||
"example from ``the_basics.md`` to be the latest one.\n"
|
||||
"examples from ``the_basics.md`` to be the latest one.\n"
|
||||
f"Expected {latest_tag}, got {tag}.\n"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
@ -21,19 +21,15 @@
|
||||
import subprocess
|
||||
import sys
|
||||
import zipfile
|
||||
from base64 import b64encode
|
||||
from io import BytesIO
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
from typing import Any, Final, Literal
|
||||
|
||||
import click
|
||||
import urllib3
|
||||
from packaging.version import Version
|
||||
|
||||
if sys.version_info >= (3, 8):
|
||||
from typing import Final, Literal
|
||||
else:
|
||||
from typing_extensions import Final, Literal
|
||||
|
||||
COMMENT_FILE: Final = ".pr-comment.json"
|
||||
DIFF_STEP_NAME: Final = "Generate HTML diff report"
|
||||
DOCS_URL: Final = (
|
||||
@ -52,7 +48,17 @@ def set_output(name: str, value: str) -> None:
|
||||
print(f"[INFO]: setting '{name}' to '{value}'")
|
||||
else:
|
||||
print(f"[INFO]: setting '{name}' to [{len(value)} chars]")
|
||||
print(f"::set-output name={name}::{value}")
|
||||
|
||||
if "GITHUB_OUTPUT" in os.environ:
|
||||
if "\n" in value:
|
||||
# https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings
|
||||
delimiter = b64encode(os.urandom(16)).decode()
|
||||
value = f"{delimiter}\n{value}\n{delimiter}"
|
||||
command = f"{name}<<{value}"
|
||||
else:
|
||||
command = f"{name}={value}"
|
||||
with open(os.environ["GITHUB_OUTPUT"], "a") as f:
|
||||
print(command, file=f)
|
||||
|
||||
|
||||
def http_get(url: str, *, is_json: bool = True, **kwargs: Any) -> Any:
|
||||
@ -108,7 +114,7 @@ def main() -> None:
|
||||
@main.command("config", help="Acquire run configuration and metadata.")
|
||||
@click.argument("event", type=click.Choice(["push", "pull_request"]))
|
||||
def config(event: Literal["push", "pull_request"]) -> None:
|
||||
import diff_shades
|
||||
import diff_shades # type: ignore[import-not-found]
|
||||
|
||||
if event == "push":
|
||||
jobs = [{"mode": "preview-changes", "force-flag": "--force-preview-style"}]
|
||||
@ -218,9 +224,7 @@ def comment_details(run_id: str) -> None:
|
||||
# while it's still in progress seems impossible).
|
||||
body = body.replace("$workflow-run-url", data["html_url"])
|
||||
body = body.replace("$job-diff-url", diff_url)
|
||||
# https://github.community/t/set-output-truncates-multiline-strings/16852/3
|
||||
escaped = body.replace("%", "%25").replace("\n", "%0A").replace("\r", "%0D")
|
||||
set_output("comment-body", escaped)
|
||||
set_output("comment-body", body)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
@ -5,14 +5,11 @@
|
||||
a coverage-guided fuzzer I'm working on.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
import hypothesmith
|
||||
from hypothesis import HealthCheck, given, settings
|
||||
from hypothesis import strategies as st
|
||||
|
||||
import black
|
||||
from blib2to3.pgen2.tokenize import TokenError
|
||||
|
||||
|
||||
# This test uses the Hypothesis and Hypothesmith libraries to generate random
|
||||
@ -21,7 +18,7 @@
|
||||
max_examples=1000, # roughly 1k tests/minute, or half that under coverage
|
||||
derandomize=True, # deterministic mode to avoid CI flakiness
|
||||
deadline=None, # ignore Hypothesis' health checks; we already know that
|
||||
suppress_health_check=HealthCheck.all(), # this is slow and filter-heavy.
|
||||
suppress_health_check=list(HealthCheck), # this is slow and filter-heavy.
|
||||
)
|
||||
@given(
|
||||
# Note that while Hypothesmith might generate code unlike that written by
|
||||
@ -45,23 +42,7 @@ def test_idempotent_any_syntatically_valid_python(
|
||||
compile(src_contents, "<string>", "exec") # else the bug is in hypothesmith
|
||||
|
||||
# Then format the code...
|
||||
try:
|
||||
dst_contents = black.format_str(src_contents, mode=mode)
|
||||
except black.InvalidInput:
|
||||
# This is a bug - if it's valid Python code, as above, Black should be
|
||||
# able to cope with it. See issues #970, #1012
|
||||
# TODO: remove this try-except block when issues are resolved.
|
||||
return
|
||||
except TokenError as e:
|
||||
if ( # Special-case logic for backslashes followed by newlines or end-of-input
|
||||
e.args[0] == "EOF in multi-line statement"
|
||||
and re.search(r"\\($|\r?\n)", src_contents) is not None
|
||||
):
|
||||
# This is a bug - if it's valid Python code, as above, Black should be
|
||||
# able to cope with it. See issue #1012.
|
||||
# TODO: remove this block when the issue is resolved.
|
||||
return
|
||||
raise
|
||||
|
||||
# And check that we got equivalent and stable output.
|
||||
black.assert_equivalent(src_contents, dst_contents)
|
||||
|
74
scripts/generate_schema.py
Executable file
74
scripts/generate_schema.py
Executable file
@ -0,0 +1,74 @@
|
||||
import json
|
||||
from typing import IO, Any
|
||||
|
||||
import click
|
||||
|
||||
import black
|
||||
|
||||
|
||||
def generate_schema_from_click(
|
||||
cmd: click.Command,
|
||||
) -> dict[str, Any]:
|
||||
result: dict[str, dict[str, Any]] = {}
|
||||
for param in cmd.params:
|
||||
if not isinstance(param, click.Option) or param.is_eager:
|
||||
continue
|
||||
|
||||
assert param.name
|
||||
name = param.name.replace("_", "-")
|
||||
|
||||
result[name] = {}
|
||||
|
||||
match param.type:
|
||||
case click.types.IntParamType():
|
||||
result[name]["type"] = "integer"
|
||||
case click.types.StringParamType() | click.types.Path():
|
||||
result[name]["type"] = "string"
|
||||
case click.types.Choice(choices=choices):
|
||||
result[name]["enum"] = choices
|
||||
case click.types.BoolParamType():
|
||||
result[name]["type"] = "boolean"
|
||||
case _:
|
||||
msg = f"{param.type!r} not a known type for {param}"
|
||||
raise TypeError(msg)
|
||||
|
||||
if param.multiple:
|
||||
result[name] = {"type": "array", "items": result[name]}
|
||||
|
||||
result[name]["description"] = param.help
|
||||
|
||||
if param.default is not None and not param.multiple:
|
||||
result[name]["default"] = param.default
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@click.command(context_settings={"help_option_names": ["-h", "--help"]})
|
||||
@click.option("--schemastore", is_flag=True, help="SchemaStore format")
|
||||
@click.option("--outfile", type=click.File(mode="w"), help="Write to file")
|
||||
def main(schemastore: bool, outfile: IO[str]) -> None:
|
||||
properties = generate_schema_from_click(black.main)
|
||||
del properties["line-ranges"]
|
||||
|
||||
schema: dict[str, Any] = {
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"$id": (
|
||||
"https://github.com/psf/black/blob/main/src/black/resources/black.schema.json"
|
||||
),
|
||||
"$comment": "tool.black table in pyproject.toml",
|
||||
"type": "object",
|
||||
"additionalProperties": False,
|
||||
"properties": properties,
|
||||
}
|
||||
|
||||
if schemastore:
|
||||
schema["$id"] = "https://json.schemastore.org/partial-black.json"
|
||||
# The precise list of unstable features may change frequently, so don't
|
||||
# bother putting it in SchemaStore
|
||||
schema["properties"]["enable-unstable-feature"]["items"] = {"type": "string"}
|
||||
|
||||
print(json.dumps(schema, indent=2), file=outfile)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@ -15,14 +15,15 @@
|
||||
pip install -U wcwidth
|
||||
|
||||
"""
|
||||
|
||||
import sys
|
||||
from collections.abc import Iterable
|
||||
from os.path import basename, dirname, join
|
||||
from typing import Iterable, Tuple
|
||||
|
||||
import wcwidth
|
||||
import wcwidth # type: ignore[import-not-found]
|
||||
|
||||
|
||||
def make_width_table() -> Iterable[Tuple[int, int, int]]:
|
||||
def make_width_table() -> Iterable[tuple[int, int, int]]:
|
||||
start_codepoint = -1
|
||||
end_codepoint = -1
|
||||
range_width = -2
|
||||
@ -49,21 +50,13 @@ def make_width_table() -> Iterable[Tuple[int, int, int]]:
|
||||
def main() -> None:
|
||||
table_path = join(dirname(__file__), "..", "src", "black", "_width_table.py")
|
||||
with open(table_path, "w") as f:
|
||||
f.write(
|
||||
f"""# Generated by {basename(__file__)}
|
||||
f.write(f"""# Generated by {basename(__file__)}
|
||||
# wcwidth {wcwidth.__version__}
|
||||
# Unicode {wcwidth.list_versions()[-1]}
|
||||
import sys
|
||||
from typing import List, Tuple
|
||||
from typing import Final
|
||||
|
||||
if sys.version_info < (3, 8):
|
||||
from typing_extensions import Final
|
||||
else:
|
||||
from typing import Final
|
||||
|
||||
WIDTH_TABLE: Final[List[Tuple[int, int, int]]] = [
|
||||
"""
|
||||
)
|
||||
WIDTH_TABLE: Final[list[tuple[int, int, int]]] = [
|
||||
""")
|
||||
for triple in make_width_table():
|
||||
f.write(f" {triple!r},\n")
|
||||
f.write("]\n")
|
||||
|
@ -9,7 +9,7 @@
|
||||
|
||||
|
||||
def git(*args: str) -> str:
|
||||
return check_output(["git"] + list(args)).decode("utf8").strip()
|
||||
return check_output(["git", *args]).decode("utf8").strip()
|
||||
|
||||
|
||||
def blackify(base_branch: str, black_command: str, logger: logging.Logger) -> int:
|
||||
@ -26,19 +26,19 @@ def blackify(base_branch: str, black_command: str, logger: logging.Logger) -> in
|
||||
merge_base = git("merge-base", "HEAD", base_branch)
|
||||
if not merge_base:
|
||||
logger.error(
|
||||
"Could not find a common commit for current head and %s" % base_branch
|
||||
f"Could not find a common commit for current head and {base_branch}"
|
||||
)
|
||||
return 1
|
||||
|
||||
commits = git(
|
||||
"log", "--reverse", "--pretty=format:%H", "%s~1..HEAD" % merge_base
|
||||
"log", "--reverse", "--pretty=format:%H", f"{merge_base}~1..HEAD"
|
||||
).split()
|
||||
for commit in commits:
|
||||
git("checkout", commit, "-b%s-black" % commit)
|
||||
git("checkout", commit, f"-b{commit}-black")
|
||||
check_output(black_command, shell=True)
|
||||
git("commit", "-aqm", "blackify")
|
||||
|
||||
git("checkout", base_branch, "-b%s-black" % current_branch)
|
||||
git("checkout", base_branch, f"-b{current_branch}-black")
|
||||
|
||||
for last_commit, commit in zip(commits, commits[1:]):
|
||||
allow_empty = (
|
||||
@ -51,7 +51,7 @@ def blackify(base_branch: str, black_command: str, logger: logging.Logger) -> in
|
||||
"diff",
|
||||
"--binary",
|
||||
"--find-copies",
|
||||
"%s-black..%s-black" % (last_commit, commit),
|
||||
f"{last_commit}-black..{commit}-black",
|
||||
],
|
||||
stdout=PIPE,
|
||||
)
|
||||
@ -77,7 +77,7 @@ def blackify(base_branch: str, black_command: str, logger: logging.Logger) -> in
|
||||
git("commit", "--allow-empty", "-aqC", commit)
|
||||
|
||||
for commit in commits:
|
||||
git("branch", "-qD", "%s-black" % commit)
|
||||
git("branch", "-qD", f"{commit}-black")
|
||||
|
||||
return 0
|
||||
|
||||
|
244
scripts/release.py
Executable file
244
scripts/release.py
Executable file
@ -0,0 +1,244 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
"""
|
||||
Tool to help automate changes needed in commits during and after releases
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from subprocess import run
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
NEW_VERSION_CHANGELOG_TEMPLATE = """\
|
||||
## Unreleased
|
||||
|
||||
### Highlights
|
||||
|
||||
<!-- Include any especially major or disruptive changes here -->
|
||||
|
||||
### Stable style
|
||||
|
||||
<!-- Changes that affect Black's stable style -->
|
||||
|
||||
### Preview style
|
||||
|
||||
<!-- Changes that affect Black's preview style -->
|
||||
|
||||
### Configuration
|
||||
|
||||
<!-- Changes to how Black can be configured -->
|
||||
|
||||
### Packaging
|
||||
|
||||
<!-- Changes to how Black is packaged, such as dependency requirements -->
|
||||
|
||||
### Parser
|
||||
|
||||
<!-- Changes to the parser or to version autodetection -->
|
||||
|
||||
### Performance
|
||||
|
||||
<!-- Changes that improve Black's performance. -->
|
||||
|
||||
### Output
|
||||
|
||||
<!-- Changes to Black's terminal output and error messages -->
|
||||
|
||||
### _Blackd_
|
||||
|
||||
<!-- Changes to blackd -->
|
||||
|
||||
### Integrations
|
||||
|
||||
<!-- For example, Docker, GitHub Actions, pre-commit, editors -->
|
||||
|
||||
### Documentation
|
||||
|
||||
<!-- Major changes to documentation and policies. Small docs changes
|
||||
don't need a changelog entry. -->
|
||||
"""
|
||||
|
||||
|
||||
class NoGitTagsError(Exception): ... # noqa: E701,E761
|
||||
|
||||
|
||||
# TODO: Do better with alpha + beta releases
|
||||
# Maybe we vendor packaging library
|
||||
def get_git_tags(versions_only: bool = True) -> list[str]:
|
||||
"""Pull out all tags or calvers only"""
|
||||
cp = run(["git", "tag"], capture_output=True, check=True, encoding="utf8")
|
||||
if not cp.stdout:
|
||||
LOG.error(f"Returned no git tags stdout: {cp.stderr}")
|
||||
raise NoGitTagsError
|
||||
git_tags = cp.stdout.splitlines()
|
||||
if versions_only:
|
||||
return [t for t in git_tags if t[0].isdigit()]
|
||||
return git_tags
|
||||
|
||||
|
||||
# TODO: Support sorting alhpa/beta releases correctly
|
||||
def tuple_calver(calver: str) -> tuple[int, ...]: # mypy can't notice maxsplit below
|
||||
"""Convert a calver string into a tuple of ints for sorting"""
|
||||
try:
|
||||
return tuple(map(int, calver.split(".", maxsplit=2)))
|
||||
except ValueError:
|
||||
return (0, 0, 0)
|
||||
|
||||
|
||||
class SourceFiles:
|
||||
def __init__(self, black_repo_dir: Path):
|
||||
# File path fun all pathlib to be platform agnostic
|
||||
self.black_repo_path = black_repo_dir
|
||||
self.changes_path = self.black_repo_path / "CHANGES.md"
|
||||
self.docs_path = self.black_repo_path / "docs"
|
||||
self.version_doc_paths = (
|
||||
self.docs_path / "integrations" / "source_version_control.md",
|
||||
self.docs_path / "usage_and_configuration" / "the_basics.md",
|
||||
)
|
||||
self.current_version = self.get_current_version()
|
||||
self.next_version = self.get_next_version()
|
||||
|
||||
def __str__(self) -> str:
|
||||
return f"""\
|
||||
> SourceFiles ENV:
|
||||
Repo path: {self.black_repo_path}
|
||||
CHANGES.md path: {self.changes_path}
|
||||
docs path: {self.docs_path}
|
||||
Current version: {self.current_version}
|
||||
Next version: {self.next_version}
|
||||
"""
|
||||
|
||||
def add_template_to_changes(self) -> int:
|
||||
"""Add the template to CHANGES.md if it does not exist"""
|
||||
LOG.info(f"Adding template to {self.changes_path}")
|
||||
|
||||
with self.changes_path.open("r") as cfp:
|
||||
changes_string = cfp.read()
|
||||
|
||||
if "## Unreleased" in changes_string:
|
||||
LOG.error(f"{self.changes_path} already has unreleased template")
|
||||
return 1
|
||||
|
||||
templated_changes_string = changes_string.replace(
|
||||
"# Change Log\n",
|
||||
f"# Change Log\n\n{NEW_VERSION_CHANGELOG_TEMPLATE}",
|
||||
)
|
||||
|
||||
with self.changes_path.open("w") as cfp:
|
||||
cfp.write(templated_changes_string)
|
||||
|
||||
LOG.info(f"Added template to {self.changes_path}")
|
||||
return 0
|
||||
|
||||
def cleanup_changes_template_for_release(self) -> None:
|
||||
LOG.info(f"Cleaning up {self.changes_path}")
|
||||
|
||||
with self.changes_path.open("r") as cfp:
|
||||
changes_string = cfp.read()
|
||||
|
||||
# Change Unreleased to next version
|
||||
versioned_changes = changes_string.replace(
|
||||
"## Unreleased", f"## {self.next_version}"
|
||||
)
|
||||
|
||||
# Remove all comments (subheadings are harder - Human required still)
|
||||
no_comments_changes = []
|
||||
for line in versioned_changes.splitlines():
|
||||
if line.startswith("<!--") or line.endswith("-->"):
|
||||
continue
|
||||
no_comments_changes.append(line)
|
||||
|
||||
with self.changes_path.open("w") as cfp:
|
||||
cfp.write("\n".join(no_comments_changes) + "\n")
|
||||
|
||||
LOG.debug(f"Finished Cleaning up {self.changes_path}")
|
||||
|
||||
def get_current_version(self) -> str:
|
||||
"""Get the latest git (version) tag as latest version"""
|
||||
return sorted(get_git_tags(), key=lambda k: tuple_calver(k))[-1]
|
||||
|
||||
def get_next_version(self) -> str:
|
||||
"""Workout the year and month + version number we need to move to"""
|
||||
base_calver = datetime.today().strftime("%y.%m")
|
||||
calver_parts = base_calver.split(".")
|
||||
base_calver = f"{calver_parts[0]}.{int(calver_parts[1])}" # Remove leading 0
|
||||
git_tags = get_git_tags()
|
||||
same_month_releases = [
|
||||
t for t in git_tags if t.startswith(base_calver) and "a" not in t
|
||||
]
|
||||
if len(same_month_releases) < 1:
|
||||
return f"{base_calver}.0"
|
||||
same_month_version = same_month_releases[-1].split(".", 2)[-1]
|
||||
return f"{base_calver}.{int(same_month_version) + 1}"
|
||||
|
||||
def update_repo_for_release(self) -> int:
|
||||
"""Update CHANGES.md + doc files ready for release"""
|
||||
self.cleanup_changes_template_for_release()
|
||||
self.update_version_in_docs()
|
||||
return 0 # return 0 if no exceptions hit
|
||||
|
||||
def update_version_in_docs(self) -> None:
|
||||
for doc_path in self.version_doc_paths:
|
||||
LOG.info(f"Updating black version to {self.next_version} in {doc_path}")
|
||||
|
||||
with doc_path.open("r") as dfp:
|
||||
doc_string = dfp.read()
|
||||
|
||||
next_version_doc = doc_string.replace(
|
||||
self.current_version, self.next_version
|
||||
)
|
||||
|
||||
with doc_path.open("w") as dfp:
|
||||
dfp.write(next_version_doc)
|
||||
|
||||
LOG.debug(
|
||||
f"Finished updating black version to {self.next_version} in {doc_path}"
|
||||
)
|
||||
|
||||
|
||||
def _handle_debug(debug: bool) -> None:
|
||||
"""Turn on debugging if asked otherwise INFO default"""
|
||||
log_level = logging.DEBUG if debug else logging.INFO
|
||||
logging.basicConfig(
|
||||
format="[%(asctime)s] %(levelname)s: %(message)s (%(filename)s:%(lineno)d)",
|
||||
level=log_level,
|
||||
)
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
"-a",
|
||||
"--add-changes-template",
|
||||
action="store_true",
|
||||
help="Add the Unreleased template to CHANGES.md",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-d", "--debug", action="store_true", help="Verbose debug output"
|
||||
)
|
||||
args = parser.parse_args()
|
||||
_handle_debug(args.debug)
|
||||
return args
|
||||
|
||||
|
||||
def main() -> int:
|
||||
args = parse_args()
|
||||
|
||||
# Need parent.parent cause script is in scripts/ directory
|
||||
sf = SourceFiles(Path(__file__).parent.parent)
|
||||
|
||||
if args.add_changes_template:
|
||||
return sf.add_template_to_changes()
|
||||
|
||||
LOG.info(f"Current version detected to be {sf.current_version}")
|
||||
LOG.info(f"Next version will be {sf.next_version}")
|
||||
return sf.update_repo_for_release()
|
||||
|
||||
|
||||
if __name__ == "__main__": # pragma: no cover
|
||||
sys.exit(main())
|
69
scripts/release_tests.py
Normal file
69
scripts/release_tests.py
Normal file
@ -0,0 +1,69 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
from shutil import rmtree
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Any
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
from release import SourceFiles, tuple_calver # type: ignore
|
||||
|
||||
|
||||
class FakeDateTime:
|
||||
"""Used to mock the date to test generating next calver function"""
|
||||
|
||||
def today(*args: Any, **kwargs: Any) -> "FakeDateTime": # noqa
|
||||
return FakeDateTime()
|
||||
|
||||
# Add leading 0 on purpose to ensure we remove it
|
||||
def strftime(*args: Any, **kwargs: Any) -> str: # noqa
|
||||
return "69.01"
|
||||
|
||||
|
||||
class TestRelease(unittest.TestCase):
|
||||
def setUp(self) -> None:
|
||||
# We only test on >= 3.12
|
||||
self.tempdir = TemporaryDirectory(delete=False) # type: ignore
|
||||
self.tempdir_path = Path(self.tempdir.name)
|
||||
self.sf = SourceFiles(self.tempdir_path)
|
||||
|
||||
def tearDown(self) -> None:
|
||||
rmtree(self.tempdir.name)
|
||||
return super().tearDown()
|
||||
|
||||
@patch("release.get_git_tags")
|
||||
def test_get_current_version(self, mocked_git_tags: Mock) -> None:
|
||||
mocked_git_tags.return_value = ["1.1.0", "69.1.0", "69.1.1", "2.2.0"]
|
||||
self.assertEqual("69.1.1", self.sf.get_current_version())
|
||||
|
||||
@patch("release.get_git_tags")
|
||||
@patch("release.datetime", FakeDateTime)
|
||||
def test_get_next_version(self, mocked_git_tags: Mock) -> None:
|
||||
# test we handle no args
|
||||
mocked_git_tags.return_value = []
|
||||
self.assertEqual(
|
||||
"69.1.0",
|
||||
self.sf.get_next_version(),
|
||||
"Unable to get correct next version with no git tags",
|
||||
)
|
||||
|
||||
# test we handle
|
||||
mocked_git_tags.return_value = ["1.1.0", "69.1.0", "69.1.1", "2.2.0"]
|
||||
self.assertEqual(
|
||||
"69.1.2",
|
||||
self.sf.get_next_version(),
|
||||
"Unable to get correct version with 2 previous versions released this"
|
||||
" month",
|
||||
)
|
||||
|
||||
def test_tuple_calver(self) -> None:
|
||||
first_month_release = tuple_calver("69.1.0")
|
||||
second_month_release = tuple_calver("69.1.1")
|
||||
self.assertEqual((69, 1, 0), first_month_release)
|
||||
self.assertEqual((0, 0, 0), tuple_calver("69.1.1a0")) # Hack for alphas/betas
|
||||
self.assertTrue(first_month_release < second_month_release)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
File diff suppressed because it is too large
Load Diff
@ -1,15 +1,9 @@
|
||||
# Generated by make_width_table.py
|
||||
# wcwidth 0.2.6
|
||||
# Unicode 15.0.0
|
||||
import sys
|
||||
from typing import List, Tuple
|
||||
from typing import Final
|
||||
|
||||
if sys.version_info < (3, 8):
|
||||
from typing_extensions import Final
|
||||
else:
|
||||
from typing import Final
|
||||
|
||||
WIDTH_TABLE: Final[List[Tuple[int, int, int]]] = [
|
||||
WIDTH_TABLE: Final[list[tuple[int, int, int]]] = [
|
||||
(0, 0, 0),
|
||||
(1, 31, -1),
|
||||
(127, 159, -1),
|
||||
|
@ -1,13 +1,8 @@
|
||||
"""Builds on top of nodes.py to track brackets."""
|
||||
|
||||
import sys
|
||||
from collections.abc import Iterable, Sequence
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Dict, Iterable, List, Optional, Sequence, Set, Tuple, Union
|
||||
|
||||
if sys.version_info < (3, 8):
|
||||
from typing_extensions import Final
|
||||
else:
|
||||
from typing import Final
|
||||
from typing import Final, Optional, Union
|
||||
|
||||
from black.nodes import (
|
||||
BRACKET,
|
||||
@ -66,12 +61,12 @@ class BracketTracker:
|
||||
"""Keeps track of brackets on a line."""
|
||||
|
||||
depth: int = 0
|
||||
bracket_match: Dict[Tuple[Depth, NodeType], Leaf] = field(default_factory=dict)
|
||||
delimiters: Dict[LeafID, Priority] = field(default_factory=dict)
|
||||
bracket_match: dict[tuple[Depth, NodeType], Leaf] = field(default_factory=dict)
|
||||
delimiters: dict[LeafID, Priority] = field(default_factory=dict)
|
||||
previous: Optional[Leaf] = None
|
||||
_for_loop_depths: List[int] = field(default_factory=list)
|
||||
_lambda_argument_depths: List[int] = field(default_factory=list)
|
||||
invisible: List[Leaf] = field(default_factory=list)
|
||||
_for_loop_depths: list[int] = field(default_factory=list)
|
||||
_lambda_argument_depths: list[int] = field(default_factory=list)
|
||||
invisible: list[Leaf] = field(default_factory=list)
|
||||
|
||||
def mark(self, leaf: Leaf) -> None:
|
||||
"""Mark `leaf` with bracket-related metadata. Keep track of delimiters.
|
||||
@ -121,7 +116,7 @@ def mark(self, leaf: Leaf) -> None:
|
||||
if delim and self.previous is not None:
|
||||
self.delimiters[id(self.previous)] = delim
|
||||
else:
|
||||
delim = is_split_after_delimiter(leaf, self.previous)
|
||||
delim = is_split_after_delimiter(leaf)
|
||||
if delim:
|
||||
self.delimiters[id(leaf)] = delim
|
||||
if leaf.type in OPENING_BRACKETS:
|
||||
@ -133,6 +128,13 @@ def mark(self, leaf: Leaf) -> None:
|
||||
self.maybe_increment_lambda_arguments(leaf)
|
||||
self.maybe_increment_for_loop_variable(leaf)
|
||||
|
||||
def any_open_for_or_lambda(self) -> bool:
|
||||
"""Return True if there is an open for or lambda expression on the line.
|
||||
|
||||
See maybe_increment_for_loop_variable and maybe_increment_lambda_arguments
|
||||
for details."""
|
||||
return bool(self._for_loop_depths or self._lambda_argument_depths)
|
||||
|
||||
def any_open_brackets(self) -> bool:
|
||||
"""Return True if there is an yet unmatched open bracket on the line."""
|
||||
return bool(self.bracket_match)
|
||||
@ -214,7 +216,7 @@ def get_open_lsqb(self) -> Optional[Leaf]:
|
||||
return self.bracket_match.get((self.depth - 1, token.RSQB))
|
||||
|
||||
|
||||
def is_split_after_delimiter(leaf: Leaf, previous: Optional[Leaf] = None) -> Priority:
|
||||
def is_split_after_delimiter(leaf: Leaf) -> Priority:
|
||||
"""Return the priority of the `leaf` delimiter, given a line break after it.
|
||||
|
||||
The delimiter priorities returned here are from those delimiters that would
|
||||
@ -352,7 +354,7 @@ def max_delimiter_priority_in_atom(node: LN) -> Priority:
|
||||
return 0
|
||||
|
||||
|
||||
def get_leaves_inside_matching_brackets(leaves: Sequence[Leaf]) -> Set[LeafID]:
|
||||
def get_leaves_inside_matching_brackets(leaves: Sequence[Leaf]) -> set[LeafID]:
|
||||
"""Return leaves that are inside matching brackets.
|
||||
|
||||
The input `leaves` can have non-matching brackets at the head or tail parts.
|
||||
|
@ -1,21 +1,31 @@
|
||||
"""Caching of formatted files with feature-based invalidation."""
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
import pickle
|
||||
import sys
|
||||
import tempfile
|
||||
from collections.abc import Iterable
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import Dict, Iterable, Set, Tuple
|
||||
from typing import NamedTuple
|
||||
|
||||
from platformdirs import user_cache_dir
|
||||
|
||||
from _black_version import version as __version__
|
||||
from black.mode import Mode
|
||||
from black.output import err
|
||||
|
||||
# types
|
||||
Timestamp = float
|
||||
FileSize = int
|
||||
CacheInfo = Tuple[Timestamp, FileSize]
|
||||
Cache = Dict[str, CacheInfo]
|
||||
if sys.version_info >= (3, 11):
|
||||
from typing import Self
|
||||
else:
|
||||
from typing_extensions import Self
|
||||
|
||||
|
||||
class FileData(NamedTuple):
|
||||
st_mtime: float
|
||||
st_size: int
|
||||
hash: str
|
||||
|
||||
|
||||
def get_cache_dir() -> Path:
|
||||
@ -29,69 +39,112 @@ def get_cache_dir() -> Path:
|
||||
repeated calls.
|
||||
"""
|
||||
# NOTE: Function mostly exists as a clean way to test getting the cache directory.
|
||||
default_cache_dir = user_cache_dir("black", version=__version__)
|
||||
default_cache_dir = user_cache_dir("black")
|
||||
cache_dir = Path(os.environ.get("BLACK_CACHE_DIR", default_cache_dir))
|
||||
cache_dir = cache_dir / __version__
|
||||
return cache_dir
|
||||
|
||||
|
||||
CACHE_DIR = get_cache_dir()
|
||||
|
||||
|
||||
def read_cache(mode: Mode) -> Cache:
|
||||
"""Read the cache if it exists and is well formed.
|
||||
|
||||
If it is not well formed, the call to write_cache later should resolve the issue.
|
||||
"""
|
||||
cache_file = get_cache_file(mode)
|
||||
if not cache_file.exists():
|
||||
return {}
|
||||
|
||||
with cache_file.open("rb") as fobj:
|
||||
try:
|
||||
cache: Cache = pickle.load(fobj)
|
||||
except (pickle.UnpicklingError, ValueError, IndexError):
|
||||
return {}
|
||||
|
||||
return cache
|
||||
|
||||
|
||||
def get_cache_file(mode: Mode) -> Path:
|
||||
return CACHE_DIR / f"cache.{mode.get_cache_key()}.pickle"
|
||||
|
||||
|
||||
def get_cache_info(path: Path) -> CacheInfo:
|
||||
"""Return the information used to check if a file is already formatted or not."""
|
||||
@dataclass
|
||||
class Cache:
|
||||
mode: Mode
|
||||
cache_file: Path
|
||||
file_data: dict[str, FileData] = field(default_factory=dict)
|
||||
|
||||
@classmethod
|
||||
def read(cls, mode: Mode) -> Self:
|
||||
"""Read the cache if it exists and is well-formed.
|
||||
|
||||
If it is not well-formed, the call to write later should
|
||||
resolve the issue.
|
||||
"""
|
||||
cache_file = get_cache_file(mode)
|
||||
try:
|
||||
exists = cache_file.exists()
|
||||
except OSError as e:
|
||||
# Likely file too long; see #4172 and #4174
|
||||
err(f"Unable to read cache file {cache_file} due to {e}")
|
||||
return cls(mode, cache_file)
|
||||
if not exists:
|
||||
return cls(mode, cache_file)
|
||||
|
||||
with cache_file.open("rb") as fobj:
|
||||
try:
|
||||
data: dict[str, tuple[float, int, str]] = pickle.load(fobj)
|
||||
file_data = {k: FileData(*v) for k, v in data.items()}
|
||||
except (pickle.UnpicklingError, ValueError, IndexError):
|
||||
return cls(mode, cache_file)
|
||||
|
||||
return cls(mode, cache_file, file_data)
|
||||
|
||||
@staticmethod
|
||||
def hash_digest(path: Path) -> str:
|
||||
"""Return hash digest for path."""
|
||||
|
||||
data = path.read_bytes()
|
||||
return hashlib.sha256(data).hexdigest()
|
||||
|
||||
@staticmethod
|
||||
def get_file_data(path: Path) -> FileData:
|
||||
"""Return file data for path."""
|
||||
|
||||
stat = path.stat()
|
||||
return stat.st_mtime, stat.st_size
|
||||
hash = Cache.hash_digest(path)
|
||||
return FileData(stat.st_mtime, stat.st_size, hash)
|
||||
|
||||
def is_changed(self, source: Path) -> bool:
|
||||
"""Check if source has changed compared to cached version."""
|
||||
res_src = source.resolve()
|
||||
old = self.file_data.get(str(res_src))
|
||||
if old is None:
|
||||
return True
|
||||
|
||||
def filter_cached(cache: Cache, sources: Iterable[Path]) -> Tuple[Set[Path], Set[Path]]:
|
||||
st = res_src.stat()
|
||||
if st.st_size != old.st_size:
|
||||
return True
|
||||
if st.st_mtime != old.st_mtime:
|
||||
new_hash = Cache.hash_digest(res_src)
|
||||
if new_hash != old.hash:
|
||||
return True
|
||||
return False
|
||||
|
||||
def filtered_cached(self, sources: Iterable[Path]) -> tuple[set[Path], set[Path]]:
|
||||
"""Split an iterable of paths in `sources` into two sets.
|
||||
|
||||
The first contains paths of files that modified on disk or are not in the
|
||||
cache. The other contains paths to non-modified files.
|
||||
"""
|
||||
todo, done = set(), set()
|
||||
changed: set[Path] = set()
|
||||
done: set[Path] = set()
|
||||
for src in sources:
|
||||
res_src = src.resolve()
|
||||
if cache.get(str(res_src)) != get_cache_info(res_src):
|
||||
todo.add(src)
|
||||
if self.is_changed(src):
|
||||
changed.add(src)
|
||||
else:
|
||||
done.add(src)
|
||||
return todo, done
|
||||
return changed, done
|
||||
|
||||
|
||||
def write_cache(cache: Cache, sources: Iterable[Path], mode: Mode) -> None:
|
||||
"""Update the cache file."""
|
||||
cache_file = get_cache_file(mode)
|
||||
def write(self, sources: Iterable[Path]) -> None:
|
||||
"""Update the cache file data and write a new cache file."""
|
||||
self.file_data.update(
|
||||
**{str(src.resolve()): Cache.get_file_data(src) for src in sources}
|
||||
)
|
||||
try:
|
||||
CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
new_cache = {
|
||||
**cache,
|
||||
**{str(src.resolve()): get_cache_info(src) for src in sources},
|
||||
with tempfile.NamedTemporaryFile(
|
||||
dir=str(self.cache_file.parent), delete=False
|
||||
) as f:
|
||||
# We store raw tuples in the cache because it's faster.
|
||||
data: dict[str, tuple[float, int, str]] = {
|
||||
k: (*v,) for k, v in self.file_data.items()
|
||||
}
|
||||
with tempfile.NamedTemporaryFile(dir=str(cache_file.parent), delete=False) as f:
|
||||
pickle.dump(new_cache, f, protocol=4)
|
||||
os.replace(f.name, cache_file)
|
||||
pickle.dump(data, f, protocol=4)
|
||||
os.replace(f.name, self.cache_file)
|
||||
except OSError:
|
||||
pass
|
||||
|
@ -1,20 +1,17 @@
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Collection, Iterator
|
||||
from dataclasses import dataclass
|
||||
from functools import lru_cache
|
||||
from typing import Iterator, List, Optional, Union
|
||||
|
||||
if sys.version_info >= (3, 8):
|
||||
from typing import Final
|
||||
else:
|
||||
from typing_extensions import Final
|
||||
from typing import Final, Optional, Union
|
||||
|
||||
from black.mode import Mode, Preview
|
||||
from black.nodes import (
|
||||
CLOSING_BRACKETS,
|
||||
STANDALONE_COMMENT,
|
||||
WHITESPACE,
|
||||
container_of,
|
||||
first_leaf_of,
|
||||
make_simple_prefix,
|
||||
preceding_leaf,
|
||||
syms,
|
||||
)
|
||||
@ -26,10 +23,11 @@
|
||||
|
||||
FMT_OFF: Final = {"# fmt: off", "# fmt:off", "# yapf: disable"}
|
||||
FMT_SKIP: Final = {"# fmt: skip", "# fmt:skip"}
|
||||
FMT_PASS: Final = {*FMT_OFF, *FMT_SKIP}
|
||||
FMT_ON: Final = {"# fmt: on", "# fmt:on", "# yapf: enable"}
|
||||
|
||||
COMMENT_EXCEPTIONS = " !:#'"
|
||||
_COMMENT_PREFIX = "# "
|
||||
_COMMENT_LIST_SEPARATOR = ";"
|
||||
|
||||
|
||||
@dataclass
|
||||
@ -48,6 +46,8 @@ class ProtoComment:
|
||||
value: str # content of the comment
|
||||
newlines: int # how many newlines before the comment
|
||||
consumed: int # how many characters of the original leaf's prefix did we consume
|
||||
form_feed: bool # is there a form feed before the comment
|
||||
leading_whitespace: str # leading whitespace before the comment, if any
|
||||
|
||||
|
||||
def generate_comments(leaf: LN) -> Iterator[Leaf]:
|
||||
@ -69,25 +69,34 @@ def generate_comments(leaf: LN) -> Iterator[Leaf]:
|
||||
Inline comments are emitted as regular token.COMMENT leaves. Standalone
|
||||
are emitted with a fake STANDALONE_COMMENT token identifier.
|
||||
"""
|
||||
total_consumed = 0
|
||||
for pc in list_comments(leaf.prefix, is_endmarker=leaf.type == token.ENDMARKER):
|
||||
yield Leaf(pc.type, pc.value, prefix="\n" * pc.newlines)
|
||||
total_consumed = pc.consumed
|
||||
prefix = make_simple_prefix(pc.newlines, pc.form_feed)
|
||||
yield Leaf(pc.type, pc.value, prefix=prefix)
|
||||
normalize_trailing_prefix(leaf, total_consumed)
|
||||
|
||||
|
||||
@lru_cache(maxsize=4096)
|
||||
def list_comments(prefix: str, *, is_endmarker: bool) -> List[ProtoComment]:
|
||||
def list_comments(prefix: str, *, is_endmarker: bool) -> list[ProtoComment]:
|
||||
"""Return a list of :class:`ProtoComment` objects parsed from the given `prefix`."""
|
||||
result: List[ProtoComment] = []
|
||||
result: list[ProtoComment] = []
|
||||
if not prefix or "#" not in prefix:
|
||||
return result
|
||||
|
||||
consumed = 0
|
||||
nlines = 0
|
||||
ignored_lines = 0
|
||||
for index, line in enumerate(re.split("\r?\n", prefix)):
|
||||
consumed += len(line) + 1 # adding the length of the split '\n'
|
||||
line = line.lstrip()
|
||||
form_feed = False
|
||||
for index, full_line in enumerate(re.split("\r?\n", prefix)):
|
||||
consumed += len(full_line) + 1 # adding the length of the split '\n'
|
||||
match = re.match(r"^(\s*)(\S.*|)$", full_line)
|
||||
assert match
|
||||
whitespace, line = match.groups()
|
||||
if not line:
|
||||
nlines += 1
|
||||
if "\f" in full_line:
|
||||
form_feed = True
|
||||
if not line.startswith("#"):
|
||||
# Escaped newlines outside of a comment are not really newlines at
|
||||
# all. We treat a single-line comment following an escaped newline
|
||||
@ -103,13 +112,34 @@ def list_comments(prefix: str, *, is_endmarker: bool) -> List[ProtoComment]:
|
||||
comment = make_comment(line)
|
||||
result.append(
|
||||
ProtoComment(
|
||||
type=comment_type, value=comment, newlines=nlines, consumed=consumed
|
||||
type=comment_type,
|
||||
value=comment,
|
||||
newlines=nlines,
|
||||
consumed=consumed,
|
||||
form_feed=form_feed,
|
||||
leading_whitespace=whitespace,
|
||||
)
|
||||
)
|
||||
form_feed = False
|
||||
nlines = 0
|
||||
return result
|
||||
|
||||
|
||||
def normalize_trailing_prefix(leaf: LN, total_consumed: int) -> None:
|
||||
"""Normalize the prefix that's left over after generating comments.
|
||||
|
||||
Note: don't use backslashes for formatting or you'll lose your voting rights.
|
||||
"""
|
||||
remainder = leaf.prefix[total_consumed:]
|
||||
if "\\" not in remainder:
|
||||
nl_count = remainder.count("\n")
|
||||
form_feed = "\f" in remainder and remainder.endswith("\n")
|
||||
leaf.prefix = make_simple_prefix(nl_count, form_feed)
|
||||
return
|
||||
|
||||
leaf.prefix = ""
|
||||
|
||||
|
||||
def make_comment(content: str) -> str:
|
||||
"""Return a consistently formatted comment from the given `content` string.
|
||||
|
||||
@ -136,14 +166,18 @@ def make_comment(content: str) -> str:
|
||||
return "#" + content
|
||||
|
||||
|
||||
def normalize_fmt_off(node: Node) -> None:
|
||||
def normalize_fmt_off(
|
||||
node: Node, mode: Mode, lines: Collection[tuple[int, int]]
|
||||
) -> None:
|
||||
"""Convert content between `# fmt: off`/`# fmt: on` into standalone comments."""
|
||||
try_again = True
|
||||
while try_again:
|
||||
try_again = convert_one_fmt_off_pair(node)
|
||||
try_again = convert_one_fmt_off_pair(node, mode, lines)
|
||||
|
||||
|
||||
def convert_one_fmt_off_pair(node: Node) -> bool:
|
||||
def convert_one_fmt_off_pair(
|
||||
node: Node, mode: Mode, lines: Collection[tuple[int, int]]
|
||||
) -> bool:
|
||||
"""Convert content of a single `# fmt: off`/`# fmt: on` into a standalone comment.
|
||||
|
||||
Returns True if a pair was converted.
|
||||
@ -151,21 +185,27 @@ def convert_one_fmt_off_pair(node: Node) -> bool:
|
||||
for leaf in node.leaves():
|
||||
previous_consumed = 0
|
||||
for comment in list_comments(leaf.prefix, is_endmarker=False):
|
||||
if comment.value not in FMT_PASS:
|
||||
is_fmt_off = comment.value in FMT_OFF
|
||||
is_fmt_skip = _contains_fmt_skip_comment(comment.value, mode)
|
||||
if (not is_fmt_off and not is_fmt_skip) or (
|
||||
# Invalid use when `# fmt: off` is applied before a closing bracket.
|
||||
is_fmt_off
|
||||
and leaf.type in CLOSING_BRACKETS
|
||||
):
|
||||
previous_consumed = comment.consumed
|
||||
continue
|
||||
# We only want standalone comments. If there's no previous leaf or
|
||||
# the previous leaf is indentation, it's a standalone comment in
|
||||
# disguise.
|
||||
if comment.value in FMT_PASS and comment.type != STANDALONE_COMMENT:
|
||||
if comment.type != STANDALONE_COMMENT:
|
||||
prev = preceding_leaf(leaf)
|
||||
if prev:
|
||||
if comment.value in FMT_OFF and prev.type not in WHITESPACE:
|
||||
if is_fmt_off and prev.type not in WHITESPACE:
|
||||
continue
|
||||
if comment.value in FMT_SKIP and prev.type in WHITESPACE:
|
||||
if is_fmt_skip and prev.type in WHITESPACE:
|
||||
continue
|
||||
|
||||
ignored_nodes = list(generate_ignored_nodes(leaf, comment))
|
||||
ignored_nodes = list(generate_ignored_nodes(leaf, comment, mode))
|
||||
if not ignored_nodes:
|
||||
continue
|
||||
|
||||
@ -174,7 +214,7 @@ def convert_one_fmt_off_pair(node: Node) -> bool:
|
||||
prefix = first.prefix
|
||||
if comment.value in FMT_OFF:
|
||||
first.prefix = prefix[comment.consumed :]
|
||||
if comment.value in FMT_SKIP:
|
||||
if is_fmt_skip:
|
||||
first.prefix = ""
|
||||
standalone_comment_prefix = prefix
|
||||
else:
|
||||
@ -182,10 +222,20 @@ def convert_one_fmt_off_pair(node: Node) -> bool:
|
||||
prefix[:previous_consumed] + "\n" * comment.newlines
|
||||
)
|
||||
hidden_value = "".join(str(n) for n in ignored_nodes)
|
||||
comment_lineno = leaf.lineno - comment.newlines
|
||||
if comment.value in FMT_OFF:
|
||||
fmt_off_prefix = ""
|
||||
if len(lines) > 0 and not any(
|
||||
line[0] <= comment_lineno <= line[1] for line in lines
|
||||
):
|
||||
# keeping indentation of comment by preserving original whitespaces.
|
||||
fmt_off_prefix = prefix.split(comment.value)[0]
|
||||
if "\n" in fmt_off_prefix:
|
||||
fmt_off_prefix = fmt_off_prefix.split("\n")[-1]
|
||||
standalone_comment_prefix += fmt_off_prefix
|
||||
hidden_value = comment.value + "\n" + hidden_value
|
||||
if comment.value in FMT_SKIP:
|
||||
hidden_value += " " + comment.value
|
||||
if is_fmt_skip:
|
||||
hidden_value += comment.leading_whitespace + comment.value
|
||||
if hidden_value.endswith("\n"):
|
||||
# That happens when one of the `ignored_nodes` ended with a NEWLINE
|
||||
# leaf (possibly followed by a DEDENT).
|
||||
@ -211,14 +261,16 @@ def convert_one_fmt_off_pair(node: Node) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def generate_ignored_nodes(leaf: Leaf, comment: ProtoComment) -> Iterator[LN]:
|
||||
def generate_ignored_nodes(
|
||||
leaf: Leaf, comment: ProtoComment, mode: Mode
|
||||
) -> Iterator[LN]:
|
||||
"""Starting from the container of `leaf`, generate all leaves until `# fmt: on`.
|
||||
|
||||
If comment is skip, returns leaf only.
|
||||
Stops at the end of the block.
|
||||
"""
|
||||
if comment.value in FMT_SKIP:
|
||||
yield from _generate_ignored_nodes_from_fmt_skip(leaf, comment)
|
||||
if _contains_fmt_skip_comment(comment.value, mode):
|
||||
yield from _generate_ignored_nodes_from_fmt_skip(leaf, comment, mode)
|
||||
return
|
||||
container: Optional[LN] = container_of(leaf)
|
||||
while container is not None and container.type != token.ENDMARKER:
|
||||
@ -257,23 +309,67 @@ def generate_ignored_nodes(leaf: Leaf, comment: ProtoComment) -> Iterator[LN]:
|
||||
|
||||
|
||||
def _generate_ignored_nodes_from_fmt_skip(
|
||||
leaf: Leaf, comment: ProtoComment
|
||||
leaf: Leaf, comment: ProtoComment, mode: Mode
|
||||
) -> Iterator[LN]:
|
||||
"""Generate all leaves that should be ignored by the `# fmt: skip` from `leaf`."""
|
||||
prev_sibling = leaf.prev_sibling
|
||||
parent = leaf.parent
|
||||
ignored_nodes: list[LN] = []
|
||||
# Need to properly format the leaf prefix to compare it to comment.value,
|
||||
# which is also formatted
|
||||
comments = list_comments(leaf.prefix, is_endmarker=False)
|
||||
if not comments or comment.value != comments[0].value:
|
||||
return
|
||||
if prev_sibling is not None:
|
||||
leaf.prefix = ""
|
||||
leaf.prefix = leaf.prefix[comment.consumed :]
|
||||
|
||||
if Preview.fix_fmt_skip_in_one_liners not in mode:
|
||||
siblings = [prev_sibling]
|
||||
while "\n" not in prev_sibling.prefix and prev_sibling.prev_sibling is not None:
|
||||
while (
|
||||
"\n" not in prev_sibling.prefix
|
||||
and prev_sibling.prev_sibling is not None
|
||||
):
|
||||
prev_sibling = prev_sibling.prev_sibling
|
||||
siblings.insert(0, prev_sibling)
|
||||
yield from siblings
|
||||
return
|
||||
|
||||
# Generates the nodes to be ignored by `fmt: skip`.
|
||||
|
||||
# Nodes to ignore are the ones on the same line as the
|
||||
# `# fmt: skip` comment, excluding the `# fmt: skip`
|
||||
# node itself.
|
||||
|
||||
# Traversal process (starting at the `# fmt: skip` node):
|
||||
# 1. Move to the `prev_sibling` of the current node.
|
||||
# 2. If `prev_sibling` has children, go to its rightmost leaf.
|
||||
# 3. If there’s no `prev_sibling`, move up to the parent
|
||||
# node and repeat.
|
||||
# 4. Continue until:
|
||||
# a. You encounter an `INDENT` or `NEWLINE` node (indicates
|
||||
# start of the line).
|
||||
# b. You reach the root node.
|
||||
|
||||
# Include all visited LEAVES in the ignored list, except INDENT
|
||||
# or NEWLINE leaves.
|
||||
|
||||
current_node = prev_sibling
|
||||
ignored_nodes = [current_node]
|
||||
if current_node.prev_sibling is None and current_node.parent is not None:
|
||||
current_node = current_node.parent
|
||||
while "\n" not in current_node.prefix and current_node.prev_sibling is not None:
|
||||
leaf_nodes = list(current_node.prev_sibling.leaves())
|
||||
current_node = leaf_nodes[-1] if leaf_nodes else current_node
|
||||
|
||||
if current_node.type in (token.NEWLINE, token.INDENT):
|
||||
current_node.prefix = ""
|
||||
break
|
||||
|
||||
ignored_nodes.insert(0, current_node)
|
||||
|
||||
if current_node.prev_sibling is None and current_node.parent is not None:
|
||||
current_node = current_node.parent
|
||||
yield from ignored_nodes
|
||||
elif (
|
||||
parent is not None and parent.type == syms.suite and leaf.type == token.NEWLINE
|
||||
):
|
||||
@ -281,7 +377,6 @@ def _generate_ignored_nodes_from_fmt_skip(
|
||||
# statements. The ignored nodes should be previous siblings of the
|
||||
# parent suite node.
|
||||
leaf.prefix = ""
|
||||
ignored_nodes: List[LN] = []
|
||||
parent_sibling = parent.prev_sibling
|
||||
while parent_sibling is not None and parent_sibling.type != syms.suite:
|
||||
ignored_nodes.insert(0, parent_sibling)
|
||||
@ -321,7 +416,7 @@ def children_contains_fmt_on(container: LN) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def contains_pragma_comment(comment_list: List[Leaf]) -> bool:
|
||||
def contains_pragma_comment(comment_list: list[Leaf]) -> bool:
|
||||
"""
|
||||
Returns:
|
||||
True iff one of the comments in @comment_list is a pragma used by one
|
||||
@ -333,3 +428,28 @@ def contains_pragma_comment(comment_list: List[Leaf]) -> bool:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _contains_fmt_skip_comment(comment_line: str, mode: Mode) -> bool:
|
||||
"""
|
||||
Checks if the given comment contains FMT_SKIP alone or paired with other comments.
|
||||
Matching styles:
|
||||
# fmt:skip <-- single comment
|
||||
# noqa:XXX # fmt:skip # a nice line <-- multiple comments (Preview)
|
||||
# pylint:XXX; fmt:skip <-- list of comments (; separated, Preview)
|
||||
"""
|
||||
semantic_comment_blocks = [
|
||||
comment_line,
|
||||
*[
|
||||
_COMMENT_PREFIX + comment.strip()
|
||||
for comment in comment_line.split(_COMMENT_PREFIX)[1:]
|
||||
],
|
||||
*[
|
||||
_COMMENT_PREFIX + comment.strip()
|
||||
for comment in comment_line.strip(_COMMENT_PREFIX).split(
|
||||
_COMMENT_LIST_SEPARATOR
|
||||
)
|
||||
],
|
||||
]
|
||||
|
||||
return any(comment in FMT_SKIP for comment in semantic_comment_blocks)
|
||||
|
@ -9,15 +9,17 @@
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
import traceback
|
||||
from collections.abc import Iterable
|
||||
from concurrent.futures import Executor, ProcessPoolExecutor, ThreadPoolExecutor
|
||||
from multiprocessing import Manager
|
||||
from pathlib import Path
|
||||
from typing import Any, Iterable, Optional, Set
|
||||
from typing import Any, Optional
|
||||
|
||||
from mypy_extensions import mypyc_attr
|
||||
|
||||
from black import WriteBack, format_file_in_place
|
||||
from black.cache import Cache, filter_cached, read_cache, write_cache
|
||||
from black.cache import Cache
|
||||
from black.mode import Mode
|
||||
from black.output import err
|
||||
from black.report import Changed, Report
|
||||
@ -37,7 +39,7 @@ def maybe_install_uvloop() -> None:
|
||||
pass
|
||||
|
||||
|
||||
def cancel(tasks: Iterable["asyncio.Task[Any]"]) -> None:
|
||||
def cancel(tasks: Iterable["asyncio.Future[Any]"]) -> None:
|
||||
"""asyncio signal handler that cancels all `tasks` and reports to stderr."""
|
||||
err("Aborted!")
|
||||
for task in tasks:
|
||||
@ -68,7 +70,7 @@ def shutdown(loop: asyncio.AbstractEventLoop) -> None:
|
||||
# not ideal, but this shouldn't cause any issues ... hopefully. ~ichard26
|
||||
@mypyc_attr(patchable=True)
|
||||
def reformat_many(
|
||||
sources: Set[Path],
|
||||
sources: set[Path],
|
||||
fast: bool,
|
||||
write_back: WriteBack,
|
||||
mode: Mode,
|
||||
@ -80,7 +82,8 @@ def reformat_many(
|
||||
|
||||
executor: Executor
|
||||
if workers is None:
|
||||
workers = os.cpu_count() or 1
|
||||
workers = int(os.environ.get("BLACK_NUM_WORKERS", 0))
|
||||
workers = workers or os.cpu_count() or 1
|
||||
if sys.platform == "win32":
|
||||
# Work around https://bugs.python.org/issue26903
|
||||
workers = min(workers, 60)
|
||||
@ -117,7 +120,7 @@ def reformat_many(
|
||||
|
||||
|
||||
async def schedule_formatting(
|
||||
sources: Set[Path],
|
||||
sources: set[Path],
|
||||
fast: bool,
|
||||
write_back: WriteBack,
|
||||
mode: Mode,
|
||||
@ -132,10 +135,9 @@ async def schedule_formatting(
|
||||
`write_back`, `fast`, and `mode` options are passed to
|
||||
:func:`format_file_in_place`.
|
||||
"""
|
||||
cache: Cache = {}
|
||||
cache = Cache.read(mode)
|
||||
if write_back not in (WriteBack.DIFF, WriteBack.COLOR_DIFF):
|
||||
cache = read_cache(mode)
|
||||
sources, cached = filter_cached(cache, sources)
|
||||
sources, cached = cache.filtered_cached(sources)
|
||||
for src in sorted(cached):
|
||||
report.done(src, Changed.CACHED)
|
||||
if not sources:
|
||||
@ -170,8 +172,10 @@ async def schedule_formatting(
|
||||
src = tasks.pop(task)
|
||||
if task.cancelled():
|
||||
cancelled.append(task)
|
||||
elif task.exception():
|
||||
report.failed(src, str(task.exception()))
|
||||
elif exc := task.exception():
|
||||
if report.verbose:
|
||||
traceback.print_exception(type(exc), exc, exc.__traceback__)
|
||||
report.failed(src, str(exc))
|
||||
else:
|
||||
changed = Changed.YES if task.result() else Changed.NO
|
||||
# If the file was written back or was successfully checked as
|
||||
@ -184,4 +188,4 @@ async def schedule_formatting(
|
||||
if cancelled:
|
||||
await asyncio.gather(*cancelled, return_exceptions=True)
|
||||
if sources_to_cache:
|
||||
write_cache(cache, sources_to_cache, mode)
|
||||
cache.write(sources_to_cache)
|
||||
|
@ -1,4 +1,4 @@
|
||||
DEFAULT_LINE_LENGTH = 88
|
||||
DEFAULT_EXCLUDES = r"/(\.direnv|\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|venv|\.svn|\.ipynb_checkpoints|_build|buck-out|build|dist|__pypackages__)/" # noqa: B950
|
||||
DEFAULT_EXCLUDES = r"/(\.direnv|\.eggs|\.git|\.hg|\.ipynb_checkpoints|\.mypy_cache|\.nox|\.pytest_cache|\.ruff_cache|\.tox|\.svn|\.venv|\.vscode|__pypackages__|_build|buck-out|build|dist|venv)/" # noqa: B950
|
||||
DEFAULT_INCLUDES = r"(\.pyi?|\.ipynb)$"
|
||||
STDIN_PLACEHOLDER = "__BLACK_STDIN_FILENAME__"
|
||||
|
@ -1,5 +1,6 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Iterator, TypeVar, Union
|
||||
from collections.abc import Iterator
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, TypeVar, Union
|
||||
|
||||
from black.nodes import Visitor
|
||||
from black.output import out
|
||||
@ -14,26 +15,33 @@
|
||||
@dataclass
|
||||
class DebugVisitor(Visitor[T]):
|
||||
tree_depth: int = 0
|
||||
list_output: list[str] = field(default_factory=list)
|
||||
print_output: bool = True
|
||||
|
||||
def out(self, message: str, *args: Any, **kwargs: Any) -> None:
|
||||
self.list_output.append(message)
|
||||
if self.print_output:
|
||||
out(message, *args, **kwargs)
|
||||
|
||||
def visit_default(self, node: LN) -> Iterator[T]:
|
||||
indent = " " * (2 * self.tree_depth)
|
||||
if isinstance(node, Node):
|
||||
_type = type_repr(node.type)
|
||||
out(f"{indent}{_type}", fg="yellow")
|
||||
self.out(f"{indent}{_type}", fg="yellow")
|
||||
self.tree_depth += 1
|
||||
for child in node.children:
|
||||
yield from self.visit(child)
|
||||
|
||||
self.tree_depth -= 1
|
||||
out(f"{indent}/{_type}", fg="yellow", bold=False)
|
||||
self.out(f"{indent}/{_type}", fg="yellow", bold=False)
|
||||
else:
|
||||
_type = token.tok_name.get(node.type, str(node.type))
|
||||
out(f"{indent}{_type}", fg="blue", nl=False)
|
||||
self.out(f"{indent}{_type}", fg="blue", nl=False)
|
||||
if node.prefix:
|
||||
# We don't have to handle prefixes for `Node` objects since
|
||||
# that delegates to the first child anyway.
|
||||
out(f" {node.prefix!r}", fg="green", bold=False, nl=False)
|
||||
out(f" {node.value!r}", fg="blue", bold=False)
|
||||
self.out(f" {node.prefix!r}", fg="green", bold=False, nl=False)
|
||||
self.out(f" {node.value!r}", fg="blue", bold=False)
|
||||
|
||||
@classmethod
|
||||
def show(cls, code: Union[str, Leaf, Node]) -> None:
|
||||
|
@ -1,21 +1,11 @@
|
||||
import io
|
||||
import os
|
||||
import sys
|
||||
from collections.abc import Iterable, Iterator, Sequence
|
||||
from functools import lru_cache
|
||||
from pathlib import Path
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Any,
|
||||
Dict,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
Optional,
|
||||
Pattern,
|
||||
Sequence,
|
||||
Tuple,
|
||||
Union,
|
||||
)
|
||||
from re import Pattern
|
||||
from typing import TYPE_CHECKING, Any, Optional, Union
|
||||
|
||||
from mypy_extensions import mypyc_attr
|
||||
from packaging.specifiers import InvalidSpecifier, Specifier, SpecifierSet
|
||||
@ -42,12 +32,26 @@
|
||||
import colorama # noqa: F401
|
||||
|
||||
|
||||
@lru_cache()
|
||||
@lru_cache
|
||||
def _load_toml(path: Union[Path, str]) -> dict[str, Any]:
|
||||
with open(path, "rb") as f:
|
||||
return tomllib.load(f)
|
||||
|
||||
|
||||
@lru_cache
|
||||
def _cached_resolve(path: Path) -> Path:
|
||||
return path.resolve()
|
||||
|
||||
|
||||
@lru_cache
|
||||
def find_project_root(
|
||||
srcs: Sequence[str], stdin_filename: Optional[str] = None
|
||||
) -> Tuple[Path, str]:
|
||||
) -> tuple[Path, str]:
|
||||
"""Return a directory containing .git, .hg, or pyproject.toml.
|
||||
|
||||
pyproject.toml files are only considered if they contain a [tool.black]
|
||||
section and are ignored otherwise.
|
||||
|
||||
That directory will be a common parent of all files and directories
|
||||
passed in `srcs`.
|
||||
|
||||
@ -61,9 +65,9 @@ def find_project_root(
|
||||
if stdin_filename is not None:
|
||||
srcs = tuple(stdin_filename if s == "-" else s for s in srcs)
|
||||
if not srcs:
|
||||
srcs = [str(Path.cwd().resolve())]
|
||||
srcs = [str(_cached_resolve(Path.cwd()))]
|
||||
|
||||
path_srcs = [Path(Path.cwd(), src).resolve() for src in srcs]
|
||||
path_srcs = [_cached_resolve(Path(Path.cwd(), src)) for src in srcs]
|
||||
|
||||
# A list of lists of parents for each 'src'. 'src' is included as a
|
||||
# "parent" of itself if it is a directory
|
||||
@ -84,14 +88,18 @@ def find_project_root(
|
||||
return directory, ".hg directory"
|
||||
|
||||
if (directory / "pyproject.toml").is_file():
|
||||
pyproject_toml = _load_toml(directory / "pyproject.toml")
|
||||
if "black" in pyproject_toml.get("tool", {}):
|
||||
return directory, "pyproject.toml"
|
||||
|
||||
return directory, "file system root"
|
||||
|
||||
|
||||
def find_pyproject_toml(path_search_start: Tuple[str, ...]) -> Optional[str]:
|
||||
def find_pyproject_toml(
|
||||
path_search_start: tuple[str, ...], stdin_filename: Optional[str] = None
|
||||
) -> Optional[str]:
|
||||
"""Find the absolute filepath to a pyproject.toml if it exists"""
|
||||
path_project_root, _ = find_project_root(path_search_start)
|
||||
path_project_root, _ = find_project_root(path_search_start, stdin_filename)
|
||||
path_pyproject_toml = path_project_root / "pyproject.toml"
|
||||
if path_pyproject_toml.is_file():
|
||||
return str(path_pyproject_toml)
|
||||
@ -110,14 +118,13 @@ def find_pyproject_toml(path_search_start: Tuple[str, ...]) -> Optional[str]:
|
||||
|
||||
|
||||
@mypyc_attr(patchable=True)
|
||||
def parse_pyproject_toml(path_config: str) -> Dict[str, Any]:
|
||||
def parse_pyproject_toml(path_config: str) -> dict[str, Any]:
|
||||
"""Parse a pyproject toml file, pulling out relevant parts for Black.
|
||||
|
||||
If parsing fails, will raise a tomllib.TOMLDecodeError.
|
||||
"""
|
||||
with open(path_config, "rb") as f:
|
||||
pyproject_toml = tomllib.load(f)
|
||||
config: Dict[str, Any] = pyproject_toml.get("tool", {}).get("black", {})
|
||||
pyproject_toml = _load_toml(path_config)
|
||||
config: dict[str, Any] = pyproject_toml.get("tool", {}).get("black", {})
|
||||
config = {k.replace("--", "").replace("-", "_"): v for k, v in config.items()}
|
||||
|
||||
if "target_version" not in config:
|
||||
@ -129,8 +136,8 @@ def parse_pyproject_toml(path_config: str) -> Dict[str, Any]:
|
||||
|
||||
|
||||
def infer_target_version(
|
||||
pyproject_toml: Dict[str, Any]
|
||||
) -> Optional[List[TargetVersion]]:
|
||||
pyproject_toml: dict[str, Any],
|
||||
) -> Optional[list[TargetVersion]]:
|
||||
"""Infer Black's target version from the project metadata in pyproject.toml.
|
||||
|
||||
Supports the PyPA standard format (PEP 621):
|
||||
@ -153,7 +160,7 @@ def infer_target_version(
|
||||
return None
|
||||
|
||||
|
||||
def parse_req_python_version(requires_python: str) -> Optional[List[TargetVersion]]:
|
||||
def parse_req_python_version(requires_python: str) -> Optional[list[TargetVersion]]:
|
||||
"""Parse a version string (i.e. ``"3.7"``) to a list of TargetVersion.
|
||||
|
||||
If parsing fails, will raise a packaging.version.InvalidVersion error.
|
||||
@ -168,7 +175,7 @@ def parse_req_python_version(requires_python: str) -> Optional[List[TargetVersio
|
||||
return None
|
||||
|
||||
|
||||
def parse_req_python_specifier(requires_python: str) -> Optional[List[TargetVersion]]:
|
||||
def parse_req_python_specifier(requires_python: str) -> Optional[list[TargetVersion]]:
|
||||
"""Parse a specifier string (i.e. ``">=3.7,<3.10"``) to a list of TargetVersion.
|
||||
|
||||
If parsing fails, will raise a packaging.specifiers.InvalidSpecifier error.
|
||||
@ -179,7 +186,7 @@ def parse_req_python_specifier(requires_python: str) -> Optional[List[TargetVers
|
||||
return None
|
||||
|
||||
target_version_map = {f"3.{v.value}": v for v in TargetVersion}
|
||||
compatible_versions: List[str] = list(specifier_set.filter(target_version_map))
|
||||
compatible_versions: list[str] = list(specifier_set.filter(target_version_map))
|
||||
if compatible_versions:
|
||||
return [target_version_map[v] for v in compatible_versions]
|
||||
return None
|
||||
@ -210,7 +217,7 @@ def strip_specifier_set(specifier_set: SpecifierSet) -> SpecifierSet:
|
||||
return SpecifierSet(",".join(str(s) for s in specifiers))
|
||||
|
||||
|
||||
@lru_cache()
|
||||
@lru_cache
|
||||
def find_user_pyproject_toml() -> Path:
|
||||
r"""Return the path to the top-level user configuration for black.
|
||||
|
||||
@ -227,14 +234,14 @@ def find_user_pyproject_toml() -> Path:
|
||||
else:
|
||||
config_root = os.environ.get("XDG_CONFIG_HOME", "~/.config")
|
||||
user_config_path = Path(config_root).expanduser() / "black"
|
||||
return user_config_path.resolve()
|
||||
return _cached_resolve(user_config_path)
|
||||
|
||||
|
||||
@lru_cache()
|
||||
@lru_cache
|
||||
def get_gitignore(root: Path) -> PathSpec:
|
||||
"""Return a PathSpec matching gitignore content if present."""
|
||||
gitignore = root / ".gitignore"
|
||||
lines: List[str] = []
|
||||
lines: list[str] = []
|
||||
if gitignore.is_file():
|
||||
with gitignore.open(encoding="utf-8") as gf:
|
||||
lines = gf.readlines()
|
||||
@ -245,44 +252,59 @@ def get_gitignore(root: Path) -> PathSpec:
|
||||
raise
|
||||
|
||||
|
||||
def normalize_path_maybe_ignore(
|
||||
def resolves_outside_root_or_cannot_stat(
|
||||
path: Path,
|
||||
root: Path,
|
||||
report: Optional[Report] = None,
|
||||
) -> Optional[str]:
|
||||
"""Normalize `path`. May return `None` if `path` was ignored.
|
||||
|
||||
`report` is where "path ignored" output goes.
|
||||
) -> bool:
|
||||
"""
|
||||
Returns whether the path is a symbolic link that points outside the
|
||||
root directory. Also returns True if we failed to resolve the path.
|
||||
"""
|
||||
try:
|
||||
abspath = path if path.is_absolute() else Path.cwd() / path
|
||||
normalized_path = abspath.resolve()
|
||||
try:
|
||||
root_relative_path = normalized_path.relative_to(root).as_posix()
|
||||
except ValueError:
|
||||
if report:
|
||||
report.path_ignored(
|
||||
path, f"is a symbolic link that points outside {root}"
|
||||
)
|
||||
return None
|
||||
|
||||
resolved_path = _cached_resolve(path)
|
||||
except OSError as e:
|
||||
if report:
|
||||
report.path_ignored(path, f"cannot be read because {e}")
|
||||
return None
|
||||
|
||||
return root_relative_path
|
||||
return True
|
||||
try:
|
||||
resolved_path.relative_to(root)
|
||||
except ValueError:
|
||||
if report:
|
||||
report.path_ignored(path, f"is a symbolic link that points outside {root}")
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def path_is_ignored(
|
||||
path: Path, gitignore_dict: Dict[Path, PathSpec], report: Report
|
||||
def best_effort_relative_path(path: Path, root: Path) -> Path:
|
||||
# Precondition: resolves_outside_root_or_cannot_stat(path, root) is False
|
||||
try:
|
||||
return path.absolute().relative_to(root)
|
||||
except ValueError:
|
||||
pass
|
||||
root_parent = next((p for p in path.parents if _cached_resolve(p) == root), None)
|
||||
if root_parent is not None:
|
||||
return path.relative_to(root_parent)
|
||||
# something adversarial, fallback to path guaranteed by precondition
|
||||
return _cached_resolve(path).relative_to(root)
|
||||
|
||||
|
||||
def _path_is_ignored(
|
||||
root_relative_path: str,
|
||||
root: Path,
|
||||
gitignore_dict: dict[Path, PathSpec],
|
||||
) -> bool:
|
||||
path = root / root_relative_path
|
||||
# Note that this logic is sensitive to the ordering of gitignore_dict. Callers must
|
||||
# ensure that gitignore_dict is ordered from least specific to most specific.
|
||||
for gitignore_path, pattern in gitignore_dict.items():
|
||||
relative_path = normalize_path_maybe_ignore(path, gitignore_path, report)
|
||||
if relative_path is None:
|
||||
try:
|
||||
relative_path = path.relative_to(gitignore_path).as_posix()
|
||||
if path.is_dir():
|
||||
relative_path = relative_path + "/"
|
||||
except ValueError:
|
||||
break
|
||||
if pattern.match_file(relative_path):
|
||||
report.path_ignored(path, "matches a .gitignore file content")
|
||||
return True
|
||||
return False
|
||||
|
||||
@ -303,7 +325,7 @@ def gen_python_files(
|
||||
extend_exclude: Optional[Pattern[str]],
|
||||
force_exclude: Optional[Pattern[str]],
|
||||
report: Report,
|
||||
gitignore_dict: Optional[Dict[Path, PathSpec]],
|
||||
gitignore_dict: Optional[dict[Path, PathSpec]],
|
||||
*,
|
||||
verbose: bool,
|
||||
quiet: bool,
|
||||
@ -319,33 +341,38 @@ def gen_python_files(
|
||||
|
||||
assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}"
|
||||
for child in paths:
|
||||
normalized_path = normalize_path_maybe_ignore(child, root, report)
|
||||
if normalized_path is None:
|
||||
continue
|
||||
assert child.is_absolute()
|
||||
root_relative_path = child.relative_to(root).as_posix()
|
||||
|
||||
# First ignore files matching .gitignore, if passed
|
||||
if gitignore_dict and path_is_ignored(child, gitignore_dict, report):
|
||||
if gitignore_dict and _path_is_ignored(
|
||||
root_relative_path, root, gitignore_dict
|
||||
):
|
||||
report.path_ignored(child, "matches a .gitignore file content")
|
||||
continue
|
||||
|
||||
# Then ignore with `--exclude` `--extend-exclude` and `--force-exclude` options.
|
||||
normalized_path = "/" + normalized_path
|
||||
root_relative_path = "/" + root_relative_path
|
||||
if child.is_dir():
|
||||
normalized_path += "/"
|
||||
root_relative_path += "/"
|
||||
|
||||
if path_is_excluded(normalized_path, exclude):
|
||||
if path_is_excluded(root_relative_path, exclude):
|
||||
report.path_ignored(child, "matches the --exclude regular expression")
|
||||
continue
|
||||
|
||||
if path_is_excluded(normalized_path, extend_exclude):
|
||||
if path_is_excluded(root_relative_path, extend_exclude):
|
||||
report.path_ignored(
|
||||
child, "matches the --extend-exclude regular expression"
|
||||
)
|
||||
continue
|
||||
|
||||
if path_is_excluded(normalized_path, force_exclude):
|
||||
if path_is_excluded(root_relative_path, force_exclude):
|
||||
report.path_ignored(child, "matches the --force-exclude regular expression")
|
||||
continue
|
||||
|
||||
if resolves_outside_root_or_cannot_stat(child, root, report):
|
||||
continue
|
||||
|
||||
if child.is_dir():
|
||||
# If gitignore is None, gitignore usage is disabled, while a Falsey
|
||||
# gitignore is when the directory doesn't have a .gitignore file.
|
||||
@ -371,10 +398,10 @@ def gen_python_files(
|
||||
|
||||
elif child.is_file():
|
||||
if child.suffix == ".ipynb" and not jupyter_dependencies_are_installed(
|
||||
verbose=verbose, quiet=quiet
|
||||
warn=verbose or not quiet
|
||||
):
|
||||
continue
|
||||
include_match = include.search(normalized_path) if include else True
|
||||
include_match = include.search(root_relative_path) if include else True
|
||||
if include_match:
|
||||
yield child
|
||||
|
||||
|
@ -3,29 +3,29 @@
|
||||
import ast
|
||||
import collections
|
||||
import dataclasses
|
||||
import re
|
||||
import secrets
|
||||
import sys
|
||||
from functools import lru_cache
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
from importlib.util import find_spec
|
||||
from typing import Optional
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from typing import TypeGuard
|
||||
else:
|
||||
from typing_extensions import TypeGuard
|
||||
|
||||
from black.mode import Mode
|
||||
from black.output import out
|
||||
from black.report import NothingChanged
|
||||
|
||||
TRANSFORMED_MAGICS = frozenset(
|
||||
(
|
||||
TRANSFORMED_MAGICS = frozenset((
|
||||
"get_ipython().run_cell_magic",
|
||||
"get_ipython().system",
|
||||
"get_ipython().getoutput",
|
||||
"get_ipython().run_line_magic",
|
||||
)
|
||||
)
|
||||
TOKENS_TO_IGNORE = frozenset(
|
||||
(
|
||||
))
|
||||
TOKENS_TO_IGNORE = frozenset((
|
||||
"ENDMARKER",
|
||||
"NL",
|
||||
"NEWLINE",
|
||||
@ -33,10 +33,8 @@
|
||||
"DEDENT",
|
||||
"UNIMPORTANT_WS",
|
||||
"ESCAPED_NL",
|
||||
)
|
||||
)
|
||||
PYTHON_CELL_MAGICS = frozenset(
|
||||
(
|
||||
))
|
||||
PYTHON_CELL_MAGICS = frozenset((
|
||||
"capture",
|
||||
"prun",
|
||||
"pypy",
|
||||
@ -44,9 +42,7 @@
|
||||
"python3",
|
||||
"time",
|
||||
"timeit",
|
||||
)
|
||||
)
|
||||
TOKEN_HEX = secrets.token_hex
|
||||
))
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
@ -55,24 +51,49 @@ class Replacement:
|
||||
src: str
|
||||
|
||||
|
||||
@lru_cache()
|
||||
def jupyter_dependencies_are_installed(*, verbose: bool, quiet: bool) -> bool:
|
||||
try:
|
||||
import IPython # noqa:F401
|
||||
import tokenize_rt # noqa:F401
|
||||
except ModuleNotFoundError:
|
||||
if verbose or not quiet:
|
||||
@lru_cache
|
||||
def jupyter_dependencies_are_installed(*, warn: bool) -> bool:
|
||||
installed = (
|
||||
find_spec("tokenize_rt") is not None and find_spec("IPython") is not None
|
||||
)
|
||||
if not installed and warn:
|
||||
msg = (
|
||||
"Skipping .ipynb files as Jupyter dependencies are not installed.\n"
|
||||
'You can fix this by running ``pip install "black[jupyter]"``'
|
||||
)
|
||||
out(msg)
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
return installed
|
||||
|
||||
|
||||
def remove_trailing_semicolon(src: str) -> Tuple[str, bool]:
|
||||
def validate_cell(src: str, mode: Mode) -> None:
|
||||
"""Check that cell does not already contain TransformerManager transformations,
|
||||
or non-Python cell magics, which might cause tokenizer_rt to break because of
|
||||
indentations.
|
||||
|
||||
If a cell contains ``!ls``, then it'll be transformed to
|
||||
``get_ipython().system('ls')``. However, if the cell originally contained
|
||||
``get_ipython().system('ls')``, then it would get transformed in the same way:
|
||||
|
||||
>>> TransformerManager().transform_cell("get_ipython().system('ls')")
|
||||
"get_ipython().system('ls')\n"
|
||||
>>> TransformerManager().transform_cell("!ls")
|
||||
"get_ipython().system('ls')\n"
|
||||
|
||||
Due to the impossibility of safely roundtripping in such situations, cells
|
||||
containing transformed magics will be ignored.
|
||||
"""
|
||||
if any(transformed_magic in src for transformed_magic in TRANSFORMED_MAGICS):
|
||||
raise NothingChanged
|
||||
|
||||
line = _get_code_start(src)
|
||||
if line.startswith("%%") and (
|
||||
line.split(maxsplit=1)[0][2:]
|
||||
not in PYTHON_CELL_MAGICS | mode.python_cell_magics
|
||||
):
|
||||
raise NothingChanged
|
||||
|
||||
|
||||
def remove_trailing_semicolon(src: str) -> tuple[str, bool]:
|
||||
"""Remove trailing semicolon from Jupyter notebook cell.
|
||||
|
||||
For example,
|
||||
@ -128,7 +149,7 @@ def put_trailing_semicolon_back(src: str, has_trailing_semicolon: bool) -> str:
|
||||
return str(tokens_to_src(tokens))
|
||||
|
||||
|
||||
def mask_cell(src: str) -> Tuple[str, List[Replacement]]:
|
||||
def mask_cell(src: str) -> tuple[str, list[Replacement]]:
|
||||
"""Mask IPython magics so content becomes parseable Python code.
|
||||
|
||||
For example,
|
||||
@ -138,12 +159,12 @@ def mask_cell(src: str) -> Tuple[str, List[Replacement]]:
|
||||
|
||||
becomes
|
||||
|
||||
"25716f358c32750e"
|
||||
b"25716f358c32750"
|
||||
'foo'
|
||||
|
||||
The replacements are returned, along with the transformed code.
|
||||
"""
|
||||
replacements: List[Replacement] = []
|
||||
replacements: list[Replacement] = []
|
||||
try:
|
||||
ast.parse(src)
|
||||
except SyntaxError:
|
||||
@ -156,18 +177,32 @@ def mask_cell(src: str) -> Tuple[str, List[Replacement]]:
|
||||
from IPython.core.inputtransformer2 import TransformerManager
|
||||
|
||||
transformer_manager = TransformerManager()
|
||||
# A side effect of the following transformation is that it also removes any
|
||||
# empty lines at the beginning of the cell.
|
||||
transformed = transformer_manager.transform_cell(src)
|
||||
transformed, cell_magic_replacements = replace_cell_magics(transformed)
|
||||
replacements += cell_magic_replacements
|
||||
transformed = transformer_manager.transform_cell(transformed)
|
||||
transformed, magic_replacements = replace_magics(transformed)
|
||||
if len(transformed.splitlines()) != len(src.splitlines()):
|
||||
if len(transformed.strip().splitlines()) != len(src.strip().splitlines()):
|
||||
# Multi-line magic, not supported.
|
||||
raise NothingChanged
|
||||
replacements += magic_replacements
|
||||
return transformed, replacements
|
||||
|
||||
|
||||
def create_token(n_chars: int) -> str:
|
||||
"""Create a randomly generated token that is n_chars characters long."""
|
||||
assert n_chars > 0
|
||||
n_bytes = max(n_chars // 2 - 1, 1)
|
||||
token = secrets.token_hex(n_bytes)
|
||||
if len(token) + 3 > n_chars:
|
||||
token = token[:-1]
|
||||
# We use a bytestring so that the string does not get interpreted
|
||||
# as a docstring.
|
||||
return f'b"{token}"'
|
||||
|
||||
|
||||
def get_token(src: str, magic: str) -> str:
|
||||
"""Return randomly generated token to mask IPython magic with.
|
||||
|
||||
@ -177,11 +212,11 @@ def get_token(src: str, magic: str) -> str:
|
||||
not already present anywhere else in the cell.
|
||||
"""
|
||||
assert magic
|
||||
nbytes = max(len(magic) // 2 - 1, 1)
|
||||
token = TOKEN_HEX(nbytes)
|
||||
n_chars = len(magic)
|
||||
token = create_token(n_chars)
|
||||
counter = 0
|
||||
while token in src:
|
||||
token = TOKEN_HEX(nbytes)
|
||||
token = create_token(n_chars)
|
||||
counter += 1
|
||||
if counter > 100:
|
||||
raise AssertionError(
|
||||
@ -189,12 +224,10 @@ def get_token(src: str, magic: str) -> str:
|
||||
"Please report a bug on https://github.com/psf/black/issues. "
|
||||
f"The magic might be helpful: {magic}"
|
||||
) from None
|
||||
if len(token) + 2 < len(magic):
|
||||
token = f"{token}."
|
||||
return f'"{token}"'
|
||||
return token
|
||||
|
||||
|
||||
def replace_cell_magics(src: str) -> Tuple[str, List[Replacement]]:
|
||||
def replace_cell_magics(src: str) -> tuple[str, list[Replacement]]:
|
||||
"""Replace cell magic with token.
|
||||
|
||||
Note that 'src' will already have been processed by IPython's
|
||||
@ -211,7 +244,7 @@ def replace_cell_magics(src: str) -> Tuple[str, List[Replacement]]:
|
||||
|
||||
The replacement, along with the transformed code, is returned.
|
||||
"""
|
||||
replacements: List[Replacement] = []
|
||||
replacements: list[Replacement] = []
|
||||
|
||||
tree = ast.parse(src)
|
||||
|
||||
@ -225,7 +258,7 @@ def replace_cell_magics(src: str) -> Tuple[str, List[Replacement]]:
|
||||
return f"{mask}\n{cell_magic_finder.cell_magic.body}", replacements
|
||||
|
||||
|
||||
def replace_magics(src: str) -> Tuple[str, List[Replacement]]:
|
||||
def replace_magics(src: str) -> tuple[str, list[Replacement]]:
|
||||
"""Replace magics within body of cell.
|
||||
|
||||
Note that 'src' will already have been processed by IPython's
|
||||
@ -247,7 +280,7 @@ def replace_magics(src: str) -> Tuple[str, List[Replacement]]:
|
||||
magic_finder = MagicFinder()
|
||||
magic_finder.visit(ast.parse(src))
|
||||
new_srcs = []
|
||||
for i, line in enumerate(src.splitlines(), start=1):
|
||||
for i, line in enumerate(src.split("\n"), start=1):
|
||||
if i in magic_finder.magics:
|
||||
offsets_and_magics = magic_finder.magics[i]
|
||||
if len(offsets_and_magics) != 1: # pragma: nocover
|
||||
@ -266,7 +299,7 @@ def replace_magics(src: str) -> Tuple[str, List[Replacement]]:
|
||||
return "\n".join(new_srcs), replacements
|
||||
|
||||
|
||||
def unmask_cell(src: str, replacements: List[Replacement]) -> str:
|
||||
def unmask_cell(src: str, replacements: list[Replacement]) -> str:
|
||||
"""Remove replacements from cell.
|
||||
|
||||
For example
|
||||
@ -284,6 +317,21 @@ def unmask_cell(src: str, replacements: List[Replacement]) -> str:
|
||||
return src
|
||||
|
||||
|
||||
def _get_code_start(src: str) -> str:
|
||||
"""Provides the first line where the code starts.
|
||||
|
||||
Iterates over lines of code until it finds the first line that doesn't
|
||||
contain only empty spaces and comments. It removes any empty spaces at the
|
||||
start of the line and returns it. If such line doesn't exist, it returns an
|
||||
empty string.
|
||||
"""
|
||||
for match in re.finditer(".+", src):
|
||||
line = match.group(0).lstrip()
|
||||
if line and not line.startswith("#"):
|
||||
return line
|
||||
return ""
|
||||
|
||||
|
||||
def _is_ipython_magic(node: ast.expr) -> TypeGuard[ast.Attribute]:
|
||||
"""Check if attribute is IPython magic.
|
||||
|
||||
@ -299,11 +347,11 @@ def _is_ipython_magic(node: ast.expr) -> TypeGuard[ast.Attribute]:
|
||||
)
|
||||
|
||||
|
||||
def _get_str_args(args: List[ast.expr]) -> List[str]:
|
||||
def _get_str_args(args: list[ast.expr]) -> list[str]:
|
||||
str_args = []
|
||||
for arg in args:
|
||||
assert isinstance(arg, ast.Str)
|
||||
str_args.append(arg.s)
|
||||
assert isinstance(arg, ast.Constant) and isinstance(arg.value, str)
|
||||
str_args.append(arg.value)
|
||||
return str_args
|
||||
|
||||
|
||||
@ -330,7 +378,8 @@ class CellMagicFinder(ast.NodeVisitor):
|
||||
|
||||
For example,
|
||||
|
||||
%%time\nfoo()
|
||||
%%time\n
|
||||
foo()
|
||||
|
||||
would have been transformed to
|
||||
|
||||
@ -382,7 +431,7 @@ class MagicFinder(ast.NodeVisitor):
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.magics: Dict[int, List[OffsetAndMagic]] = collections.defaultdict(list)
|
||||
self.magics: dict[int, list[OffsetAndMagic]] = collections.defaultdict(list)
|
||||
|
||||
def visit_Assign(self, node: ast.Assign) -> None:
|
||||
"""Look for system assign magics.
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -1,19 +1,8 @@
|
||||
import itertools
|
||||
import math
|
||||
import sys
|
||||
from collections.abc import Callable, Iterator, Sequence
|
||||
from dataclasses import dataclass, field
|
||||
from typing import (
|
||||
Callable,
|
||||
Dict,
|
||||
Iterator,
|
||||
List,
|
||||
Optional,
|
||||
Sequence,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
from typing import Optional, TypeVar, Union, cast
|
||||
|
||||
from black.brackets import COMMA_PRIORITY, DOT_PRIORITY, BracketTracker
|
||||
from black.mode import Mode, Preview
|
||||
@ -24,11 +13,14 @@
|
||||
STANDALONE_COMMENT,
|
||||
TEST_DESCENDANTS,
|
||||
child_towards,
|
||||
is_docstring,
|
||||
is_import,
|
||||
is_multiline_string,
|
||||
is_one_sequence_between,
|
||||
is_type_comment,
|
||||
is_type_ignore_comment,
|
||||
is_with_or_async_with_stmt,
|
||||
make_simple_prefix,
|
||||
replace_child,
|
||||
syms,
|
||||
whitespace,
|
||||
@ -48,11 +40,11 @@
|
||||
class Line:
|
||||
"""Holds leaves and comments. Can be printed with `str(line)`."""
|
||||
|
||||
mode: Mode
|
||||
mode: Mode = field(repr=False)
|
||||
depth: int = 0
|
||||
leaves: List[Leaf] = field(default_factory=list)
|
||||
leaves: list[Leaf] = field(default_factory=list)
|
||||
# keys ordered like `leaves`
|
||||
comments: Dict[LeafID, List[Leaf]] = field(default_factory=dict)
|
||||
comments: dict[LeafID, list[Leaf]] = field(default_factory=dict)
|
||||
bracket_tracker: BracketTracker = field(default_factory=BracketTracker)
|
||||
inside_brackets: bool = False
|
||||
should_split_rhs: bool = False
|
||||
@ -70,7 +62,12 @@ def append(
|
||||
|
||||
Inline comments are put aside.
|
||||
"""
|
||||
has_value = leaf.type in BRACKETS or bool(leaf.value.strip())
|
||||
has_value = (
|
||||
leaf.type in BRACKETS
|
||||
# empty fstring-middles must not be truncated
|
||||
or leaf.type == token.FSTRING_MIDDLE
|
||||
or bool(leaf.value.strip())
|
||||
)
|
||||
if not has_value:
|
||||
return
|
||||
|
||||
@ -80,14 +77,16 @@ def append(
|
||||
# Note: at this point leaf.prefix should be empty except for
|
||||
# imports, for which we only preserve newlines.
|
||||
leaf.prefix += whitespace(
|
||||
leaf, complex_subscript=self.is_complex_subscript(leaf)
|
||||
leaf,
|
||||
complex_subscript=self.is_complex_subscript(leaf),
|
||||
mode=self.mode,
|
||||
)
|
||||
if self.inside_brackets or not preformatted or track_bracket:
|
||||
self.bracket_tracker.mark(leaf)
|
||||
if self.mode.magic_trailing_comma:
|
||||
if self.has_magic_trailing_comma(leaf):
|
||||
self.magic_trailing_comma = leaf
|
||||
elif self.has_magic_trailing_comma(leaf, ensure_removable=True):
|
||||
elif self.has_magic_trailing_comma(leaf):
|
||||
self.remove_trailing_comma()
|
||||
if not self.append_comment(leaf):
|
||||
self.leaves.append(leaf)
|
||||
@ -98,7 +97,10 @@ def append_safe(self, leaf: Leaf, preformatted: bool = False) -> None:
|
||||
Raises ValueError when any `leaf` is appended after a standalone comment
|
||||
or when a standalone comment is not the first leaf on the line.
|
||||
"""
|
||||
if self.bracket_tracker.depth == 0:
|
||||
if (
|
||||
self.bracket_tracker.depth == 0
|
||||
or self.bracket_tracker.any_open_for_or_lambda()
|
||||
):
|
||||
if self.is_comment:
|
||||
raise ValueError("cannot append to standalone comments")
|
||||
|
||||
@ -164,6 +166,13 @@ def is_def(self) -> bool:
|
||||
and second_leaf.value == "def"
|
||||
)
|
||||
|
||||
@property
|
||||
def is_stub_def(self) -> bool:
|
||||
"""Is this line a function definition with a body consisting only of "..."?"""
|
||||
return self.is_def and self.leaves[-4:] == [Leaf(token.COLON, ":")] + [
|
||||
Leaf(token.DOT, ".") for _ in range(3)
|
||||
]
|
||||
|
||||
@property
|
||||
def is_class_paren_empty(self) -> bool:
|
||||
"""Is this a class with no base classes but using parentheses?
|
||||
@ -181,13 +190,26 @@ def is_class_paren_empty(self) -> bool:
|
||||
)
|
||||
|
||||
@property
|
||||
def is_triple_quoted_string(self) -> bool:
|
||||
def _is_triple_quoted_string(self) -> bool:
|
||||
"""Is the line a triple quoted string?"""
|
||||
return (
|
||||
bool(self)
|
||||
and self.leaves[0].type == token.STRING
|
||||
and self.leaves[0].value.startswith(('"""', "'''"))
|
||||
)
|
||||
if not self or self.leaves[0].type != token.STRING:
|
||||
return False
|
||||
value = self.leaves[0].value
|
||||
if value.startswith(('"""', "'''")):
|
||||
return True
|
||||
if value.startswith(("r'''", 'r"""', "R'''", 'R"""')):
|
||||
return True
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_docstring(self) -> bool:
|
||||
"""Is the line a docstring?"""
|
||||
return bool(self) and is_docstring(self.leaves[0])
|
||||
|
||||
@property
|
||||
def is_chained_assignment(self) -> bool:
|
||||
"""Is the line a chained assignment"""
|
||||
return [leaf.type for leaf in self.leaves].count(token.EQUAL) > 1
|
||||
|
||||
@property
|
||||
def opens_block(self) -> bool:
|
||||
@ -216,14 +238,29 @@ def is_fmt_pass_converted(
|
||||
leaf.fmt_pass_converted_first_leaf
|
||||
)
|
||||
|
||||
def contains_standalone_comments(self, depth_limit: int = sys.maxsize) -> bool:
|
||||
def contains_standalone_comments(self) -> bool:
|
||||
"""If so, needs to be split before emitting."""
|
||||
for leaf in self.leaves:
|
||||
if leaf.type == STANDALONE_COMMENT and leaf.bracket_depth <= depth_limit:
|
||||
if leaf.type == STANDALONE_COMMENT:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def contains_implicit_multiline_string_with_comments(self) -> bool:
|
||||
"""Chck if we have an implicit multiline string with comments on the line"""
|
||||
for leaf_type, leaf_group_iterator in itertools.groupby(
|
||||
self.leaves, lambda leaf: leaf.type
|
||||
):
|
||||
if leaf_type != token.STRING:
|
||||
continue
|
||||
leaf_list = list(leaf_group_iterator)
|
||||
if len(leaf_list) == 1:
|
||||
continue
|
||||
for leaf in leaf_list:
|
||||
if self.comments_after(leaf):
|
||||
return True
|
||||
return False
|
||||
|
||||
def contains_uncollapsable_type_comments(self) -> bool:
|
||||
ignored_ids = set()
|
||||
try:
|
||||
@ -251,7 +288,7 @@ def contains_uncollapsable_type_comments(self) -> bool:
|
||||
for comment in comments:
|
||||
if is_type_comment(comment):
|
||||
if comment_seen or (
|
||||
not is_type_comment(comment, " ignore")
|
||||
not is_type_ignore_comment(comment)
|
||||
and leaf_id not in ignored_ids
|
||||
):
|
||||
return True
|
||||
@ -288,7 +325,7 @@ def contains_unsplittable_type_ignore(self) -> bool:
|
||||
# line.
|
||||
for node in self.leaves[-2:]:
|
||||
for comment in self.comments.get(id(node), []):
|
||||
if is_type_comment(comment, " ignore"):
|
||||
if is_type_ignore_comment(comment):
|
||||
return True
|
||||
|
||||
return False
|
||||
@ -296,16 +333,11 @@ def contains_unsplittable_type_ignore(self) -> bool:
|
||||
def contains_multiline_strings(self) -> bool:
|
||||
return any(is_multiline_string(leaf) for leaf in self.leaves)
|
||||
|
||||
def has_magic_trailing_comma(
|
||||
self, closing: Leaf, ensure_removable: bool = False
|
||||
) -> bool:
|
||||
def has_magic_trailing_comma(self, closing: Leaf) -> bool:
|
||||
"""Return True if we have a magic trailing comma, that is when:
|
||||
- there's a trailing comma here
|
||||
- it's not from single-element square bracket indexing
|
||||
- it's not a one-tuple
|
||||
- it's not a single-element subscript
|
||||
Additionally, if ensure_removable:
|
||||
- it's not from square bracket indexing
|
||||
(specifically, single-element square bracket indexing)
|
||||
"""
|
||||
if not (
|
||||
closing.type in CLOSING_BRACKETS
|
||||
@ -319,9 +351,9 @@ def has_magic_trailing_comma(
|
||||
|
||||
if closing.type == token.RSQB:
|
||||
if (
|
||||
closing.parent
|
||||
closing.parent is not None
|
||||
and closing.parent.type == syms.trailer
|
||||
and closing.opening_bracket
|
||||
and closing.opening_bracket is not None
|
||||
and is_one_sequence_between(
|
||||
closing.opening_bracket,
|
||||
closing,
|
||||
@ -329,25 +361,12 @@ def has_magic_trailing_comma(
|
||||
brackets=(token.LSQB, token.RSQB),
|
||||
)
|
||||
):
|
||||
assert closing.prev_sibling is not None
|
||||
assert closing.prev_sibling.type == syms.subscriptlist
|
||||
return False
|
||||
|
||||
if not ensure_removable:
|
||||
return True
|
||||
|
||||
comma = self.leaves[-1]
|
||||
if comma.parent is None:
|
||||
return False
|
||||
return (
|
||||
comma.parent.type != syms.subscriptlist
|
||||
or closing.opening_bracket is None
|
||||
or not is_one_sequence_between(
|
||||
closing.opening_bracket,
|
||||
closing,
|
||||
self.leaves,
|
||||
brackets=(token.LSQB, token.RSQB),
|
||||
)
|
||||
)
|
||||
|
||||
if self.is_import:
|
||||
return True
|
||||
|
||||
@ -395,7 +414,7 @@ def append_comment(self, comment: Leaf) -> bool:
|
||||
self.comments.setdefault(id(last_leaf), []).append(comment)
|
||||
return True
|
||||
|
||||
def comments_after(self, leaf: Leaf) -> List[Leaf]:
|
||||
def comments_after(self, leaf: Leaf) -> list[Leaf]:
|
||||
"""Generate comments that should appear directly after `leaf`."""
|
||||
return self.comments.get(id(leaf), [])
|
||||
|
||||
@ -421,20 +440,21 @@ def is_complex_subscript(self, leaf: Leaf) -> bool:
|
||||
|
||||
if subscript_start.type == syms.subscriptlist:
|
||||
subscript_start = child_towards(subscript_start, leaf)
|
||||
|
||||
return subscript_start is not None and any(
|
||||
n.type in TEST_DESCENDANTS for n in subscript_start.pre_order()
|
||||
)
|
||||
|
||||
def enumerate_with_length(
|
||||
self, reversed: bool = False
|
||||
) -> Iterator[Tuple[Index, Leaf, int]]:
|
||||
self, is_reversed: bool = False
|
||||
) -> Iterator[tuple[Index, Leaf, int]]:
|
||||
"""Return an enumeration of leaves with their length.
|
||||
|
||||
Stops prematurely on multiline strings and standalone comments.
|
||||
"""
|
||||
op = cast(
|
||||
Callable[[Sequence[Leaf]], Iterator[Tuple[Index, Leaf]]],
|
||||
enumerate_reversed if reversed else enumerate,
|
||||
Callable[[Sequence[Leaf]], Iterator[tuple[Index, Leaf]]],
|
||||
enumerate_reversed if is_reversed else enumerate,
|
||||
)
|
||||
for index, leaf in op(self.leaves):
|
||||
length = len(leaf.prefix) + len(leaf.value)
|
||||
@ -499,14 +519,14 @@ class LinesBlock:
|
||||
previous_block: Optional["LinesBlock"]
|
||||
original_line: Line
|
||||
before: int = 0
|
||||
content_lines: List[str] = field(default_factory=list)
|
||||
content_lines: list[str] = field(default_factory=list)
|
||||
after: int = 0
|
||||
form_feed: bool = False
|
||||
|
||||
def all_lines(self) -> List[str]:
|
||||
def all_lines(self) -> list[str]:
|
||||
empty_line = str(Line(mode=self.mode))
|
||||
return (
|
||||
[empty_line * self.before] + self.content_lines + [empty_line * self.after]
|
||||
)
|
||||
prefix = make_simple_prefix(self.before, self.form_feed, empty_line)
|
||||
return [prefix] + self.content_lines + [empty_line * self.after]
|
||||
|
||||
|
||||
@dataclass
|
||||
@ -522,7 +542,7 @@ class EmptyLineTracker:
|
||||
mode: Mode
|
||||
previous_line: Optional[Line] = None
|
||||
previous_block: Optional[LinesBlock] = None
|
||||
previous_defs: List[Line] = field(default_factory=list)
|
||||
previous_defs: list[Line] = field(default_factory=list)
|
||||
semantic_leading_comment: Optional[LinesBlock] = None
|
||||
|
||||
def maybe_empty_lines(self, current_line: Line) -> LinesBlock:
|
||||
@ -531,21 +551,31 @@ def maybe_empty_lines(self, current_line: Line) -> LinesBlock:
|
||||
This is for separating `def`, `async def` and `class` with extra empty
|
||||
lines (two on module-level).
|
||||
"""
|
||||
form_feed = (
|
||||
current_line.depth == 0
|
||||
and bool(current_line.leaves)
|
||||
and "\f\n" in current_line.leaves[0].prefix
|
||||
)
|
||||
before, after = self._maybe_empty_lines(current_line)
|
||||
previous_after = self.previous_block.after if self.previous_block else 0
|
||||
before = (
|
||||
# Black should not insert empty lines at the beginning
|
||||
# of the file
|
||||
0
|
||||
if self.previous_line is None
|
||||
else before - previous_after
|
||||
)
|
||||
before = max(0, before - previous_after)
|
||||
if (
|
||||
# Always have one empty line after a module docstring
|
||||
self.previous_block
|
||||
and self.previous_block.previous_block is None
|
||||
and len(self.previous_block.original_line.leaves) == 1
|
||||
and self.previous_block.original_line.is_docstring
|
||||
and not (current_line.is_class or current_line.is_def)
|
||||
):
|
||||
before = 1
|
||||
|
||||
block = LinesBlock(
|
||||
mode=self.mode,
|
||||
previous_block=self.previous_block,
|
||||
original_line=current_line,
|
||||
before=before,
|
||||
after=after,
|
||||
form_feed=form_feed,
|
||||
)
|
||||
|
||||
# Maintain the semantic_leading_comment state.
|
||||
@ -565,10 +595,11 @@ def maybe_empty_lines(self, current_line: Line) -> LinesBlock:
|
||||
self.previous_block = block
|
||||
return block
|
||||
|
||||
def _maybe_empty_lines(self, current_line: Line) -> Tuple[int, int]:
|
||||
def _maybe_empty_lines(self, current_line: Line) -> tuple[int, int]: # noqa: C901
|
||||
max_allowed = 1
|
||||
if current_line.depth == 0:
|
||||
max_allowed = 1 if self.mode.is_pyi else 2
|
||||
|
||||
if current_line.leaves:
|
||||
# Consume the first leaf's extra newlines.
|
||||
first_leaf = current_line.leaves[0]
|
||||
@ -577,19 +608,35 @@ def _maybe_empty_lines(self, current_line: Line) -> Tuple[int, int]:
|
||||
first_leaf.prefix = ""
|
||||
else:
|
||||
before = 0
|
||||
|
||||
user_had_newline = bool(before)
|
||||
depth = current_line.depth
|
||||
|
||||
# Mutate self.previous_defs, remainder of this function should be pure
|
||||
previous_def = None
|
||||
while self.previous_defs and self.previous_defs[-1].depth >= depth:
|
||||
if self.mode.is_pyi:
|
||||
previous_def = self.previous_defs.pop()
|
||||
if current_line.is_def or current_line.is_class:
|
||||
self.previous_defs.append(current_line)
|
||||
|
||||
if self.previous_line is None:
|
||||
# Don't insert empty lines before the first line in the file.
|
||||
return 0, 0
|
||||
|
||||
if current_line.is_docstring:
|
||||
if self.previous_line.is_class:
|
||||
return 0, 1
|
||||
if self.previous_line.opens_block and self.previous_line.is_def:
|
||||
return 0, 0
|
||||
|
||||
if previous_def is not None:
|
||||
assert self.previous_line is not None
|
||||
if depth and not current_line.is_def and self.previous_line.is_def:
|
||||
# Empty lines between attributes and methods should be preserved.
|
||||
before = min(1, before)
|
||||
elif (
|
||||
Preview.blank_line_after_nested_stub_class in self.mode
|
||||
and self.previous_defs[-1].is_class
|
||||
and not self.previous_defs[-1].is_stub_class
|
||||
):
|
||||
if self.mode.is_pyi:
|
||||
if previous_def.is_class and not previous_def.is_stub_class:
|
||||
before = 1
|
||||
elif depth and not current_line.is_def and self.previous_line.is_def:
|
||||
# Empty lines between attributes and methods should be preserved.
|
||||
before = 1 if user_had_newline else 0
|
||||
elif depth:
|
||||
before = 0
|
||||
else:
|
||||
@ -599,7 +646,7 @@ def _maybe_empty_lines(self, current_line: Line) -> Tuple[int, int]:
|
||||
before = 1
|
||||
elif (
|
||||
not depth
|
||||
and self.previous_defs[-1].depth
|
||||
and previous_def.depth
|
||||
and current_line.leaves[-1].type == token.COLON
|
||||
and (
|
||||
current_line.leaves[0].value
|
||||
@ -616,50 +663,48 @@ def _maybe_empty_lines(self, current_line: Line) -> Tuple[int, int]:
|
||||
before = 1
|
||||
else:
|
||||
before = 2
|
||||
self.previous_defs.pop()
|
||||
|
||||
if current_line.is_decorator or current_line.is_def or current_line.is_class:
|
||||
return self._maybe_empty_lines_for_class_or_def(current_line, before)
|
||||
return self._maybe_empty_lines_for_class_or_def(
|
||||
current_line, before, user_had_newline
|
||||
)
|
||||
|
||||
if (
|
||||
self.previous_line
|
||||
and self.previous_line.is_import
|
||||
self.previous_line.is_import
|
||||
and self.previous_line.depth == 0
|
||||
and current_line.depth == 0
|
||||
and not current_line.is_import
|
||||
and Preview.always_one_newline_after_import in self.mode
|
||||
):
|
||||
return 1, 0
|
||||
|
||||
if (
|
||||
self.previous_line.is_import
|
||||
and not current_line.is_import
|
||||
and not current_line.is_fmt_pass_converted(first_leaf_matches=is_import)
|
||||
and depth == self.previous_line.depth
|
||||
):
|
||||
return (before or 1), 0
|
||||
|
||||
if (
|
||||
self.previous_line
|
||||
and self.previous_line.is_class
|
||||
and current_line.is_triple_quoted_string
|
||||
):
|
||||
return before, 1
|
||||
|
||||
if self.previous_line and self.previous_line.opens_block:
|
||||
return 0, 0
|
||||
return before, 0
|
||||
|
||||
def _maybe_empty_lines_for_class_or_def(
|
||||
self, current_line: Line, before: int
|
||||
) -> Tuple[int, int]:
|
||||
if not current_line.is_decorator:
|
||||
self.previous_defs.append(current_line)
|
||||
if self.previous_line is None:
|
||||
# Don't insert empty lines before the first line in the file.
|
||||
return 0, 0
|
||||
def _maybe_empty_lines_for_class_or_def( # noqa: C901
|
||||
self, current_line: Line, before: int, user_had_newline: bool
|
||||
) -> tuple[int, int]:
|
||||
assert self.previous_line is not None
|
||||
|
||||
if self.previous_line.is_decorator:
|
||||
if self.mode.is_pyi and current_line.is_stub_class:
|
||||
# Insert an empty line after a decorated stub class
|
||||
return 0, 1
|
||||
|
||||
return 0, 0
|
||||
|
||||
if self.previous_line.depth < current_line.depth and (
|
||||
self.previous_line.is_class or self.previous_line.is_def
|
||||
):
|
||||
if self.mode.is_pyi:
|
||||
return 0, 0
|
||||
return 1 if user_had_newline else 0, 0
|
||||
|
||||
comment_to_add_newlines: Optional[LinesBlock] = None
|
||||
if (
|
||||
@ -690,6 +735,11 @@ def _maybe_empty_lines_for_class_or_def(
|
||||
newlines = 0
|
||||
else:
|
||||
newlines = 1
|
||||
# Don't inspect the previous line if it's part of the body of the previous
|
||||
# statement in the same level, we always want a blank line if there's
|
||||
# something with a body preceding.
|
||||
elif self.previous_line.depth > current_line.depth:
|
||||
newlines = 1
|
||||
elif (
|
||||
current_line.is_def or current_line.is_decorator
|
||||
) and not self.previous_line.is_def:
|
||||
@ -701,12 +751,14 @@ def _maybe_empty_lines_for_class_or_def(
|
||||
# Blank line between a block of functions (maybe with preceding
|
||||
# decorators) and a block of non-functions
|
||||
newlines = 1
|
||||
elif self.previous_line.depth > current_line.depth:
|
||||
newlines = 1
|
||||
else:
|
||||
newlines = 0
|
||||
else:
|
||||
newlines = 1 if current_line.depth else 2
|
||||
# If a user has left no space after a dummy implementation, don't insert
|
||||
# new lines. This is useful for instance for @overload or Protocols.
|
||||
if self.previous_line.is_stub_def and not user_had_newline:
|
||||
newlines = 0
|
||||
if comment_to_add_newlines is not None:
|
||||
previous_block = comment_to_add_newlines.previous_block
|
||||
if previous_block is not None:
|
||||
@ -717,7 +769,7 @@ def _maybe_empty_lines_for_class_or_def(
|
||||
return newlines, 0
|
||||
|
||||
|
||||
def enumerate_reversed(sequence: Sequence[T]) -> Iterator[Tuple[Index, T]]:
|
||||
def enumerate_reversed(sequence: Sequence[T]) -> Iterator[tuple[Index, T]]:
|
||||
"""Like `reversed(enumerate(sequence))` if that were possible."""
|
||||
index = len(sequence) - 1
|
||||
for element in reversed(sequence):
|
||||
@ -726,7 +778,7 @@ def enumerate_reversed(sequence: Sequence[T]) -> Iterator[Tuple[Index, T]]:
|
||||
|
||||
|
||||
def append_leaves(
|
||||
new_line: Line, old_line: Line, leaves: List[Leaf], preformatted: bool = False
|
||||
new_line: Line, old_line: Line, leaves: list[Leaf], preformatted: bool = False
|
||||
) -> None:
|
||||
"""
|
||||
Append leaves (taken from @old_line) to @new_line, making sure to fix the
|
||||
@ -760,11 +812,9 @@ def is_line_short_enough( # noqa: C901
|
||||
if not line_str:
|
||||
line_str = line_to_string(line)
|
||||
|
||||
width = str_width if mode.preview else len
|
||||
|
||||
if Preview.multiline_string_handling not in mode:
|
||||
return (
|
||||
width(line_str) <= mode.line_length
|
||||
str_width(line_str) <= mode.line_length
|
||||
and "\n" not in line_str # multiline strings
|
||||
and not line.contains_standalone_comments()
|
||||
)
|
||||
@ -773,10 +823,10 @@ def is_line_short_enough( # noqa: C901
|
||||
return False
|
||||
if "\n" not in line_str:
|
||||
# No multiline strings (MLS) present
|
||||
return width(line_str) <= mode.line_length
|
||||
return str_width(line_str) <= mode.line_length
|
||||
|
||||
first, *_, last = line_str.split("\n")
|
||||
if width(first) > mode.line_length or width(last) > mode.line_length:
|
||||
if str_width(first) > mode.line_length or str_width(last) > mode.line_length:
|
||||
return False
|
||||
|
||||
# Traverse the AST to examine the context of the multiline string (MLS),
|
||||
@ -785,12 +835,12 @@ def is_line_short_enough( # noqa: C901
|
||||
# Depth (which is based on the existing bracket_depth concept)
|
||||
# is needed to determine nesting level of the MLS.
|
||||
# Includes special case for trailing commas.
|
||||
commas: List[int] = [] # tracks number of commas per depth level
|
||||
commas: list[int] = [] # tracks number of commas per depth level
|
||||
multiline_string: Optional[Leaf] = None
|
||||
# store the leaves that contain parts of the MLS
|
||||
multiline_string_contexts: List[LN] = []
|
||||
multiline_string_contexts: list[LN] = []
|
||||
|
||||
max_level_to_update = math.inf # track the depth of the MLS
|
||||
max_level_to_update: Union[int, float] = math.inf # track the depth of the MLS
|
||||
for i, leaf in enumerate(line.leaves):
|
||||
if max_level_to_update == math.inf:
|
||||
had_comma: Optional[int] = None
|
||||
@ -810,11 +860,13 @@ def is_line_short_enough( # noqa: C901
|
||||
return False
|
||||
|
||||
if leaf.bracket_depth <= max_level_to_update and leaf.type == token.COMMA:
|
||||
# Ignore non-nested trailing comma
|
||||
# Inside brackets, ignore trailing comma
|
||||
# directly after MLS/MLS-containing expression
|
||||
ignore_ctxs: List[Optional[LN]] = [None]
|
||||
ignore_ctxs: list[Optional[LN]] = [None]
|
||||
ignore_ctxs += multiline_string_contexts
|
||||
if not (leaf.prev_sibling in ignore_ctxs and i == len(line.leaves) - 1):
|
||||
if (line.inside_brackets or leaf.bracket_depth > 0) and (
|
||||
i != len(line.leaves) - 1 or leaf.prev_sibling not in ignore_ctxs
|
||||
):
|
||||
commas[leaf.bracket_depth] += 1
|
||||
if max_level_to_update != math.inf:
|
||||
max_level_to_update = min(max_level_to_update, leaf.bracket_depth)
|
||||
@ -887,6 +939,23 @@ def can_omit_invisible_parens(
|
||||
are too long.
|
||||
"""
|
||||
line = rhs.body
|
||||
|
||||
# We need optional parens in order to split standalone comments to their own lines
|
||||
# if there are no nested parens around the standalone comments
|
||||
closing_bracket: Optional[Leaf] = None
|
||||
for leaf in reversed(line.leaves):
|
||||
if closing_bracket and leaf is closing_bracket.opening_bracket:
|
||||
closing_bracket = None
|
||||
if leaf.type == STANDALONE_COMMENT and not closing_bracket:
|
||||
return False
|
||||
if (
|
||||
not closing_bracket
|
||||
and leaf.type in CLOSING_BRACKETS
|
||||
and leaf.opening_bracket in line.leaves
|
||||
and leaf.value
|
||||
):
|
||||
closing_bracket = leaf
|
||||
|
||||
bt = line.bracket_tracker
|
||||
if not bt.delimiters:
|
||||
# Without delimiters the optional parentheses are useless.
|
||||
@ -899,11 +968,7 @@ def can_omit_invisible_parens(
|
||||
return False
|
||||
|
||||
if delimiter_count == 1:
|
||||
if (
|
||||
Preview.wrap_multiple_context_managers_in_parens in line.mode
|
||||
and max_priority == COMMA_PRIORITY
|
||||
and rhs.head.is_with_or_async_with_stmt
|
||||
):
|
||||
if max_priority == COMMA_PRIORITY and rhs.head.is_with_or_async_with_stmt:
|
||||
# For two context manager with statements, the optional parentheses read
|
||||
# better. In this case, `rhs.body` is the context managers part of
|
||||
# the with statement. `rhs.head` is the `with (` part on the previous
|
||||
|
@ -4,18 +4,11 @@
|
||||
chosen by the user.
|
||||
"""
|
||||
|
||||
import sys
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum, auto
|
||||
from hashlib import sha256
|
||||
from operator import attrgetter
|
||||
from typing import Dict, Set
|
||||
from warnings import warn
|
||||
|
||||
if sys.version_info < (3, 8):
|
||||
from typing_extensions import Final
|
||||
else:
|
||||
from typing import Final
|
||||
from typing import Final
|
||||
|
||||
from black.const import DEFAULT_LINE_LENGTH
|
||||
|
||||
@ -30,6 +23,12 @@ class TargetVersion(Enum):
|
||||
PY39 = 9
|
||||
PY310 = 10
|
||||
PY311 = 11
|
||||
PY312 = 12
|
||||
PY313 = 13
|
||||
|
||||
def pretty(self) -> str:
|
||||
assert self.name[:2] == "PY"
|
||||
return f"Python {self.name[2]}.{self.name[3:]}"
|
||||
|
||||
|
||||
class Feature(Enum):
|
||||
@ -51,6 +50,9 @@ class Feature(Enum):
|
||||
VARIADIC_GENERICS = 15
|
||||
DEBUG_F_STRINGS = 16
|
||||
PARENTHESIZED_CONTEXT_MANAGERS = 17
|
||||
TYPE_PARAMS = 18
|
||||
FSTRING_PARSING = 19
|
||||
TYPE_PARAM_DEFAULTS = 20
|
||||
FORCE_OPTIONAL_PARENTHESES = 50
|
||||
|
||||
# __future__ flags
|
||||
@ -62,7 +64,7 @@ class Feature(Enum):
|
||||
}
|
||||
|
||||
|
||||
VERSION_TO_FEATURES: Dict[TargetVersion, Set[Feature]] = {
|
||||
VERSION_TO_FEATURES: dict[TargetVersion, set[Feature]] = {
|
||||
TargetVersion.PY33: {Feature.ASYNC_IDENTIFIERS},
|
||||
TargetVersion.PY34: {Feature.ASYNC_IDENTIFIERS},
|
||||
TargetVersion.PY35: {Feature.TRAILING_COMMA_IN_CALL, Feature.ASYNC_IDENTIFIERS},
|
||||
@ -143,68 +145,111 @@ class Feature(Enum):
|
||||
Feature.EXCEPT_STAR,
|
||||
Feature.VARIADIC_GENERICS,
|
||||
},
|
||||
TargetVersion.PY312: {
|
||||
Feature.F_STRINGS,
|
||||
Feature.DEBUG_F_STRINGS,
|
||||
Feature.NUMERIC_UNDERSCORES,
|
||||
Feature.TRAILING_COMMA_IN_CALL,
|
||||
Feature.TRAILING_COMMA_IN_DEF,
|
||||
Feature.ASYNC_KEYWORDS,
|
||||
Feature.FUTURE_ANNOTATIONS,
|
||||
Feature.ASSIGNMENT_EXPRESSIONS,
|
||||
Feature.RELAXED_DECORATORS,
|
||||
Feature.POS_ONLY_ARGUMENTS,
|
||||
Feature.UNPACKING_ON_FLOW,
|
||||
Feature.ANN_ASSIGN_EXTENDED_RHS,
|
||||
Feature.PARENTHESIZED_CONTEXT_MANAGERS,
|
||||
Feature.PATTERN_MATCHING,
|
||||
Feature.EXCEPT_STAR,
|
||||
Feature.VARIADIC_GENERICS,
|
||||
Feature.TYPE_PARAMS,
|
||||
Feature.FSTRING_PARSING,
|
||||
},
|
||||
TargetVersion.PY313: {
|
||||
Feature.F_STRINGS,
|
||||
Feature.DEBUG_F_STRINGS,
|
||||
Feature.NUMERIC_UNDERSCORES,
|
||||
Feature.TRAILING_COMMA_IN_CALL,
|
||||
Feature.TRAILING_COMMA_IN_DEF,
|
||||
Feature.ASYNC_KEYWORDS,
|
||||
Feature.FUTURE_ANNOTATIONS,
|
||||
Feature.ASSIGNMENT_EXPRESSIONS,
|
||||
Feature.RELAXED_DECORATORS,
|
||||
Feature.POS_ONLY_ARGUMENTS,
|
||||
Feature.UNPACKING_ON_FLOW,
|
||||
Feature.ANN_ASSIGN_EXTENDED_RHS,
|
||||
Feature.PARENTHESIZED_CONTEXT_MANAGERS,
|
||||
Feature.PATTERN_MATCHING,
|
||||
Feature.EXCEPT_STAR,
|
||||
Feature.VARIADIC_GENERICS,
|
||||
Feature.TYPE_PARAMS,
|
||||
Feature.FSTRING_PARSING,
|
||||
Feature.TYPE_PARAM_DEFAULTS,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def supports_feature(target_versions: Set[TargetVersion], feature: Feature) -> bool:
|
||||
def supports_feature(target_versions: set[TargetVersion], feature: Feature) -> bool:
|
||||
return all(feature in VERSION_TO_FEATURES[version] for version in target_versions)
|
||||
|
||||
|
||||
class Preview(Enum):
|
||||
"""Individual preview style features."""
|
||||
|
||||
add_trailing_comma_consistently = auto()
|
||||
blank_line_after_nested_stub_class = auto()
|
||||
hex_codes_in_unicode_sequences = auto()
|
||||
improved_async_statements_handling = auto()
|
||||
multiline_string_handling = auto()
|
||||
prefer_splitting_right_hand_side_of_assignments = auto()
|
||||
# NOTE: string_processing requires wrap_long_dict_values_in_parens
|
||||
# for https://github.com/psf/black/issues/3117 to be fixed.
|
||||
string_processing = auto()
|
||||
parenthesize_conditional_expressions = auto()
|
||||
skip_magic_trailing_comma_in_subscript = auto()
|
||||
hug_parens_with_braces_and_square_brackets = auto()
|
||||
wrap_long_dict_values_in_parens = auto()
|
||||
wrap_multiple_context_managers_in_parens = auto()
|
||||
multiline_string_handling = auto()
|
||||
always_one_newline_after_import = auto()
|
||||
fix_fmt_skip_in_one_liners = auto()
|
||||
|
||||
|
||||
UNSTABLE_FEATURES: set[Preview] = {
|
||||
# Many issues, see summary in https://github.com/psf/black/issues/4042
|
||||
Preview.string_processing,
|
||||
# See issue #4159
|
||||
Preview.multiline_string_handling,
|
||||
# See issue #4036 (crash), #4098, #4099 (proposed tweaks)
|
||||
Preview.hug_parens_with_braces_and_square_brackets,
|
||||
}
|
||||
|
||||
|
||||
class Deprecated(UserWarning):
|
||||
"""Visible deprecation warning."""
|
||||
|
||||
|
||||
_MAX_CACHE_KEY_PART_LENGTH: Final = 32
|
||||
|
||||
|
||||
@dataclass
|
||||
class Mode:
|
||||
target_versions: Set[TargetVersion] = field(default_factory=set)
|
||||
target_versions: set[TargetVersion] = field(default_factory=set)
|
||||
line_length: int = DEFAULT_LINE_LENGTH
|
||||
string_normalization: bool = True
|
||||
is_pyi: bool = False
|
||||
is_ipynb: bool = False
|
||||
skip_source_first_line: bool = False
|
||||
magic_trailing_comma: bool = True
|
||||
experimental_string_processing: bool = False
|
||||
python_cell_magics: Set[str] = field(default_factory=set)
|
||||
python_cell_magics: set[str] = field(default_factory=set)
|
||||
preview: bool = False
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
if self.experimental_string_processing:
|
||||
warn(
|
||||
(
|
||||
"`experimental string processing` has been included in `preview`"
|
||||
" and deprecated. Use `preview` instead."
|
||||
),
|
||||
Deprecated,
|
||||
)
|
||||
unstable: bool = False
|
||||
enabled_features: set[Preview] = field(default_factory=set)
|
||||
|
||||
def __contains__(self, feature: Preview) -> bool:
|
||||
"""
|
||||
Provide `Preview.FEATURE in Mode` syntax that mirrors the ``preview`` flag.
|
||||
|
||||
The argument is not checked and features are not differentiated.
|
||||
They only exist to make development easier by clarifying intent.
|
||||
In unstable mode, all features are enabled. In preview mode, all features
|
||||
except those in UNSTABLE_FEATURES are enabled. Any features in
|
||||
`self.enabled_features` are also enabled.
|
||||
"""
|
||||
if feature is Preview.string_processing:
|
||||
return self.preview or self.experimental_string_processing
|
||||
return self.preview
|
||||
if self.unstable:
|
||||
return True
|
||||
if feature in self.enabled_features:
|
||||
return True
|
||||
return self.preview and feature not in UNSTABLE_FEATURES
|
||||
|
||||
def get_cache_key(self) -> str:
|
||||
if self.target_versions:
|
||||
@ -214,6 +259,19 @@ def get_cache_key(self) -> str:
|
||||
)
|
||||
else:
|
||||
version_str = "-"
|
||||
if len(version_str) > _MAX_CACHE_KEY_PART_LENGTH:
|
||||
version_str = sha256(version_str.encode()).hexdigest()[
|
||||
:_MAX_CACHE_KEY_PART_LENGTH
|
||||
]
|
||||
features_and_magics = (
|
||||
",".join(sorted(f.name for f in self.enabled_features))
|
||||
+ "@"
|
||||
+ ",".join(sorted(self.python_cell_magics))
|
||||
)
|
||||
if len(features_and_magics) > _MAX_CACHE_KEY_PART_LENGTH:
|
||||
features_and_magics = sha256(features_and_magics.encode()).hexdigest()[
|
||||
:_MAX_CACHE_KEY_PART_LENGTH
|
||||
]
|
||||
parts = [
|
||||
version_str,
|
||||
str(self.line_length),
|
||||
@ -222,8 +280,8 @@ def get_cache_key(self) -> str:
|
||||
str(int(self.is_ipynb)),
|
||||
str(int(self.skip_source_first_line)),
|
||||
str(int(self.magic_trailing_comma)),
|
||||
str(int(self.experimental_string_processing)),
|
||||
str(int(self.preview)),
|
||||
sha256((",".join(sorted(self.python_cell_magics))).encode()).hexdigest(),
|
||||
str(int(self.unstable)),
|
||||
features_and_magics,
|
||||
]
|
||||
return ".".join(parts)
|
||||
|
@ -3,12 +3,9 @@
|
||||
"""
|
||||
|
||||
import sys
|
||||
from typing import Generic, Iterator, List, Optional, Set, Tuple, TypeVar, Union
|
||||
from collections.abc import Iterator
|
||||
from typing import Final, Generic, Literal, Optional, TypeVar, Union
|
||||
|
||||
if sys.version_info >= (3, 8):
|
||||
from typing import Final
|
||||
else:
|
||||
from typing_extensions import Final
|
||||
if sys.version_info >= (3, 10):
|
||||
from typing import TypeGuard
|
||||
else:
|
||||
@ -17,7 +14,8 @@
|
||||
from mypy_extensions import mypyc_attr
|
||||
|
||||
from black.cache import CACHE_DIR
|
||||
from black.strings import has_triple_quotes
|
||||
from black.mode import Mode
|
||||
from black.strings import get_string_prefix, has_triple_quotes
|
||||
from blib2to3 import pygram
|
||||
from blib2to3.pgen2 import token
|
||||
from blib2to3.pytree import NL, Leaf, Node, type_repr
|
||||
@ -107,6 +105,7 @@
|
||||
syms.trailer,
|
||||
syms.term,
|
||||
syms.power,
|
||||
syms.namedexpr_test,
|
||||
}
|
||||
TYPED_NAMES: Final = {syms.tname, syms.tname_star}
|
||||
ASSIGNMENTS: Final = {
|
||||
@ -124,6 +123,7 @@
|
||||
">>=",
|
||||
"**=",
|
||||
"//=",
|
||||
":",
|
||||
}
|
||||
|
||||
IMPLICIT_TUPLE: Final = {syms.testlist, syms.testlist_star_expr, syms.exprlist}
|
||||
@ -135,7 +135,13 @@
|
||||
OPENING_BRACKETS: Final = set(BRACKET.keys())
|
||||
CLOSING_BRACKETS: Final = set(BRACKET.values())
|
||||
BRACKETS: Final = OPENING_BRACKETS | CLOSING_BRACKETS
|
||||
ALWAYS_NO_SPACE: Final = CLOSING_BRACKETS | {token.COMMA, STANDALONE_COMMENT}
|
||||
ALWAYS_NO_SPACE: Final = CLOSING_BRACKETS | {
|
||||
token.COMMA,
|
||||
STANDALONE_COMMENT,
|
||||
token.FSTRING_MIDDLE,
|
||||
token.FSTRING_END,
|
||||
token.BANG,
|
||||
}
|
||||
|
||||
RARROW = 55
|
||||
|
||||
@ -175,15 +181,15 @@ def visit_default(self, node: LN) -> Iterator[T]:
|
||||
yield from self.visit(child)
|
||||
|
||||
|
||||
def whitespace(leaf: Leaf, *, complex_subscript: bool) -> str: # noqa: C901
|
||||
def whitespace(leaf: Leaf, *, complex_subscript: bool, mode: Mode) -> str: # noqa: C901
|
||||
"""Return whitespace prefix if needed for the given `leaf`.
|
||||
|
||||
`complex_subscript` signals whether the given leaf is part of a subscription
|
||||
which has non-trivial arguments, like arithmetic expressions or function calls.
|
||||
"""
|
||||
NO: Final = ""
|
||||
SPACE: Final = " "
|
||||
DOUBLESPACE: Final = " "
|
||||
NO: Final[str] = ""
|
||||
SPACE: Final[str] = " "
|
||||
DOUBLESPACE: Final[str] = " "
|
||||
t = leaf.type
|
||||
p = leaf.parent
|
||||
v = leaf.value
|
||||
@ -201,6 +207,9 @@ def whitespace(leaf: Leaf, *, complex_subscript: bool) -> str: # noqa: C901
|
||||
}:
|
||||
return NO
|
||||
|
||||
if t == token.LBRACE and p.type == syms.fstring_replacement_field:
|
||||
return NO
|
||||
|
||||
prev = leaf.prev_sibling
|
||||
if not prev:
|
||||
prevp = preceding_leaf(p)
|
||||
@ -235,9 +244,9 @@ def whitespace(leaf: Leaf, *, complex_subscript: bool) -> str: # noqa: C901
|
||||
elif (
|
||||
prevp.type == token.STAR
|
||||
and parent_type(prevp) == syms.star_expr
|
||||
and parent_type(prevp.parent) == syms.subscriptlist
|
||||
and parent_type(prevp.parent) in (syms.subscriptlist, syms.tname_star)
|
||||
):
|
||||
# No space between typevar tuples.
|
||||
# No space between typevar tuples or unpacking them.
|
||||
return NO
|
||||
|
||||
elif prevp.type in VARARGS_SPECIALS:
|
||||
@ -262,6 +271,9 @@ def whitespace(leaf: Leaf, *, complex_subscript: bool) -> str: # noqa: C901
|
||||
elif prev.type in OPENING_BRACKETS:
|
||||
return NO
|
||||
|
||||
elif prev.type == token.BANG:
|
||||
return NO
|
||||
|
||||
if p.type in {syms.parameters, syms.arglist}:
|
||||
# untyped function signatures or calls
|
||||
if not prev or prev.type != token.COMMA:
|
||||
@ -349,6 +361,9 @@ def whitespace(leaf: Leaf, *, complex_subscript: bool) -> str: # noqa: C901
|
||||
|
||||
return NO
|
||||
|
||||
elif t == token.COLONEQUAL or prev.type == token.COLONEQUAL:
|
||||
return SPACE
|
||||
|
||||
elif not complex_subscript:
|
||||
return NO
|
||||
|
||||
@ -380,6 +395,7 @@ def whitespace(leaf: Leaf, *, complex_subscript: bool) -> str: # noqa: C901
|
||||
elif prevp.type == token.EQUAL and prevp_parent.type == syms.argument:
|
||||
return NO
|
||||
|
||||
# TODO: add fstring here?
|
||||
elif t in {token.NAME, token.NUMBER, token.STRING}:
|
||||
return NO
|
||||
|
||||
@ -405,6 +421,13 @@ def whitespace(leaf: Leaf, *, complex_subscript: bool) -> str: # noqa: C901
|
||||
return SPACE
|
||||
|
||||
|
||||
def make_simple_prefix(nl_count: int, form_feed: bool, empty_line: str = "\n") -> str:
|
||||
"""Generate a normalized prefix string."""
|
||||
if form_feed:
|
||||
return (empty_line * (nl_count - 1)) + "\f" + empty_line
|
||||
return empty_line * nl_count
|
||||
|
||||
|
||||
def preceding_leaf(node: Optional[LN]) -> Optional[Leaf]:
|
||||
"""Return the first leaf that precedes `node`, if any."""
|
||||
while node:
|
||||
@ -423,7 +446,7 @@ def preceding_leaf(node: Optional[LN]) -> Optional[Leaf]:
|
||||
return None
|
||||
|
||||
|
||||
def prev_siblings_are(node: Optional[LN], tokens: List[Optional[NodeType]]) -> bool:
|
||||
def prev_siblings_are(node: Optional[LN], tokens: list[Optional[NodeType]]) -> bool:
|
||||
"""Return if the `node` and its previous siblings match types against the provided
|
||||
list of tokens; the provided `node`has its type matched against the last element in
|
||||
the list. `None` can be used as the first element to declare that the start of the
|
||||
@ -522,14 +545,31 @@ def is_arith_like(node: LN) -> bool:
|
||||
}
|
||||
|
||||
|
||||
def is_docstring(leaf: Leaf) -> bool:
|
||||
def is_docstring(node: NL) -> bool:
|
||||
if isinstance(node, Leaf):
|
||||
if node.type != token.STRING:
|
||||
return False
|
||||
|
||||
prefix = get_string_prefix(node.value)
|
||||
if set(prefix).intersection("bBfF"):
|
||||
return False
|
||||
|
||||
if (
|
||||
node.parent
|
||||
and node.parent.type == syms.simple_stmt
|
||||
and not node.parent.prev_sibling
|
||||
and node.parent.parent
|
||||
and node.parent.parent.type == syms.file_input
|
||||
):
|
||||
return True
|
||||
|
||||
if prev_siblings_are(
|
||||
leaf.parent, [None, token.NEWLINE, token.INDENT, syms.simple_stmt]
|
||||
node.parent, [None, token.NEWLINE, token.INDENT, syms.simple_stmt]
|
||||
):
|
||||
return True
|
||||
|
||||
# Multiline docstring on the same line as the `def`.
|
||||
if prev_siblings_are(leaf.parent, [syms.parameters, token.COLON, syms.simple_stmt]):
|
||||
if prev_siblings_are(node.parent, [syms.parameters, token.COLON, syms.simple_stmt]):
|
||||
# `syms.parameters` is only used in funcdefs and async_funcdefs in the Python
|
||||
# grammar. We're safe to return True without further checks.
|
||||
return True
|
||||
@ -563,6 +603,17 @@ def is_one_tuple(node: LN) -> bool:
|
||||
)
|
||||
|
||||
|
||||
def is_tuple(node: LN) -> bool:
|
||||
"""Return True if `node` holds a tuple."""
|
||||
if node.type != syms.atom:
|
||||
return False
|
||||
gexp = unwrap_singleton_parenthesis(node)
|
||||
if gexp is None or gexp.type != syms.testlist_gexp:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def is_tuple_containing_walrus(node: LN) -> bool:
|
||||
"""Return True if `node` holds a tuple that contains a walrus operator."""
|
||||
if node.type != syms.atom:
|
||||
@ -574,11 +625,33 @@ def is_tuple_containing_walrus(node: LN) -> bool:
|
||||
return any(child.type == syms.namedexpr_test for child in gexp.children)
|
||||
|
||||
|
||||
def is_tuple_containing_star(node: LN) -> bool:
|
||||
"""Return True if `node` holds a tuple that contains a star operator."""
|
||||
if node.type != syms.atom:
|
||||
return False
|
||||
gexp = unwrap_singleton_parenthesis(node)
|
||||
if gexp is None or gexp.type != syms.testlist_gexp:
|
||||
return False
|
||||
|
||||
return any(child.type == syms.star_expr for child in gexp.children)
|
||||
|
||||
|
||||
def is_generator(node: LN) -> bool:
|
||||
"""Return True if `node` holds a generator."""
|
||||
if node.type != syms.atom:
|
||||
return False
|
||||
gexp = unwrap_singleton_parenthesis(node)
|
||||
if gexp is None or gexp.type != syms.testlist_gexp:
|
||||
return False
|
||||
|
||||
return any(child.type == syms.old_comp_for for child in gexp.children)
|
||||
|
||||
|
||||
def is_one_sequence_between(
|
||||
opening: Leaf,
|
||||
closing: Leaf,
|
||||
leaves: List[Leaf],
|
||||
brackets: Tuple[int, int] = (token.LPAR, token.RPAR),
|
||||
leaves: list[Leaf],
|
||||
brackets: tuple[int, int] = (token.LPAR, token.RPAR),
|
||||
) -> bool:
|
||||
"""Return True if content between `opening` and `closing` is a one-sequence."""
|
||||
if (opening.type, closing.type) != brackets:
|
||||
@ -688,7 +761,7 @@ def is_yield(node: LN) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
def is_vararg(leaf: Leaf, within: Set[NodeType]) -> bool:
|
||||
def is_vararg(leaf: Leaf, within: set[NodeType]) -> bool:
|
||||
"""Return True if `leaf` is a star or double star in a vararg or kwarg.
|
||||
|
||||
If `within` includes VARARGS_PARENTS, this applies to function signatures.
|
||||
@ -711,13 +784,51 @@ def is_vararg(leaf: Leaf, within: Set[NodeType]) -> bool:
|
||||
return p.type in within
|
||||
|
||||
|
||||
def is_multiline_string(leaf: Leaf) -> bool:
|
||||
def is_fstring(node: Node) -> bool:
|
||||
"""Return True if the node is an f-string"""
|
||||
return node.type == syms.fstring
|
||||
|
||||
|
||||
def fstring_to_string(node: Node) -> Leaf:
|
||||
"""Converts an fstring node back to a string node."""
|
||||
string_without_prefix = str(node)[len(node.prefix) :]
|
||||
string_leaf = Leaf(token.STRING, string_without_prefix, prefix=node.prefix)
|
||||
string_leaf.lineno = node.get_lineno() or 0
|
||||
return string_leaf
|
||||
|
||||
|
||||
def is_multiline_string(node: LN) -> bool:
|
||||
"""Return True if `leaf` is a multiline string that actually spans many lines."""
|
||||
if isinstance(node, Node) and is_fstring(node):
|
||||
leaf = fstring_to_string(node)
|
||||
elif isinstance(node, Leaf):
|
||||
leaf = node
|
||||
else:
|
||||
return False
|
||||
|
||||
return has_triple_quotes(leaf.value) and "\n" in leaf.value
|
||||
|
||||
|
||||
def is_parent_function_or_class(node: Node) -> bool:
|
||||
assert node.type in {syms.suite, syms.simple_stmt}
|
||||
assert node.parent is not None
|
||||
# Note this works for suites / simple_stmts in async def as well
|
||||
return node.parent.type in {syms.funcdef, syms.classdef}
|
||||
|
||||
|
||||
def is_function_or_class(node: Node) -> bool:
|
||||
return node.type in {syms.funcdef, syms.classdef, syms.async_funcdef}
|
||||
|
||||
|
||||
def is_stub_suite(node: Node) -> bool:
|
||||
"""Return True if `node` is a suite with a stub body."""
|
||||
if node.parent is not None and not is_parent_function_or_class(node):
|
||||
return False
|
||||
|
||||
# If there is a comment, we want to keep it.
|
||||
if node.prefix.strip():
|
||||
return False
|
||||
|
||||
if (
|
||||
len(node.children) != 4
|
||||
or node.children[0].type != token.NEWLINE
|
||||
@ -726,6 +837,9 @@ def is_stub_suite(node: Node) -> bool:
|
||||
):
|
||||
return False
|
||||
|
||||
if node.children[3].prefix.strip():
|
||||
return False
|
||||
|
||||
return is_stub_body(node.children[2])
|
||||
|
||||
|
||||
@ -739,7 +853,8 @@ def is_stub_body(node: LN) -> bool:
|
||||
|
||||
child = node.children[0]
|
||||
return (
|
||||
child.type == syms.atom
|
||||
not child.prefix.strip()
|
||||
and child.type == syms.atom
|
||||
and len(child.children) == 3
|
||||
and all(leaf == Leaf(token.DOT, ".") for leaf in child.children)
|
||||
)
|
||||
@ -816,12 +931,27 @@ def is_async_stmt_or_funcdef(leaf: Leaf) -> bool:
|
||||
)
|
||||
|
||||
|
||||
def is_type_comment(leaf: Leaf, suffix: str = "") -> bool:
|
||||
"""Return True if the given leaf is a special comment.
|
||||
Only returns true for type comments for now."""
|
||||
def is_type_comment(leaf: Leaf) -> bool:
|
||||
"""Return True if the given leaf is a type comment. This function should only
|
||||
be used for general type comments (excluding ignore annotations, which should
|
||||
use `is_type_ignore_comment`). Note that general type comments are no longer
|
||||
used in modern version of Python, this function may be deprecated in the future."""
|
||||
t = leaf.type
|
||||
v = leaf.value
|
||||
return t in {token.COMMENT, STANDALONE_COMMENT} and v.startswith("# type:" + suffix)
|
||||
return t in {token.COMMENT, STANDALONE_COMMENT} and v.startswith("# type:")
|
||||
|
||||
|
||||
def is_type_ignore_comment(leaf: Leaf) -> bool:
|
||||
"""Return True if the given leaf is a type comment with ignore annotation."""
|
||||
t = leaf.type
|
||||
v = leaf.value
|
||||
return t in {token.COMMENT, STANDALONE_COMMENT} and is_type_ignore_comment_string(v)
|
||||
|
||||
|
||||
def is_type_ignore_comment_string(value: str) -> bool:
|
||||
"""Return True if the given string match with type comment with
|
||||
ignore annotation."""
|
||||
return value.startswith("# type: ignore")
|
||||
|
||||
|
||||
def wrap_in_parentheses(parent: Node, child: LN, *, visible: bool = True) -> None:
|
||||
@ -880,21 +1010,69 @@ def is_rpar_token(nl: NL) -> TypeGuard[Leaf]:
|
||||
return nl.type == token.RPAR
|
||||
|
||||
|
||||
def is_string_token(nl: NL) -> TypeGuard[Leaf]:
|
||||
return nl.type == token.STRING
|
||||
|
||||
|
||||
def is_number_token(nl: NL) -> TypeGuard[Leaf]:
|
||||
return nl.type == token.NUMBER
|
||||
|
||||
|
||||
def is_part_of_annotation(leaf: Leaf) -> bool:
|
||||
"""Returns whether this leaf is part of type annotations."""
|
||||
def get_annotation_type(leaf: Leaf) -> Literal["return", "param", None]:
|
||||
"""Returns the type of annotation this leaf is part of, if any."""
|
||||
ancestor = leaf.parent
|
||||
while ancestor is not None:
|
||||
if ancestor.prev_sibling and ancestor.prev_sibling.type == token.RARROW:
|
||||
return True
|
||||
return "return"
|
||||
if ancestor.parent and ancestor.parent.type == syms.tname:
|
||||
return True
|
||||
return "param"
|
||||
ancestor = ancestor.parent
|
||||
return None
|
||||
|
||||
|
||||
def is_part_of_annotation(leaf: Leaf) -> bool:
|
||||
"""Returns whether this leaf is part of a type annotation."""
|
||||
assert leaf.parent is not None
|
||||
return get_annotation_type(leaf) is not None
|
||||
|
||||
|
||||
def first_leaf(node: LN) -> Optional[Leaf]:
|
||||
"""Returns the first leaf of the ancestor node."""
|
||||
if isinstance(node, Leaf):
|
||||
return node
|
||||
elif not node.children:
|
||||
return None
|
||||
else:
|
||||
return first_leaf(node.children[0])
|
||||
|
||||
|
||||
def last_leaf(node: LN) -> Optional[Leaf]:
|
||||
"""Returns the last leaf of the ancestor node."""
|
||||
if isinstance(node, Leaf):
|
||||
return node
|
||||
elif not node.children:
|
||||
return None
|
||||
else:
|
||||
return last_leaf(node.children[-1])
|
||||
|
||||
|
||||
def furthest_ancestor_with_last_leaf(leaf: Leaf) -> LN:
|
||||
"""Returns the furthest ancestor that has this leaf node as the last leaf."""
|
||||
node: LN = leaf
|
||||
while node.parent and node.parent.children and node is node.parent.children[-1]:
|
||||
node = node.parent
|
||||
return node
|
||||
|
||||
|
||||
def has_sibling_with_type(node: LN, type: int) -> bool:
|
||||
# Check previous siblings
|
||||
sibling = node.prev_sibling
|
||||
while sibling is not None:
|
||||
if sibling.type == type:
|
||||
return True
|
||||
sibling = sibling.prev_sibling
|
||||
|
||||
# Check next siblings
|
||||
sibling = node.next_sibling
|
||||
while sibling is not None:
|
||||
if sibling.type == type:
|
||||
return True
|
||||
sibling = sibling.next_sibling
|
||||
|
||||
return False
|
||||
|
@ -1,6 +1,7 @@
|
||||
"""
|
||||
Formatting numeric literals.
|
||||
"""
|
||||
|
||||
from blib2to3.pytree import Leaf
|
||||
|
||||
|
||||
@ -13,7 +14,7 @@ def format_hex(text: str) -> str:
|
||||
|
||||
|
||||
def format_scientific_notation(text: str) -> str:
|
||||
"""Formats a numeric string utilizing scentific notation"""
|
||||
"""Formats a numeric string utilizing scientific notation"""
|
||||
before, after = text.split("e")
|
||||
sign = ""
|
||||
if after.startswith("-"):
|
||||
|
@ -4,6 +4,7 @@
|
||||
"""
|
||||
|
||||
import json
|
||||
import re
|
||||
import tempfile
|
||||
from typing import Any, Optional
|
||||
|
||||
@ -55,12 +56,28 @@ def ipynb_diff(a: str, b: str, a_name: str, b_name: str) -> str:
|
||||
return "".join(diff_lines)
|
||||
|
||||
|
||||
_line_pattern = re.compile(r"(.*?(?:\r\n|\n|\r|$))")
|
||||
|
||||
|
||||
def _splitlines_no_ff(source: str) -> list[str]:
|
||||
"""Split a string into lines ignoring form feed and other chars.
|
||||
|
||||
This mimics how the Python parser splits source code.
|
||||
|
||||
A simplified version of the function with the same name in Lib/ast.py
|
||||
"""
|
||||
result = [match[0] for match in _line_pattern.finditer(source)]
|
||||
if result[-1] == "":
|
||||
result.pop(-1)
|
||||
return result
|
||||
|
||||
|
||||
def diff(a: str, b: str, a_name: str, b_name: str) -> str:
|
||||
"""Return a unified diff string between strings `a` and `b`."""
|
||||
import difflib
|
||||
|
||||
a_lines = a.splitlines(keepends=True)
|
||||
b_lines = b.splitlines(keepends=True)
|
||||
a_lines = _splitlines_no_ff(a)
|
||||
b_lines = _splitlines_no_ff(b)
|
||||
diff_lines = []
|
||||
for line in difflib.unified_diff(
|
||||
a_lines, b_lines, fromfile=a_name, tofile=b_name, n=5
|
||||
|
@ -1,15 +1,11 @@
|
||||
"""
|
||||
Parse Python code and perform AST validation.
|
||||
"""
|
||||
import ast
|
||||
import platform
|
||||
import sys
|
||||
from typing import Any, Iterable, Iterator, List, Set, Tuple, Type, Union
|
||||
|
||||
if sys.version_info < (3, 8):
|
||||
from typing_extensions import Final
|
||||
else:
|
||||
from typing import Final
|
||||
import ast
|
||||
import sys
|
||||
import warnings
|
||||
from collections.abc import Collection, Iterator
|
||||
|
||||
from black.mode import VERSION_TO_FEATURES, Feature, TargetVersion, supports_feature
|
||||
from black.nodes import syms
|
||||
@ -20,42 +16,19 @@
|
||||
from blib2to3.pgen2.tokenize import TokenError
|
||||
from blib2to3.pytree import Leaf, Node
|
||||
|
||||
ast3: Any
|
||||
|
||||
_IS_PYPY = platform.python_implementation() == "PyPy"
|
||||
|
||||
try:
|
||||
from typed_ast import ast3
|
||||
except ImportError:
|
||||
if sys.version_info < (3, 8) and not _IS_PYPY:
|
||||
print(
|
||||
(
|
||||
"The typed_ast package is required but not installed.\n"
|
||||
"You can upgrade to Python 3.8+ or install typed_ast with\n"
|
||||
"`python3 -m pip install typed-ast`."
|
||||
),
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
else:
|
||||
ast3 = ast
|
||||
|
||||
|
||||
PY2_HINT: Final = "Python 2 support was removed in version 22.0."
|
||||
|
||||
|
||||
class InvalidInput(ValueError):
|
||||
"""Raised when input source code fails all parse attempts."""
|
||||
|
||||
|
||||
def get_grammars(target_versions: Set[TargetVersion]) -> List[Grammar]:
|
||||
def get_grammars(target_versions: set[TargetVersion]) -> list[Grammar]:
|
||||
if not target_versions:
|
||||
# No target_version specified, so try all grammars.
|
||||
return [
|
||||
# Python 3.7-3.9
|
||||
pygram.python_grammar_no_print_statement_no_exec_statement_async_keywords,
|
||||
pygram.python_grammar_async_keywords,
|
||||
# Python 3.0-3.6
|
||||
pygram.python_grammar_no_print_statement_no_exec_statement,
|
||||
pygram.python_grammar,
|
||||
# Python 3.10+
|
||||
pygram.python_grammar_soft_keywords,
|
||||
]
|
||||
@ -66,12 +39,10 @@ def get_grammars(target_versions: Set[TargetVersion]) -> List[Grammar]:
|
||||
target_versions, Feature.ASYNC_IDENTIFIERS
|
||||
) and not supports_feature(target_versions, Feature.PATTERN_MATCHING):
|
||||
# Python 3.7-3.9
|
||||
grammars.append(
|
||||
pygram.python_grammar_no_print_statement_no_exec_statement_async_keywords
|
||||
)
|
||||
grammars.append(pygram.python_grammar_async_keywords)
|
||||
if not supports_feature(target_versions, Feature.ASYNC_KEYWORDS):
|
||||
# Python 3.0-3.6
|
||||
grammars.append(pygram.python_grammar_no_print_statement_no_exec_statement)
|
||||
grammars.append(pygram.python_grammar)
|
||||
if any(Feature.PATTERN_MATCHING in VERSION_TO_FEATURES[v] for v in target_versions):
|
||||
# Python 3.10+
|
||||
grammars.append(pygram.python_grammar_soft_keywords)
|
||||
@ -81,12 +52,20 @@ def get_grammars(target_versions: Set[TargetVersion]) -> List[Grammar]:
|
||||
return grammars
|
||||
|
||||
|
||||
def lib2to3_parse(src_txt: str, target_versions: Iterable[TargetVersion] = ()) -> Node:
|
||||
def lib2to3_parse(
|
||||
src_txt: str, target_versions: Collection[TargetVersion] = ()
|
||||
) -> Node:
|
||||
"""Given a string with source, return the lib2to3 Node."""
|
||||
if not src_txt.endswith("\n"):
|
||||
src_txt += "\n"
|
||||
|
||||
grammars = get_grammars(set(target_versions))
|
||||
if target_versions:
|
||||
max_tv = max(target_versions, key=lambda tv: tv.value)
|
||||
tv_str = f" for target version {max_tv.pretty()}"
|
||||
else:
|
||||
tv_str = ""
|
||||
|
||||
errors = {}
|
||||
for grammar in grammars:
|
||||
drv = driver.Driver(grammar)
|
||||
@ -102,28 +81,20 @@ def lib2to3_parse(src_txt: str, target_versions: Iterable[TargetVersion] = ()) -
|
||||
except IndexError:
|
||||
faulty_line = "<line number missing in source>"
|
||||
errors[grammar.version] = InvalidInput(
|
||||
f"Cannot parse: {lineno}:{column}: {faulty_line}"
|
||||
f"Cannot parse{tv_str}: {lineno}:{column}: {faulty_line}"
|
||||
)
|
||||
|
||||
except TokenError as te:
|
||||
# In edge cases these are raised; and typically don't have a "faulty_line".
|
||||
lineno, column = te.args[1]
|
||||
errors[grammar.version] = InvalidInput(
|
||||
f"Cannot parse: {lineno}:{column}: {te.args[0]}"
|
||||
f"Cannot parse{tv_str}: {lineno}:{column}: {te.args[0]}"
|
||||
)
|
||||
|
||||
else:
|
||||
# Choose the latest version when raising the actual parsing error.
|
||||
assert len(errors) >= 1
|
||||
exc = errors[max(errors)]
|
||||
|
||||
if matches_grammar(src_txt, pygram.python_grammar) or matches_grammar(
|
||||
src_txt, pygram.python_grammar_no_print_statement
|
||||
):
|
||||
original_msg = exc.args[0]
|
||||
msg = f"{original_msg}\n{PY2_HINT}"
|
||||
raise InvalidInput(msg) from None
|
||||
|
||||
raise exc from None
|
||||
|
||||
if isinstance(result, Leaf):
|
||||
@ -147,40 +118,30 @@ def lib2to3_unparse(node: Node) -> str:
|
||||
return code
|
||||
|
||||
|
||||
def parse_single_version(
|
||||
src: str, version: Tuple[int, int], *, type_comments: bool
|
||||
) -> Union[ast.AST, ast3.AST]:
|
||||
class ASTSafetyError(Exception):
|
||||
"""Raised when Black's generated code is not equivalent to the old AST."""
|
||||
|
||||
|
||||
def _parse_single_version(
|
||||
src: str, version: tuple[int, int], *, type_comments: bool
|
||||
) -> ast.AST:
|
||||
filename = "<unknown>"
|
||||
# typed-ast is needed because of feature version limitations in the builtin ast 3.8>
|
||||
if sys.version_info >= (3, 8) and version >= (3,):
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("ignore", SyntaxWarning)
|
||||
warnings.simplefilter("ignore", DeprecationWarning)
|
||||
return ast.parse(
|
||||
src, filename, feature_version=version, type_comments=type_comments
|
||||
)
|
||||
|
||||
if _IS_PYPY:
|
||||
# PyPy 3.7 doesn't support type comment tracking which is not ideal, but there's
|
||||
# not much we can do as typed-ast won't work either.
|
||||
if sys.version_info >= (3, 8):
|
||||
return ast3.parse(src, filename, type_comments=type_comments)
|
||||
else:
|
||||
return ast3.parse(src, filename)
|
||||
else:
|
||||
if type_comments:
|
||||
# Typed-ast is guaranteed to be used here and automatically tracks type
|
||||
# comments separately.
|
||||
return ast3.parse(src, filename, feature_version=version[1])
|
||||
else:
|
||||
return ast.parse(src, filename)
|
||||
|
||||
|
||||
def parse_ast(src: str) -> Union[ast.AST, ast3.AST]:
|
||||
def parse_ast(src: str) -> ast.AST:
|
||||
# TODO: support Python 4+ ;)
|
||||
versions = [(3, minor) for minor in range(3, sys.version_info[1] + 1)]
|
||||
|
||||
first_error = ""
|
||||
for version in sorted(versions, reverse=True):
|
||||
try:
|
||||
return parse_single_version(src, version, type_comments=True)
|
||||
return _parse_single_version(src, version, type_comments=True)
|
||||
except SyntaxError as e:
|
||||
if not first_error:
|
||||
first_error = str(e)
|
||||
@ -188,42 +149,52 @@ def parse_ast(src: str) -> Union[ast.AST, ast3.AST]:
|
||||
# Try to parse without type comments
|
||||
for version in sorted(versions, reverse=True):
|
||||
try:
|
||||
return parse_single_version(src, version, type_comments=False)
|
||||
return _parse_single_version(src, version, type_comments=False)
|
||||
except SyntaxError:
|
||||
pass
|
||||
|
||||
raise SyntaxError(first_error)
|
||||
|
||||
|
||||
ast3_AST: Final[Type[ast3.AST]] = ast3.AST
|
||||
|
||||
|
||||
def _normalize(lineend: str, value: str) -> str:
|
||||
# To normalize, we strip any leading and trailing space from
|
||||
# each line...
|
||||
stripped: List[str] = [i.strip() for i in value.splitlines()]
|
||||
stripped: list[str] = [i.strip() for i in value.splitlines()]
|
||||
normalized = lineend.join(stripped)
|
||||
# ...and remove any blank lines at the beginning and end of
|
||||
# the whole string
|
||||
return normalized.strip()
|
||||
|
||||
|
||||
def stringify_ast(node: Union[ast.AST, ast3.AST], depth: int = 0) -> Iterator[str]:
|
||||
def stringify_ast(node: ast.AST) -> Iterator[str]:
|
||||
"""Simple visitor generating strings to compare ASTs by content."""
|
||||
return _stringify_ast(node, [])
|
||||
|
||||
node = fixup_ast_constants(node)
|
||||
|
||||
yield f"{' ' * depth}{node.__class__.__name__}("
|
||||
def _stringify_ast_with_new_parent(
|
||||
node: ast.AST, parent_stack: list[ast.AST], new_parent: ast.AST
|
||||
) -> Iterator[str]:
|
||||
parent_stack.append(new_parent)
|
||||
yield from _stringify_ast(node, parent_stack)
|
||||
parent_stack.pop()
|
||||
|
||||
|
||||
def _stringify_ast(node: ast.AST, parent_stack: list[ast.AST]) -> Iterator[str]:
|
||||
if (
|
||||
isinstance(node, ast.Constant)
|
||||
and isinstance(node.value, str)
|
||||
and node.kind == "u"
|
||||
):
|
||||
# It's a quirk of history that we strip the u prefix over here. We used to
|
||||
# rewrite the AST nodes for Python version compatibility and we never copied
|
||||
# over the kind
|
||||
node.kind = None
|
||||
|
||||
yield f"{' ' * len(parent_stack)}{node.__class__.__name__}("
|
||||
|
||||
type_ignore_classes: Tuple[Type[Any], ...]
|
||||
for field in sorted(node._fields): # noqa: F402
|
||||
# TypeIgnore will not be present using pypy < 3.8, so need for this
|
||||
if not (_IS_PYPY and sys.version_info < (3, 8)):
|
||||
# TypeIgnore has only one field 'lineno' which breaks this comparison
|
||||
type_ignore_classes = (ast3.TypeIgnore,)
|
||||
if sys.version_info >= (3, 8):
|
||||
type_ignore_classes += (ast.TypeIgnore,)
|
||||
if isinstance(node, type_ignore_classes):
|
||||
if isinstance(node, ast.TypeIgnore):
|
||||
break
|
||||
|
||||
try:
|
||||
@ -231,7 +202,7 @@ def stringify_ast(node: Union[ast.AST, ast3.AST], depth: int = 0) -> Iterator[st
|
||||
except AttributeError:
|
||||
continue
|
||||
|
||||
yield f"{' ' * (depth+1)}{field}="
|
||||
yield f"{' ' * (len(parent_stack) + 1)}{field}="
|
||||
|
||||
if isinstance(value, list):
|
||||
for item in value:
|
||||
@ -239,51 +210,51 @@ def stringify_ast(node: Union[ast.AST, ast3.AST], depth: int = 0) -> Iterator[st
|
||||
# parentheses and they change the AST.
|
||||
if (
|
||||
field == "targets"
|
||||
and isinstance(node, (ast.Delete, ast3.Delete))
|
||||
and isinstance(item, (ast.Tuple, ast3.Tuple))
|
||||
and isinstance(node, ast.Delete)
|
||||
and isinstance(item, ast.Tuple)
|
||||
):
|
||||
for elt in item.elts:
|
||||
yield from stringify_ast(elt, depth + 2)
|
||||
for elt in _unwrap_tuples(item):
|
||||
yield from _stringify_ast_with_new_parent(
|
||||
elt, parent_stack, node
|
||||
)
|
||||
|
||||
elif isinstance(item, (ast.AST, ast3.AST)):
|
||||
yield from stringify_ast(item, depth + 2)
|
||||
elif isinstance(item, ast.AST):
|
||||
yield from _stringify_ast_with_new_parent(item, parent_stack, node)
|
||||
|
||||
# Note that we are referencing the typed-ast ASTs via global variables and not
|
||||
# direct module attribute accesses because that breaks mypyc. It's probably
|
||||
# something to do with the ast3 variables being marked as Any leading
|
||||
# mypy to think this branch is always taken, leaving the rest of the code
|
||||
# unanalyzed. Tighting up the types for the typed-ast AST types avoids the
|
||||
# mypyc crash.
|
||||
elif isinstance(value, (ast.AST, ast3_AST)):
|
||||
yield from stringify_ast(value, depth + 2)
|
||||
elif isinstance(value, ast.AST):
|
||||
yield from _stringify_ast_with_new_parent(value, parent_stack, node)
|
||||
|
||||
else:
|
||||
normalized: object
|
||||
# Constant strings may be indented across newlines, if they are
|
||||
# docstrings; fold spaces after newlines when comparing. Similarly,
|
||||
# trailing and leading space may be removed.
|
||||
if (
|
||||
isinstance(node, ast.Constant)
|
||||
and field == "value"
|
||||
and isinstance(value, str)
|
||||
and len(parent_stack) >= 2
|
||||
# Any standalone string, ideally this would
|
||||
# exactly match black.nodes.is_docstring
|
||||
and isinstance(parent_stack[-1], ast.Expr)
|
||||
):
|
||||
# Constant strings may be indented across newlines, if they are
|
||||
# docstrings; fold spaces after newlines when comparing. Similarly,
|
||||
# trailing and leading space may be removed.
|
||||
normalized = _normalize("\n", value)
|
||||
elif field == "type_comment" and isinstance(value, str):
|
||||
# Trailing whitespace in type comments is removed.
|
||||
normalized = value.rstrip()
|
||||
else:
|
||||
normalized = value
|
||||
yield f"{' ' * (depth+2)}{normalized!r}, # {value.__class__.__name__}"
|
||||
yield (
|
||||
f"{' ' * (len(parent_stack) + 1)}{normalized!r}, #"
|
||||
f" {value.__class__.__name__}"
|
||||
)
|
||||
|
||||
yield f"{' ' * depth}) # /{node.__class__.__name__}"
|
||||
yield f"{' ' * len(parent_stack)}) # /{node.__class__.__name__}"
|
||||
|
||||
|
||||
def fixup_ast_constants(node: Union[ast.AST, ast3.AST]) -> Union[ast.AST, ast3.AST]:
|
||||
"""Map ast nodes deprecated in 3.8 to Constant."""
|
||||
if isinstance(node, (ast.Str, ast3.Str, ast.Bytes, ast3.Bytes)):
|
||||
return ast.Constant(value=node.s)
|
||||
|
||||
if isinstance(node, (ast.Num, ast3.Num)):
|
||||
return ast.Constant(value=node.n)
|
||||
|
||||
if isinstance(node, (ast.NameConstant, ast3.NameConstant)):
|
||||
return ast.Constant(value=node.value)
|
||||
|
||||
return node
|
||||
def _unwrap_tuples(node: ast.Tuple) -> Iterator[ast.AST]:
|
||||
for elt in node.elts:
|
||||
if isinstance(elt, ast.Tuple):
|
||||
yield from _unwrap_tuples(elt)
|
||||
else:
|
||||
yield elt
|
||||
|
522
src/black/ranges.py
Normal file
522
src/black/ranges.py
Normal file
@ -0,0 +1,522 @@
|
||||
"""Functions related to Black's formatting by line ranges feature."""
|
||||
|
||||
import difflib
|
||||
from collections.abc import Collection, Iterator, Sequence
|
||||
from dataclasses import dataclass
|
||||
from typing import Union
|
||||
|
||||
from black.nodes import (
|
||||
LN,
|
||||
STANDALONE_COMMENT,
|
||||
Leaf,
|
||||
Node,
|
||||
Visitor,
|
||||
first_leaf,
|
||||
furthest_ancestor_with_last_leaf,
|
||||
last_leaf,
|
||||
syms,
|
||||
)
|
||||
from blib2to3.pgen2.token import ASYNC, NEWLINE
|
||||
|
||||
|
||||
def parse_line_ranges(line_ranges: Sequence[str]) -> list[tuple[int, int]]:
|
||||
lines: list[tuple[int, int]] = []
|
||||
for lines_str in line_ranges:
|
||||
parts = lines_str.split("-")
|
||||
if len(parts) != 2:
|
||||
raise ValueError(
|
||||
"Incorrect --line-ranges format, expect 'START-END', found"
|
||||
f" {lines_str!r}"
|
||||
)
|
||||
try:
|
||||
start = int(parts[0])
|
||||
end = int(parts[1])
|
||||
except ValueError:
|
||||
raise ValueError(
|
||||
"Incorrect --line-ranges value, expect integer ranges, found"
|
||||
f" {lines_str!r}"
|
||||
) from None
|
||||
else:
|
||||
lines.append((start, end))
|
||||
return lines
|
||||
|
||||
|
||||
def is_valid_line_range(lines: tuple[int, int]) -> bool:
|
||||
"""Returns whether the line range is valid."""
|
||||
return not lines or lines[0] <= lines[1]
|
||||
|
||||
|
||||
def sanitized_lines(
|
||||
lines: Collection[tuple[int, int]], src_contents: str
|
||||
) -> Collection[tuple[int, int]]:
|
||||
"""Returns the valid line ranges for the given source.
|
||||
|
||||
This removes ranges that are entirely outside the valid lines.
|
||||
|
||||
Other ranges are normalized so that the start values are at least 1 and the
|
||||
end values are at most the (1-based) index of the last source line.
|
||||
"""
|
||||
if not src_contents:
|
||||
return []
|
||||
good_lines = []
|
||||
src_line_count = src_contents.count("\n")
|
||||
if not src_contents.endswith("\n"):
|
||||
src_line_count += 1
|
||||
for start, end in lines:
|
||||
if start > src_line_count:
|
||||
continue
|
||||
# line-ranges are 1-based
|
||||
start = max(start, 1)
|
||||
if end < start:
|
||||
continue
|
||||
end = min(end, src_line_count)
|
||||
good_lines.append((start, end))
|
||||
return good_lines
|
||||
|
||||
|
||||
def adjusted_lines(
|
||||
lines: Collection[tuple[int, int]],
|
||||
original_source: str,
|
||||
modified_source: str,
|
||||
) -> list[tuple[int, int]]:
|
||||
"""Returns the adjusted line ranges based on edits from the original code.
|
||||
|
||||
This computes the new line ranges by diffing original_source and
|
||||
modified_source, and adjust each range based on how the range overlaps with
|
||||
the diffs.
|
||||
|
||||
Note the diff can contain lines outside of the original line ranges. This can
|
||||
happen when the formatting has to be done in adjacent to maintain consistent
|
||||
local results. For example:
|
||||
|
||||
1. def my_func(arg1, arg2,
|
||||
2. arg3,):
|
||||
3. pass
|
||||
|
||||
If it restricts to line 2-2, it can't simply reformat line 2, it also has
|
||||
to reformat line 1:
|
||||
|
||||
1. def my_func(
|
||||
2. arg1,
|
||||
3. arg2,
|
||||
4. arg3,
|
||||
5. ):
|
||||
6. pass
|
||||
|
||||
In this case, we will expand the line ranges to also include the whole diff
|
||||
block.
|
||||
|
||||
Args:
|
||||
lines: a collection of line ranges.
|
||||
original_source: the original source.
|
||||
modified_source: the modified source.
|
||||
"""
|
||||
lines_mappings = _calculate_lines_mappings(original_source, modified_source)
|
||||
|
||||
new_lines = []
|
||||
# Keep an index of the current search. Since the lines and lines_mappings are
|
||||
# sorted, this makes the search complexity linear.
|
||||
current_mapping_index = 0
|
||||
for start, end in sorted(lines):
|
||||
start_mapping_index = _find_lines_mapping_index(
|
||||
start,
|
||||
lines_mappings,
|
||||
current_mapping_index,
|
||||
)
|
||||
end_mapping_index = _find_lines_mapping_index(
|
||||
end,
|
||||
lines_mappings,
|
||||
start_mapping_index,
|
||||
)
|
||||
current_mapping_index = start_mapping_index
|
||||
if start_mapping_index >= len(lines_mappings) or end_mapping_index >= len(
|
||||
lines_mappings
|
||||
):
|
||||
# Protect against invalid inputs.
|
||||
continue
|
||||
start_mapping = lines_mappings[start_mapping_index]
|
||||
end_mapping = lines_mappings[end_mapping_index]
|
||||
if start_mapping.is_changed_block:
|
||||
# When the line falls into a changed block, expands to the whole block.
|
||||
new_start = start_mapping.modified_start
|
||||
else:
|
||||
new_start = (
|
||||
start - start_mapping.original_start + start_mapping.modified_start
|
||||
)
|
||||
if end_mapping.is_changed_block:
|
||||
# When the line falls into a changed block, expands to the whole block.
|
||||
new_end = end_mapping.modified_end
|
||||
else:
|
||||
new_end = end - end_mapping.original_start + end_mapping.modified_start
|
||||
new_range = (new_start, new_end)
|
||||
if is_valid_line_range(new_range):
|
||||
new_lines.append(new_range)
|
||||
return new_lines
|
||||
|
||||
|
||||
def convert_unchanged_lines(src_node: Node, lines: Collection[tuple[int, int]]) -> None:
|
||||
"""Converts unchanged lines to STANDALONE_COMMENT.
|
||||
|
||||
The idea is similar to how `# fmt: on/off` is implemented. It also converts the
|
||||
nodes between those markers as a single `STANDALONE_COMMENT` leaf node with
|
||||
the unformatted code as its value. `STANDALONE_COMMENT` is a "fake" token
|
||||
that will be formatted as-is with its prefix normalized.
|
||||
|
||||
Here we perform two passes:
|
||||
|
||||
1. Visit the top-level statements, and convert them to a single
|
||||
`STANDALONE_COMMENT` when unchanged. This speeds up formatting when some
|
||||
of the top-level statements aren't changed.
|
||||
2. Convert unchanged "unwrapped lines" to `STANDALONE_COMMENT` nodes line by
|
||||
line. "unwrapped lines" are divided by the `NEWLINE` token. e.g. a
|
||||
multi-line statement is *one* "unwrapped line" that ends with `NEWLINE`,
|
||||
even though this statement itself can span multiple lines, and the
|
||||
tokenizer only sees the last '\n' as the `NEWLINE` token.
|
||||
|
||||
NOTE: During pass (2), comment prefixes and indentations are ALWAYS
|
||||
normalized even when the lines aren't changed. This is fixable by moving
|
||||
more formatting to pass (1). However, it's hard to get it correct when
|
||||
incorrect indentations are used. So we defer this to future optimizations.
|
||||
"""
|
||||
lines_set: set[int] = set()
|
||||
for start, end in lines:
|
||||
lines_set.update(range(start, end + 1))
|
||||
visitor = _TopLevelStatementsVisitor(lines_set)
|
||||
_ = list(visitor.visit(src_node)) # Consume all results.
|
||||
_convert_unchanged_line_by_line(src_node, lines_set)
|
||||
|
||||
|
||||
def _contains_standalone_comment(node: LN) -> bool:
|
||||
if isinstance(node, Leaf):
|
||||
return node.type == STANDALONE_COMMENT
|
||||
else:
|
||||
for child in node.children:
|
||||
if _contains_standalone_comment(child):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class _TopLevelStatementsVisitor(Visitor[None]):
|
||||
"""
|
||||
A node visitor that converts unchanged top-level statements to
|
||||
STANDALONE_COMMENT.
|
||||
|
||||
This is used in addition to _convert_unchanged_line_by_line, to
|
||||
speed up formatting when there are unchanged top-level
|
||||
classes/functions/statements.
|
||||
"""
|
||||
|
||||
def __init__(self, lines_set: set[int]):
|
||||
self._lines_set = lines_set
|
||||
|
||||
def visit_simple_stmt(self, node: Node) -> Iterator[None]:
|
||||
# This is only called for top-level statements, since `visit_suite`
|
||||
# won't visit its children nodes.
|
||||
yield from []
|
||||
newline_leaf = last_leaf(node)
|
||||
if not newline_leaf:
|
||||
return
|
||||
assert (
|
||||
newline_leaf.type == NEWLINE
|
||||
), f"Unexpectedly found leaf.type={newline_leaf.type}"
|
||||
# We need to find the furthest ancestor with the NEWLINE as the last
|
||||
# leaf, since a `suite` can simply be a `simple_stmt` when it puts
|
||||
# its body on the same line. Example: `if cond: pass`.
|
||||
ancestor = furthest_ancestor_with_last_leaf(newline_leaf)
|
||||
if not _get_line_range(ancestor).intersection(self._lines_set):
|
||||
_convert_node_to_standalone_comment(ancestor)
|
||||
|
||||
def visit_suite(self, node: Node) -> Iterator[None]:
|
||||
yield from []
|
||||
# If there is a STANDALONE_COMMENT node, it means parts of the node tree
|
||||
# have fmt on/off/skip markers. Those STANDALONE_COMMENT nodes can't
|
||||
# be simply converted by calling str(node). So we just don't convert
|
||||
# here.
|
||||
if _contains_standalone_comment(node):
|
||||
return
|
||||
# Find the semantic parent of this suite. For `async_stmt` and
|
||||
# `async_funcdef`, the ASYNC token is defined on a separate level by the
|
||||
# grammar.
|
||||
semantic_parent = node.parent
|
||||
if semantic_parent is not None:
|
||||
if (
|
||||
semantic_parent.prev_sibling is not None
|
||||
and semantic_parent.prev_sibling.type == ASYNC
|
||||
):
|
||||
semantic_parent = semantic_parent.parent
|
||||
if semantic_parent is not None and not _get_line_range(
|
||||
semantic_parent
|
||||
).intersection(self._lines_set):
|
||||
_convert_node_to_standalone_comment(semantic_parent)
|
||||
|
||||
|
||||
def _convert_unchanged_line_by_line(node: Node, lines_set: set[int]) -> None:
|
||||
"""Converts unchanged to STANDALONE_COMMENT line by line."""
|
||||
for leaf in node.leaves():
|
||||
if leaf.type != NEWLINE:
|
||||
# We only consider "unwrapped lines", which are divided by the NEWLINE
|
||||
# token.
|
||||
continue
|
||||
if leaf.parent and leaf.parent.type == syms.match_stmt:
|
||||
# The `suite` node is defined as:
|
||||
# match_stmt: "match" subject_expr ':' NEWLINE INDENT case_block+ DEDENT
|
||||
# Here we need to check `subject_expr`. The `case_block+` will be
|
||||
# checked by their own NEWLINEs.
|
||||
nodes_to_ignore: list[LN] = []
|
||||
prev_sibling = leaf.prev_sibling
|
||||
while prev_sibling:
|
||||
nodes_to_ignore.insert(0, prev_sibling)
|
||||
prev_sibling = prev_sibling.prev_sibling
|
||||
if not _get_line_range(nodes_to_ignore).intersection(lines_set):
|
||||
_convert_nodes_to_standalone_comment(nodes_to_ignore, newline=leaf)
|
||||
elif leaf.parent and leaf.parent.type == syms.suite:
|
||||
# The `suite` node is defined as:
|
||||
# suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT
|
||||
# We will check `simple_stmt` and `stmt+` separately against the lines set
|
||||
parent_sibling = leaf.parent.prev_sibling
|
||||
nodes_to_ignore = []
|
||||
while parent_sibling and not parent_sibling.type == syms.suite:
|
||||
# NOTE: Multiple suite nodes can exist as siblings in e.g. `if_stmt`.
|
||||
nodes_to_ignore.insert(0, parent_sibling)
|
||||
parent_sibling = parent_sibling.prev_sibling
|
||||
# Special case for `async_stmt` and `async_funcdef` where the ASYNC
|
||||
# token is on the grandparent node.
|
||||
grandparent = leaf.parent.parent
|
||||
if (
|
||||
grandparent is not None
|
||||
and grandparent.prev_sibling is not None
|
||||
and grandparent.prev_sibling.type == ASYNC
|
||||
):
|
||||
nodes_to_ignore.insert(0, grandparent.prev_sibling)
|
||||
if not _get_line_range(nodes_to_ignore).intersection(lines_set):
|
||||
_convert_nodes_to_standalone_comment(nodes_to_ignore, newline=leaf)
|
||||
else:
|
||||
ancestor = furthest_ancestor_with_last_leaf(leaf)
|
||||
# Consider multiple decorators as a whole block, as their
|
||||
# newlines have different behaviors than the rest of the grammar.
|
||||
if (
|
||||
ancestor.type == syms.decorator
|
||||
and ancestor.parent
|
||||
and ancestor.parent.type == syms.decorators
|
||||
):
|
||||
ancestor = ancestor.parent
|
||||
if not _get_line_range(ancestor).intersection(lines_set):
|
||||
_convert_node_to_standalone_comment(ancestor)
|
||||
|
||||
|
||||
def _convert_node_to_standalone_comment(node: LN) -> None:
|
||||
"""Convert node to STANDALONE_COMMENT by modifying the tree inline."""
|
||||
parent = node.parent
|
||||
if not parent:
|
||||
return
|
||||
first = first_leaf(node)
|
||||
last = last_leaf(node)
|
||||
if not first or not last:
|
||||
return
|
||||
if first is last:
|
||||
# This can happen on the following edge cases:
|
||||
# 1. A block of `# fmt: off/on` code except the `# fmt: on` is placed
|
||||
# on the end of the last line instead of on a new line.
|
||||
# 2. A single backslash on its own line followed by a comment line.
|
||||
# Ideally we don't want to format them when not requested, but fixing
|
||||
# isn't easy. These cases are also badly formatted code, so it isn't
|
||||
# too bad we reformat them.
|
||||
return
|
||||
# The prefix contains comments and indentation whitespaces. They are
|
||||
# reformatted accordingly to the correct indentation level.
|
||||
# This also means the indentation will be changed on the unchanged lines, and
|
||||
# this is actually required to not break incremental reformatting.
|
||||
prefix = first.prefix
|
||||
first.prefix = ""
|
||||
index = node.remove()
|
||||
if index is not None:
|
||||
# Remove the '\n', as STANDALONE_COMMENT will have '\n' appended when
|
||||
# generating the formatted code.
|
||||
value = str(node)[:-1]
|
||||
parent.insert_child(
|
||||
index,
|
||||
Leaf(
|
||||
STANDALONE_COMMENT,
|
||||
value,
|
||||
prefix=prefix,
|
||||
fmt_pass_converted_first_leaf=first,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def _convert_nodes_to_standalone_comment(nodes: Sequence[LN], *, newline: Leaf) -> None:
|
||||
"""Convert nodes to STANDALONE_COMMENT by modifying the tree inline."""
|
||||
if not nodes:
|
||||
return
|
||||
parent = nodes[0].parent
|
||||
first = first_leaf(nodes[0])
|
||||
if not parent or not first:
|
||||
return
|
||||
prefix = first.prefix
|
||||
first.prefix = ""
|
||||
value = "".join(str(node) for node in nodes)
|
||||
# The prefix comment on the NEWLINE leaf is the trailing comment of the statement.
|
||||
if newline.prefix:
|
||||
value += newline.prefix
|
||||
newline.prefix = ""
|
||||
index = nodes[0].remove()
|
||||
for node in nodes[1:]:
|
||||
node.remove()
|
||||
if index is not None:
|
||||
parent.insert_child(
|
||||
index,
|
||||
Leaf(
|
||||
STANDALONE_COMMENT,
|
||||
value,
|
||||
prefix=prefix,
|
||||
fmt_pass_converted_first_leaf=first,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def _leaf_line_end(leaf: Leaf) -> int:
|
||||
"""Returns the line number of the leaf node's last line."""
|
||||
if leaf.type == NEWLINE:
|
||||
return leaf.lineno
|
||||
else:
|
||||
# Leaf nodes like multiline strings can occupy multiple lines.
|
||||
return leaf.lineno + str(leaf).count("\n")
|
||||
|
||||
|
||||
def _get_line_range(node_or_nodes: Union[LN, list[LN]]) -> set[int]:
|
||||
"""Returns the line range of this node or list of nodes."""
|
||||
if isinstance(node_or_nodes, list):
|
||||
nodes = node_or_nodes
|
||||
if not nodes:
|
||||
return set()
|
||||
first = first_leaf(nodes[0])
|
||||
last = last_leaf(nodes[-1])
|
||||
if first and last:
|
||||
line_start = first.lineno
|
||||
line_end = _leaf_line_end(last)
|
||||
return set(range(line_start, line_end + 1))
|
||||
else:
|
||||
return set()
|
||||
else:
|
||||
node = node_or_nodes
|
||||
if isinstance(node, Leaf):
|
||||
return set(range(node.lineno, _leaf_line_end(node) + 1))
|
||||
else:
|
||||
first = first_leaf(node)
|
||||
last = last_leaf(node)
|
||||
if first and last:
|
||||
return set(range(first.lineno, _leaf_line_end(last) + 1))
|
||||
else:
|
||||
return set()
|
||||
|
||||
|
||||
@dataclass
|
||||
class _LinesMapping:
|
||||
"""1-based lines mapping from original source to modified source.
|
||||
|
||||
Lines [original_start, original_end] from original source
|
||||
are mapped to [modified_start, modified_end].
|
||||
|
||||
The ranges are inclusive on both ends.
|
||||
"""
|
||||
|
||||
original_start: int
|
||||
original_end: int
|
||||
modified_start: int
|
||||
modified_end: int
|
||||
# Whether this range corresponds to a changed block, or an unchanged block.
|
||||
is_changed_block: bool
|
||||
|
||||
|
||||
def _calculate_lines_mappings(
    original_source: str,
    modified_source: str,
) -> Sequence[_LinesMapping]:
    """Return a sequence of _LinesMapping obtained by diffing the two sources.

    For example, given the following diff:
        import re
        - def func(arg1,
        -     arg2, arg3):
        + def func(arg1, arg2, arg3):
            pass
    It returns the following mappings:
        original -> modified
        (1, 1) -> (1, 1), is_changed_block=False (the "import re" line)
        (2, 3) -> (2, 2), is_changed_block=True (the diff)
        (4, 4) -> (3, 3), is_changed_block=False (the "pass" line)

    You can think of this visually as if it brings up a side-by-side diff, and
    tries to map the line ranges from the left side to the right side:

        (1, 1)->(1, 1)   1. import re          1. import re
        (2, 3)->(2, 2)   2. def func(arg1,     2. def func(arg1, arg2, arg3):
                         3.     arg2, arg3):
        (4, 4)->(3, 3)   4.     pass           3. pass

    Args:
      original_source: the original source.
      modified_source: the modified source.
    """
    matcher = difflib.SequenceMatcher(
        None,
        original_source.splitlines(keepends=True),
        modified_source.splitlines(keepends=True),
    )
    matching_blocks = matcher.get_matching_blocks()
    mappings: list[_LinesMapping] = []
    # matching_blocks is a sequence of "same block of code ranges", see
    # https://docs.python.org/3/library/difflib.html#difflib.SequenceMatcher.get_matching_blocks
    # Each matching block yields a _LinesMapping with is_changed_block=False,
    # and the gap between two consecutive matching blocks yields a
    # _LinesMapping with is_changed_block=True.
    # NOTE: matching_blocks is 0-based, while _LinesMapping is 1-based.
    for i, block in enumerate(matching_blocks):
        if i == 0:
            # Any lines preceding the very first matching block.
            if block.a != 0 or block.b != 0:
                mappings.append(
                    _LinesMapping(
                        original_start=1,
                        original_end=block.a,
                        modified_start=1,
                        modified_end=block.b,
                        is_changed_block=False,
                    )
                )
        else:
            # The changed region between the previous matching block and
            # this one.
            prev = matching_blocks[i - 1]
            mappings.append(
                _LinesMapping(
                    original_start=prev.a + prev.size + 1,
                    original_end=block.a,
                    modified_start=prev.b + prev.size + 1,
                    modified_end=block.b,
                    is_changed_block=True,
                )
            )
        if i < len(matching_blocks) - 1:
            # The matching (unchanged) block itself; the final dummy block
            # from difflib has size 0 and is skipped.
            mappings.append(
                _LinesMapping(
                    original_start=block.a + 1,
                    original_end=block.a + block.size,
                    modified_start=block.b + 1,
                    modified_end=block.b + block.size,
                    is_changed_block=False,
                )
            )
    return mappings
|
||||
|
||||
|
||||
def _find_lines_mapping_index(
    original_line: int,
    lines_mappings: Sequence[_LinesMapping],
    start_index: int,
) -> int:
    """Return the index of the mapping whose original range covers the line.

    The scan begins at start_index; if no mapping covers original_line, the
    index one past the last examined mapping is returned.
    """
    index = start_index
    for mapping in lines_mappings[start_index:]:
        if mapping.original_start <= original_line <= mapping.original_end:
            return index
        index += 1
    return index
|
@ -1,6 +1,7 @@
|
||||
"""
|
||||
Summarize Black runs to users.
|
||||
"""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
|
0
src/black/resources/__init__.py
Normal file
0
src/black/resources/__init__.py
Normal file
148
src/black/resources/black.schema.json
Normal file
148
src/black/resources/black.schema.json
Normal file
@ -0,0 +1,148 @@
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"$id": "https://github.com/psf/black/blob/main/src/black/resources/black.schema.json",
|
||||
"$comment": "tool.black table in pyproject.toml",
|
||||
"type": "object",
|
||||
"additionalProperties": false,
|
||||
"properties": {
|
||||
"code": {
|
||||
"type": "string",
|
||||
"description": "Format the code passed in as a string."
|
||||
},
|
||||
"line-length": {
|
||||
"type": "integer",
|
||||
"description": "How many characters per line to allow.",
|
||||
"default": 88
|
||||
},
|
||||
"target-version": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"enum": [
|
||||
"py33",
|
||||
"py34",
|
||||
"py35",
|
||||
"py36",
|
||||
"py37",
|
||||
"py38",
|
||||
"py39",
|
||||
"py310",
|
||||
"py311",
|
||||
"py312",
|
||||
"py313"
|
||||
]
|
||||
},
|
||||
"description": "Python versions that should be supported by Black's output. You should include all versions that your code supports. By default, Black will infer target versions from the project metadata in pyproject.toml. If this does not yield conclusive results, Black will use per-file auto-detection."
|
||||
},
|
||||
"pyi": {
|
||||
"type": "boolean",
|
||||
"description": "Format all input files like typing stubs regardless of file extension. This is useful when piping source on standard input.",
|
||||
"default": false
|
||||
},
|
||||
"ipynb": {
|
||||
"type": "boolean",
|
||||
"description": "Format all input files like Jupyter Notebooks regardless of file extension. This is useful when piping source on standard input.",
|
||||
"default": false
|
||||
},
|
||||
"python-cell-magics": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "When processing Jupyter Notebooks, add the given magic to the list of known python-magics (capture, prun, pypy, python, python3, time, timeit). Useful for formatting cells with custom python magics."
|
||||
},
|
||||
"skip-source-first-line": {
|
||||
"type": "boolean",
|
||||
"description": "Skip the first line of the source code.",
|
||||
"default": false
|
||||
},
|
||||
"skip-string-normalization": {
|
||||
"type": "boolean",
|
||||
"description": "Don't normalize string quotes or prefixes.",
|
||||
"default": false
|
||||
},
|
||||
"skip-magic-trailing-comma": {
|
||||
"type": "boolean",
|
||||
"description": "Don't use trailing commas as a reason to split lines.",
|
||||
"default": false
|
||||
},
|
||||
"preview": {
|
||||
"type": "boolean",
|
||||
"description": "Enable potentially disruptive style changes that may be added to Black's main functionality in the next major release.",
|
||||
"default": false
|
||||
},
|
||||
"unstable": {
|
||||
"type": "boolean",
|
||||
"description": "Enable potentially disruptive style changes that have known bugs or are not currently expected to make it into the stable style Black's next major release. Implies --preview.",
|
||||
"default": false
|
||||
},
|
||||
"enable-unstable-feature": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"enum": [
|
||||
"string_processing",
|
||||
"hug_parens_with_braces_and_square_brackets",
|
||||
"wrap_long_dict_values_in_parens",
|
||||
"multiline_string_handling",
|
||||
"always_one_newline_after_import",
|
||||
"fix_fmt_skip_in_one_liners"
|
||||
]
|
||||
},
|
||||
"description": "Enable specific features included in the `--unstable` style. Requires `--preview`. No compatibility guarantees are provided on the behavior or existence of any unstable features."
|
||||
},
|
||||
"check": {
|
||||
"type": "boolean",
|
||||
"description": "Don't write the files back, just return the status. Return code 0 means nothing would change. Return code 1 means some files would be reformatted. Return code 123 means there was an internal error.",
|
||||
"default": false
|
||||
},
|
||||
"diff": {
|
||||
"type": "boolean",
|
||||
"description": "Don't write the files back, just output a diff to indicate what changes Black would've made. They are printed to stdout so capturing them is simple.",
|
||||
"default": false
|
||||
},
|
||||
"color": {
|
||||
"type": "boolean",
|
||||
"description": "Show (or do not show) colored diff. Only applies when --diff is given.",
|
||||
"default": false
|
||||
},
|
||||
"fast": {
|
||||
"type": "boolean",
|
||||
"description": "By default, Black performs an AST safety check after formatting your code. The --fast flag turns off this check and the --safe flag explicitly enables it. [default: --safe]",
|
||||
"default": false
|
||||
},
|
||||
"required-version": {
|
||||
"type": "string",
|
||||
"description": "Require a specific version of Black to be running. This is useful for ensuring that all contributors to your project are using the same version, because different versions of Black may format code a little differently. This option can be set in a configuration file for consistent results across environments."
|
||||
},
|
||||
"exclude": {
|
||||
"type": "string",
|
||||
"description": "A regular expression that matches files and directories that should be excluded on recursive searches. An empty value means no paths are excluded. Use forward slashes for directories on all platforms (Windows, too). By default, Black also ignores all paths listed in .gitignore. Changing this value will override all default exclusions. [default: /(\\.direnv|\\.eggs|\\.git|\\.hg|\\.ipynb_checkpoints|\\.mypy_cache|\\.nox|\\.pytest_cache|\\.ruff_cache|\\.tox|\\.svn|\\.venv|\\.vscode|__pypackages__|_build|buck-out|build|dist|venv)/]"
|
||||
},
|
||||
"extend-exclude": {
|
||||
"type": "string",
|
||||
"description": "Like --exclude, but adds additional files and directories on top of the default values instead of overriding them."
|
||||
},
|
||||
"force-exclude": {
|
||||
"type": "string",
|
||||
"description": "Like --exclude, but files and directories matching this regex will be excluded even when they are passed explicitly as arguments. This is useful when invoking Black programmatically on changed files, such as in a pre-commit hook or editor plugin."
|
||||
},
|
||||
"include": {
|
||||
"type": "string",
|
||||
"description": "A regular expression that matches files and directories that should be included on recursive searches. An empty value means all files are included regardless of the name. Use forward slashes for directories on all platforms (Windows, too). Overrides all exclusions, including from .gitignore and command line options.",
|
||||
"default": "(\\.pyi?|\\.ipynb)$"
|
||||
},
|
||||
"workers": {
|
||||
"type": "integer",
|
||||
"description": "When Black formats multiple files, it may use a process pool to speed up formatting. This option controls the number of parallel workers. This can also be specified via the BLACK_NUM_WORKERS environment variable. Defaults to the number of CPUs in the system."
|
||||
},
|
||||
"quiet": {
|
||||
"type": "boolean",
|
||||
      "description": "Stop emitting all non-critical output. Error messages will still be emitted (which can be silenced by 2>/dev/null).",
|
||||
"default": false
|
||||
},
|
||||
"verbose": {
|
||||
"type": "boolean",
|
||||
"description": "Emit messages about files that were not changed or were ignored due to exclusion patterns. If Black is using a configuration file, a message detailing which one it is using will be emitted.",
|
||||
"default": false
|
||||
}
|
||||
}
|
||||
}
|
@ -2,6 +2,7 @@
|
||||
|
||||
See https://doc.rust-lang.org/book/ch09-00-error-handling.html.
|
||||
"""
|
||||
|
||||
from typing import Generic, TypeVar, Union
|
||||
|
||||
T = TypeVar("T")
|
||||
|
15
src/black/schema.py
Normal file
15
src/black/schema.py
Normal file
@ -0,0 +1,15 @@
|
||||
import importlib.resources
|
||||
import json
|
||||
from typing import Any
|
||||
|
||||
|
||||
def get_schema(tool_name: str = "black") -> Any:
    """Get the stored complete schema for black's settings."""
    assert tool_name == "black", "Only black is supported."

    # The schema ships as package data alongside the black.resources package.
    resource = importlib.resources.files("black.resources").joinpath(
        "black.schema.json"
    )
    with resource.open(encoding="utf-8") as f:
        return json.load(f)
|
@ -5,22 +5,16 @@
|
||||
import re
|
||||
import sys
|
||||
from functools import lru_cache
|
||||
from typing import List, Match, Pattern
|
||||
|
||||
from blib2to3.pytree import Leaf
|
||||
|
||||
if sys.version_info < (3, 8):
|
||||
from typing_extensions import Final
|
||||
else:
|
||||
from typing import Final
|
||||
from re import Match, Pattern
|
||||
from typing import Final
|
||||
|
||||
from black._width_table import WIDTH_TABLE
|
||||
from blib2to3.pytree import Leaf
|
||||
|
||||
STRING_PREFIX_CHARS: Final = "furbFURB" # All possible string prefix characters.
|
||||
STRING_PREFIX_RE: Final = re.compile(
|
||||
r"^([" + STRING_PREFIX_CHARS + r"]*)(.*)$", re.DOTALL
|
||||
)
|
||||
FIRST_NON_WHITESPACE_RE: Final = re.compile(r"\s*\t+\s*(\S)")
|
||||
UNICODE_ESCAPE_RE: Final = re.compile(
|
||||
r"(?P<backslashes>\\+)(?P<body>"
|
||||
r"(u(?P<u>[a-fA-F0-9]{4}))" # Character with 16-bit hex value xxxx
|
||||
@ -50,32 +44,28 @@ def has_triple_quotes(string: str) -> bool:
|
||||
return raw_string[:3] in {'"""', "'''"}
|
||||
|
||||
|
||||
def lines_with_leading_tabs_expanded(s: str) -> List[str]:
|
||||
def lines_with_leading_tabs_expanded(s: str) -> list[str]:
|
||||
"""
|
||||
Splits string into lines and expands only leading tabs (following the normal
|
||||
Python rules)
|
||||
"""
|
||||
lines = []
|
||||
for line in s.splitlines():
|
||||
# Find the index of the first non-whitespace character after a string of
|
||||
# whitespace that includes at least one tab
|
||||
match = FIRST_NON_WHITESPACE_RE.match(line)
|
||||
if match:
|
||||
first_non_whitespace_idx = match.start(1)
|
||||
|
||||
lines.append(
|
||||
line[:first_non_whitespace_idx].expandtabs()
|
||||
+ line[first_non_whitespace_idx:]
|
||||
)
|
||||
else:
|
||||
stripped_line = line.lstrip()
|
||||
if not stripped_line or stripped_line == line:
|
||||
lines.append(line)
|
||||
else:
|
||||
prefix_length = len(line) - len(stripped_line)
|
||||
prefix = line[:prefix_length].expandtabs()
|
||||
lines.append(prefix + stripped_line)
|
||||
if s.endswith("\n"):
|
||||
lines.append("")
|
||||
return lines
|
||||
|
||||
|
||||
def fix_docstring(docstring: str, prefix: str) -> str:
|
||||
def fix_multiline_docstring(docstring: str, prefix: str) -> str:
|
||||
# https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation
|
||||
if not docstring:
|
||||
return ""
|
||||
assert docstring, "INTERNAL ERROR: Multiline docstrings cannot be empty"
|
||||
lines = lines_with_leading_tabs_expanded(docstring)
|
||||
# Determine minimum indentation (first line doesn't count):
|
||||
indent = sys.maxsize
|
||||
@ -179,8 +169,7 @@ def _cached_compile(pattern: str) -> Pattern[str]:
|
||||
def normalize_string_quotes(s: str) -> str:
|
||||
"""Prefer double quotes but only if it doesn't cause more escaping.
|
||||
|
||||
Adds or removes backslashes as appropriate. Doesn't parse and fix
|
||||
strings nested in f-strings.
|
||||
Adds or removes backslashes as appropriate.
|
||||
"""
|
||||
value = s.lstrip(STRING_PREFIX_CHARS)
|
||||
if value[:3] == '"""':
|
||||
@ -196,8 +185,7 @@ def normalize_string_quotes(s: str) -> str:
|
||||
orig_quote = "'"
|
||||
new_quote = '"'
|
||||
first_quote_pos = s.find(orig_quote)
|
||||
if first_quote_pos == -1:
|
||||
return s # There's an internal error
|
||||
assert first_quote_pos != -1, f"INTERNAL ERROR: Malformed string {s!r}"
|
||||
|
||||
prefix = s[:first_quote_pos]
|
||||
unescaped_new_quote = _cached_compile(rf"(([^\\]|^)(\\\\)*){new_quote}")
|
||||
@ -221,6 +209,7 @@ def normalize_string_quotes(s: str) -> str:
|
||||
s = f"{prefix}{orig_quote}{body}{orig_quote}"
|
||||
new_body = sub_twice(escaped_orig_quote, rf"\1\2{orig_quote}", new_body)
|
||||
new_body = sub_twice(unescaped_new_quote, rf"\1\\{new_quote}", new_body)
|
||||
|
||||
if "f" in prefix.casefold():
|
||||
matches = re.findall(
|
||||
r"""
|
||||
@ -250,6 +239,71 @@ def normalize_string_quotes(s: str) -> str:
|
||||
return f"{prefix}{new_quote}{new_body}{new_quote}"
|
||||
|
||||
|
||||
def normalize_fstring_quotes(
    quote: str,
    middles: list[Leaf],
    is_raw_fstring: bool,
) -> tuple[list[Leaf], str]:
    """Prefer double quotes but only if it doesn't cause more escaping.

    Adds or removes backslashes as appropriate.

    quote is the f-string's current quote ('"', "'", '\"\"\"' or "'''");
    middles are the literal text segments between the f-string's replacement
    fields, which may be mutated in place. Returns the (possibly mutated)
    middles together with the quote to use.
    """
    if quote == '"""':
        return middles, quote

    elif quote == "'''":
        new_quote = '"""'
    elif quote == '"':
        new_quote = "'"
    else:
        new_quote = '"'

    # Patterns matching the candidate quote unescaped / escaped, and the
    # original quote escaped, each allowing for preceding doubled backslashes.
    unescaped_new_quote = _cached_compile(rf"(([^\\]|^)(\\\\)*){new_quote}")
    escaped_new_quote = _cached_compile(rf"([^\\]|^)\\((?:\\\\)*){new_quote}")
    escaped_orig_quote = _cached_compile(rf"([^\\]|^)\\((?:\\\\)*){quote}")
    if is_raw_fstring:
        for middle in middles:
            if unescaped_new_quote.search(middle.value):
                # There's at least one unescaped new_quote in this raw string
                # so converting is impossible
                return middles, quote

        # Do not introduce or remove backslashes in raw strings, just use double quote
        return middles, '"'

    new_segments = []
    for middle in middles:
        segment = middle.value
        # remove unnecessary escapes
        new_segment = sub_twice(escaped_new_quote, rf"\1\2{new_quote}", segment)
        if segment != new_segment:
            # Consider the string without unnecessary escapes as the original
            middle.value = new_segment

        # Unescape the original quote and escape any bare occurrence of the
        # candidate quote.
        new_segment = sub_twice(escaped_orig_quote, rf"\1\2{quote}", new_segment)
        new_segment = sub_twice(unescaped_new_quote, rf"\1\\{new_quote}", new_segment)
        new_segments.append(new_segment)

    if new_quote == '"""' and new_segments[-1].endswith('"'):
        # edge case: a trailing bare '"' would run into the closing triple
        # quote, so escape it
        new_segments[-1] = new_segments[-1][:-1] + '\\"'

    # Only switch quotes when it does not increase escaping anywhere.
    for middle, new_segment in zip(middles, new_segments):
        orig_escape_count = middle.value.count("\\")
        new_escape_count = new_segment.count("\\")

        if new_escape_count > orig_escape_count:
            return middles, quote  # Do not introduce more escaping

        if new_escape_count == orig_escape_count and quote == '"':
            return middles, quote  # Prefer double quotes

    # Commit: rewrite every segment for the new quote style.
    for middle, new_segment in zip(middles, new_segments):
        middle.value = new_segment

    return middles, new_quote
|
||||
|
||||
|
||||
def normalize_unicode_escape_sequences(leaf: Leaf) -> None:
|
||||
"""Replace hex codes in Unicode escape sequences with lowercase representation."""
|
||||
text = leaf.value
|
||||
|
@ -1,32 +1,13 @@
|
||||
"""
|
||||
String transformers that can split and merge strings.
|
||||
"""
|
||||
|
||||
import re
|
||||
import sys
|
||||
from abc import ABC, abstractmethod
|
||||
from collections import defaultdict
|
||||
from collections.abc import Callable, Collection, Iterable, Iterator, Sequence
|
||||
from dataclasses import dataclass
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
ClassVar,
|
||||
Collection,
|
||||
Dict,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
Optional,
|
||||
Sequence,
|
||||
Set,
|
||||
Tuple,
|
||||
TypeVar,
|
||||
Union,
|
||||
)
|
||||
|
||||
if sys.version_info < (3, 8):
|
||||
from typing_extensions import Final, Literal
|
||||
else:
|
||||
from typing import Literal, Final
|
||||
from typing import Any, ClassVar, Final, Literal, Optional, TypeVar, Union
|
||||
|
||||
from mypy_extensions import trait
|
||||
|
||||
@ -71,7 +52,7 @@ class CannotTransform(Exception):
|
||||
ParserState = int
|
||||
StringID = int
|
||||
TResult = Result[T, CannotTransform] # (T)ransform Result
|
||||
TMatchResult = TResult[List[Index]]
|
||||
TMatchResult = TResult[list[Index]]
|
||||
|
||||
SPLIT_SAFE_CHARS = frozenset(["\u3001", "\u3002", "\uff0c"]) # East Asian stops
|
||||
|
||||
@ -97,43 +78,30 @@ def hug_power_op(
|
||||
else:
|
||||
raise CannotTransform("No doublestar token was found in the line.")
|
||||
|
||||
def is_simple_lookup(index: int, step: Literal[1, -1]) -> bool:
|
||||
def is_simple_lookup(index: int, kind: Literal[1, -1]) -> bool:
|
||||
# Brackets and parentheses indicate calls, subscripts, etc. ...
|
||||
# basically stuff that doesn't count as "simple". Only a NAME lookup
|
||||
# or dotted lookup (eg. NAME.NAME) is OK.
|
||||
if step == -1:
|
||||
disallowed = {token.RPAR, token.RSQB}
|
||||
if kind == -1:
|
||||
return handle_is_simple_look_up_prev(line, index, {token.RPAR, token.RSQB})
|
||||
else:
|
||||
disallowed = {token.LPAR, token.LSQB}
|
||||
return handle_is_simple_lookup_forward(
|
||||
line, index, {token.LPAR, token.LSQB}
|
||||
)
|
||||
|
||||
while 0 <= index < len(line.leaves):
|
||||
current = line.leaves[index]
|
||||
if current.type in disallowed:
|
||||
return False
|
||||
if current.type not in {token.NAME, token.DOT} or current.value == "for":
|
||||
# If the current token isn't disallowed, we'll assume this is simple as
|
||||
# only the disallowed tokens are semantically attached to this lookup
|
||||
# expression we're checking. Also, stop early if we hit the 'for' bit
|
||||
# of a comprehension.
|
||||
return True
|
||||
|
||||
index += step
|
||||
|
||||
return True
|
||||
|
||||
def is_simple_operand(index: int, kind: Literal["base", "exponent"]) -> bool:
|
||||
def is_simple_operand(index: int, kind: Literal[1, -1]) -> bool:
|
||||
# An operand is considered "simple" if's a NAME, a numeric CONSTANT, a simple
|
||||
# lookup (see above), with or without a preceding unary operator.
|
||||
start = line.leaves[index]
|
||||
if start.type in {token.NAME, token.NUMBER}:
|
||||
return is_simple_lookup(index, step=(1 if kind == "exponent" else -1))
|
||||
return is_simple_lookup(index, kind)
|
||||
|
||||
if start.type in {token.PLUS, token.MINUS, token.TILDE}:
|
||||
if line.leaves[index + 1].type in {token.NAME, token.NUMBER}:
|
||||
# step is always one as bases with a preceding unary op will be checked
|
||||
# kind is always one as bases with a preceding unary op will be checked
|
||||
# for simplicity starting from the next token (so it'll hit the check
|
||||
# above).
|
||||
return is_simple_lookup(index + 1, step=1)
|
||||
return is_simple_lookup(index + 1, kind=1)
|
||||
|
||||
return False
|
||||
|
||||
@ -148,9 +116,9 @@ def is_simple_operand(index: int, kind: Literal["base", "exponent"]) -> bool:
|
||||
should_hug = (
|
||||
(0 < idx < len(line.leaves) - 1)
|
||||
and leaf.type == token.DOUBLESTAR
|
||||
and is_simple_operand(idx - 1, kind="base")
|
||||
and is_simple_operand(idx - 1, kind=-1)
|
||||
and line.leaves[idx - 1].value != "lambda"
|
||||
and is_simple_operand(idx + 1, kind="exponent")
|
||||
and is_simple_operand(idx + 1, kind=1)
|
||||
)
|
||||
if should_hug:
|
||||
new_leaf.prefix = ""
|
||||
@ -165,6 +133,75 @@ def is_simple_operand(index: int, kind: Literal["base", "exponent"]) -> bool:
|
||||
yield new_line
|
||||
|
||||
|
||||
def handle_is_simple_look_up_prev(line: Line, index: int, disallowed: set[int]) -> bool:
    """
    Decide is_simple_lookup for the leaves before the doublestar token.

    Walks backwards while the visited leaves still form one chained
    expression, so that brackets or parentheses can be attributed to the
    single expression being inspected; the lookup is simple iff no
    disallowed token appeared inside that chain.
    """
    seen_disallowed = False
    chain: list = []

    position = index
    while 0 <= position < len(line.leaves):
        leaf = line.leaves[position]
        chain.append(leaf)
        seen_disallowed = seen_disallowed or leaf.type in disallowed
        if not is_expression_chained(chain):
            return not seen_disallowed
        position -= 1

    return True
|
||||
|
||||
|
||||
def handle_is_simple_lookup_forward(
    line: Line, index: int, disallowed: set[int]
) -> bool:
    """
    Decide is_simple_lookup for the leaves after the doublestar token.

    The forward direction is simpler than the backward one and needs no
    chained-expression tracking: only the disallowed tokens are semantically
    attached to the lookup expression being checked.
    """
    position = index
    while 0 <= position < len(line.leaves):
        leaf = line.leaves[position]
        if leaf.type in disallowed:
            return False
        part_of_lookup = leaf.type in {token.NAME, token.DOT}
        comprehension_for = leaf.type == token.NAME and leaf.value == "for"
        if not part_of_lookup or comprehension_for:
            # Anything other than a NAME/DOT ends the lookup expression and,
            # not being disallowed, counts as simple. Also stop early at the
            # 'for' of a comprehension.
            return True
        position += 1

    return True
|
||||
|
||||
|
||||
def is_expression_chained(chained_leaves: list[Leaf]) -> bool:
    """
    Return True if the collected leaves still form one chained call.

    (e.g., foo.lookup, foo().lookup, (foo.lookup()) are recognized as
    chained calls.) The list is built while scanning backwards, so the last
    element is the leaf seen most recently (leftmost in the source).
    """
    if len(chained_leaves) < 2:
        return True

    newest = chained_leaves[-1]
    prior = chained_leaves[-2]

    if prior.type == token.NAME:
        return newest.type == token.DOT
    if prior.type in {token.RPAR, token.RSQB}:
        return newest.type in {token.RSQB, token.RPAR}
    if prior.type in {token.LPAR, token.LSQB}:
        return newest.type in {token.NAME, token.LPAR, token.LSQB}
    return False
|
||||
|
||||
|
||||
class StringTransformer(ABC):
|
||||
"""
|
||||
An implementation of the Transformer protocol that relies on its
|
||||
@ -215,7 +252,7 @@ def do_match(self, line: Line) -> TMatchResult:
|
||||
|
||||
@abstractmethod
|
||||
def do_transform(
|
||||
self, line: Line, string_indices: List[int]
|
||||
self, line: Line, string_indices: list[int]
|
||||
) -> Iterator[TResult[Line]]:
|
||||
"""
|
||||
Yields:
|
||||
@ -305,8 +342,8 @@ class CustomSplitMapMixin:
|
||||
the resultant substrings go over the configured max line length.
|
||||
"""
|
||||
|
||||
_Key: ClassVar = Tuple[StringID, str]
|
||||
_CUSTOM_SPLIT_MAP: ClassVar[Dict[_Key, Tuple[CustomSplit, ...]]] = defaultdict(
|
||||
_Key: ClassVar = tuple[StringID, str]
|
||||
_CUSTOM_SPLIT_MAP: ClassVar[dict[_Key, tuple[CustomSplit, ...]]] = defaultdict(
|
||||
tuple
|
||||
)
|
||||
|
||||
@ -330,7 +367,7 @@ def add_custom_splits(
|
||||
key = self._get_key(string)
|
||||
self._CUSTOM_SPLIT_MAP[key] = tuple(custom_splits)
|
||||
|
||||
def pop_custom_splits(self, string: str) -> List[CustomSplit]:
|
||||
def pop_custom_splits(self, string: str) -> list[CustomSplit]:
|
||||
"""Custom Split Map Getter Method
|
||||
|
||||
Returns:
|
||||
@ -393,7 +430,19 @@ def do_match(self, line: Line) -> TMatchResult:
|
||||
and is_valid_index(idx + 1)
|
||||
and LL[idx + 1].type == token.STRING
|
||||
):
|
||||
if not is_part_of_annotation(leaf):
|
||||
# Let's check if the string group contains an inline comment
|
||||
# If we have a comment inline, we don't merge the strings
|
||||
contains_comment = False
|
||||
i = idx
|
||||
while is_valid_index(i):
|
||||
if LL[i].type != token.STRING:
|
||||
break
|
||||
if line.comments_after(LL[i]):
|
||||
contains_comment = True
|
||||
break
|
||||
i += 1
|
||||
|
||||
if not contains_comment and not is_part_of_annotation(leaf):
|
||||
string_indices.append(idx)
|
||||
|
||||
# Advance to the next non-STRING leaf.
|
||||
@ -417,7 +466,7 @@ def do_match(self, line: Line) -> TMatchResult:
|
||||
return TErr("This line has no strings that need merging.")
|
||||
|
||||
def do_transform(
|
||||
self, line: Line, string_indices: List[int]
|
||||
self, line: Line, string_indices: list[int]
|
||||
) -> Iterator[TResult[Line]]:
|
||||
new_line = line
|
||||
|
||||
@ -448,7 +497,7 @@ def do_transform(
|
||||
|
||||
@staticmethod
|
||||
def _remove_backslash_line_continuation_chars(
|
||||
line: Line, string_indices: List[int]
|
||||
line: Line, string_indices: list[int]
|
||||
) -> TResult[Line]:
|
||||
"""
|
||||
Merge strings that were split across multiple lines using
|
||||
@ -489,7 +538,7 @@ def _remove_backslash_line_continuation_chars(
|
||||
return Ok(new_line)
|
||||
|
||||
def _merge_string_group(
|
||||
self, line: Line, string_indices: List[int]
|
||||
self, line: Line, string_indices: list[int]
|
||||
) -> TResult[Line]:
|
||||
"""
|
||||
Merges string groups (i.e. set of adjacent strings).
|
||||
@ -508,7 +557,7 @@ def _merge_string_group(
|
||||
is_valid_index = is_valid_index_factory(LL)
|
||||
|
||||
# A dict of {string_idx: tuple[num_of_strings, string_leaf]}.
|
||||
merged_string_idx_dict: Dict[int, Tuple[int, Leaf]] = {}
|
||||
merged_string_idx_dict: dict[int, tuple[int, Leaf]] = {}
|
||||
for string_idx in string_indices:
|
||||
vresult = self._validate_msg(line, string_idx)
|
||||
if isinstance(vresult, Err):
|
||||
@ -544,8 +593,8 @@ def _merge_string_group(
|
||||
return Ok(new_line)
|
||||
|
||||
def _merge_one_string_group(
|
||||
self, LL: List[Leaf], string_idx: int, is_valid_index: Callable[[int], bool]
|
||||
) -> Tuple[int, Leaf]:
|
||||
self, LL: list[Leaf], string_idx: int, is_valid_index: Callable[[int], bool]
|
||||
) -> tuple[int, Leaf]:
|
||||
"""
|
||||
Merges one string group where the first string in the group is
|
||||
`LL[string_idx]`.
|
||||
@ -581,6 +630,17 @@ def make_naked(string: str, string_prefix: str) -> str:
|
||||
"""
|
||||
assert_is_leaf_string(string)
|
||||
if "f" in string_prefix:
|
||||
f_expressions = [
|
||||
string[span[0] + 1 : span[1] - 1] # +-1 to get rid of curly braces
|
||||
for span in iter_fexpr_spans(string)
|
||||
]
|
||||
debug_expressions_contain_visible_quotes = any(
|
||||
re.search(r".*[\'\"].*(?<![!:=])={1}(?!=)(?![^\s:])", expression)
|
||||
for expression in f_expressions
|
||||
)
|
||||
if not debug_expressions_contain_visible_quotes:
|
||||
# We don't want to toggle visible quotes in debug f-strings, as
|
||||
# that would modify the AST
|
||||
string = _toggle_fexpr_quotes(string, QUOTE)
|
||||
# After quotes toggling, quotes in expressions won't be escaped
|
||||
# because quotes can't be reused in f-strings. So we can simply
|
||||
@ -704,6 +764,8 @@ def _validate_msg(line: Line, string_idx: int) -> TResult[None]:
|
||||
- The set of all string prefixes in the string group is of
|
||||
length greater than one and is not equal to {"", "f"}.
|
||||
- The string group consists of raw strings.
|
||||
- The string group would merge f-strings with different quote types
|
||||
and internal quotes.
|
||||
- The string group is stringified type annotations. We don't want to
|
||||
process stringified type annotations since pyright doesn't support
|
||||
them spanning multiple string values. (NOTE: mypy, pytype, pyre do
|
||||
@ -730,6 +792,8 @@ def _validate_msg(line: Line, string_idx: int) -> TResult[None]:
|
||||
|
||||
i += inc
|
||||
|
||||
QUOTE = line.leaves[string_idx].value[-1]
|
||||
|
||||
num_of_inline_string_comments = 0
|
||||
set_of_prefixes = set()
|
||||
num_of_strings = 0
|
||||
@ -752,6 +816,19 @@ def _validate_msg(line: Line, string_idx: int) -> TResult[None]:
|
||||
|
||||
set_of_prefixes.add(prefix)
|
||||
|
||||
if (
|
||||
"f" in prefix
|
||||
and leaf.value[-1] != QUOTE
|
||||
and (
|
||||
"'" in leaf.value[len(prefix) + 1 : -1]
|
||||
or '"' in leaf.value[len(prefix) + 1 : -1]
|
||||
)
|
||||
):
|
||||
return TErr(
|
||||
"StringMerger does NOT merge f-strings with different quote types"
|
||||
" and internal quotes."
|
||||
)
|
||||
|
||||
if id(leaf) in line.comments:
|
||||
num_of_inline_string_comments += 1
|
||||
if contains_pragma_comment(line.comments[id(leaf)]):
|
||||
@ -780,6 +857,7 @@ class StringParenStripper(StringTransformer):
|
||||
The line contains a string which is surrounded by parentheses and:
|
||||
- The target string is NOT the only argument to a function call.
|
||||
- The target string is NOT a "pointless" string.
|
||||
- The target string is NOT a dictionary value.
|
||||
- If the target string contains a PERCENT, the brackets are not
|
||||
preceded or followed by an operator with higher precedence than
|
||||
PERCENT.
|
||||
@ -827,11 +905,14 @@ def do_match(self, line: Line) -> TMatchResult:
|
||||
):
|
||||
continue
|
||||
|
||||
# That LPAR should NOT be preceded by a function name or a closing
|
||||
# bracket (which could be a function which returns a function or a
|
||||
# list/dictionary that contains a function)...
|
||||
# That LPAR should NOT be preceded by a colon (which could be a
|
||||
# dictionary value), function name, or a closing bracket (which
|
||||
# could be a function returning a function or a list/dictionary
|
||||
# containing a function)...
|
||||
if is_valid_index(idx - 2) and (
|
||||
LL[idx - 2].type == token.NAME or LL[idx - 2].type in CLOSING_BRACKETS
|
||||
LL[idx - 2].type == token.COLON
|
||||
or LL[idx - 2].type == token.NAME
|
||||
or LL[idx - 2].type in CLOSING_BRACKETS
|
||||
):
|
||||
continue
|
||||
|
||||
@ -898,11 +979,11 @@ def do_match(self, line: Line) -> TMatchResult:
|
||||
return TErr("This line has no strings wrapped in parens.")
|
||||
|
||||
def do_transform(
|
||||
self, line: Line, string_indices: List[int]
|
||||
self, line: Line, string_indices: list[int]
|
||||
) -> Iterator[TResult[Line]]:
|
||||
LL = line.leaves
|
||||
|
||||
string_and_rpar_indices: List[int] = []
|
||||
string_and_rpar_indices: list[int] = []
|
||||
for string_idx in string_indices:
|
||||
string_parser = StringParser()
|
||||
rpar_idx = string_parser.parse(LL, string_idx)
|
||||
@ -925,7 +1006,7 @@ def do_transform(
|
||||
)
|
||||
|
||||
def _transform_to_new_line(
|
||||
self, line: Line, string_and_rpar_indices: List[int]
|
||||
self, line: Line, string_and_rpar_indices: list[int]
|
||||
) -> Line:
|
||||
LL = line.leaves
|
||||
|
||||
@ -946,6 +1027,9 @@ def _transform_to_new_line(
|
||||
LL[lpar_or_rpar_idx].remove() # Remove lpar.
|
||||
replace_child(LL[idx], string_leaf)
|
||||
new_line.append(string_leaf)
|
||||
# replace comments
|
||||
old_comments = new_line.comments.pop(id(LL[idx]), [])
|
||||
new_line.comments.setdefault(id(string_leaf), []).extend(old_comments)
|
||||
else:
|
||||
LL[lpar_or_rpar_idx].remove() # This is a rpar.
|
||||
|
||||
@ -970,12 +1054,15 @@ class BaseStringSplitter(StringTransformer):
|
||||
lines after all line splits are performed) would still be over the
|
||||
line_length limit unless we split this string.
|
||||
AND
|
||||
|
||||
* The target string is NOT a "pointless" string (i.e. a string that has
|
||||
no parent or siblings).
|
||||
AND
|
||||
|
||||
* The target string is not followed by an inline comment that appears
|
||||
to be a pragma.
|
||||
AND
|
||||
|
||||
* The target string is not a multiline (i.e. triple-quote) string.
|
||||
"""
|
||||
|
||||
@ -1172,7 +1259,7 @@ def _get_max_string_length(self, line: Line, string_idx: int) -> int:
|
||||
return max_string_length
|
||||
|
||||
@staticmethod
|
||||
def _prefer_paren_wrap_match(LL: List[Leaf]) -> Optional[int]:
|
||||
def _prefer_paren_wrap_match(LL: list[Leaf]) -> Optional[int]:
|
||||
"""
|
||||
Returns:
|
||||
string_idx such that @LL[string_idx] is equal to our target (i.e.
|
||||
@ -1186,10 +1273,24 @@ def _prefer_paren_wrap_match(LL: List[Leaf]) -> Optional[int]:
|
||||
if LL[0].type != token.STRING:
|
||||
return None
|
||||
|
||||
# If the string is surrounded by commas (or is the first/last child)...
|
||||
matching_nodes = [
|
||||
syms.listmaker,
|
||||
syms.dictsetmaker,
|
||||
syms.testlist_gexp,
|
||||
]
|
||||
# If the string is an immediate child of a list/set/tuple literal...
|
||||
if (
|
||||
parent_type(LL[0]) in matching_nodes
|
||||
or parent_type(LL[0].parent) in matching_nodes
|
||||
):
|
||||
# And the string is surrounded by commas (or is the first/last child)...
|
||||
prev_sibling = LL[0].prev_sibling
|
||||
next_sibling = LL[0].next_sibling
|
||||
if not prev_sibling and not next_sibling and parent_type(LL[0]) == syms.atom:
|
||||
if (
|
||||
not prev_sibling
|
||||
and not next_sibling
|
||||
and parent_type(LL[0]) == syms.atom
|
||||
):
|
||||
# If it's an atom string, we need to check the parent atom's siblings.
|
||||
parent = LL[0].parent
|
||||
assert parent is not None # For type checkers.
|
||||
@ -1203,14 +1304,14 @@ def _prefer_paren_wrap_match(LL: List[Leaf]) -> Optional[int]:
|
||||
return None
|
||||
|
||||
|
||||
def iter_fexpr_spans(s: str) -> Iterator[Tuple[int, int]]:
|
||||
def iter_fexpr_spans(s: str) -> Iterator[tuple[int, int]]:
|
||||
"""
|
||||
Yields spans corresponding to expressions in a given f-string.
|
||||
Spans are half-open ranges (left inclusive, right exclusive).
|
||||
Assumes the input string is a valid f-string, but will not crash if the input
|
||||
string is invalid.
|
||||
"""
|
||||
stack: List[int] = [] # our curly paren stack
|
||||
stack: list[int] = [] # our curly paren stack
|
||||
i = 0
|
||||
while i < len(s):
|
||||
if s[i] == "{":
|
||||
@ -1233,7 +1334,7 @@ def iter_fexpr_spans(s: str) -> Iterator[Tuple[int, int]]:
|
||||
i += 1
|
||||
continue
|
||||
|
||||
# if we're in an expression part of the f-string, fast forward through strings
|
||||
# if we're in an expression part of the f-string, fast-forward through strings
|
||||
# note that backslashes are not legal in the expression portion of f-strings
|
||||
if stack:
|
||||
delim = None
|
||||
@ -1373,7 +1474,7 @@ def do_splitter_match(self, line: Line) -> TMatchResult:
|
||||
return Ok([string_idx])
|
||||
|
||||
def do_transform(
|
||||
self, line: Line, string_indices: List[int]
|
||||
self, line: Line, string_indices: list[int]
|
||||
) -> Iterator[TResult[Line]]:
|
||||
LL = line.leaves
|
||||
assert len(string_indices) == 1, (
|
||||
@ -1475,7 +1576,7 @@ def more_splits_should_be_made() -> bool:
|
||||
else:
|
||||
return str_width(rest_value) > max_last_string_column()
|
||||
|
||||
string_line_results: List[Ok[Line]] = []
|
||||
string_line_results: list[Ok[Line]] = []
|
||||
while more_splits_should_be_made():
|
||||
if use_custom_breakpoints:
|
||||
# Custom User Split (manual)
|
||||
@ -1604,7 +1705,7 @@ def more_splits_should_be_made() -> bool:
|
||||
last_line.comments = line.comments.copy()
|
||||
yield Ok(last_line)
|
||||
|
||||
def _iter_nameescape_slices(self, string: str) -> Iterator[Tuple[Index, Index]]:
|
||||
def _iter_nameescape_slices(self, string: str) -> Iterator[tuple[Index, Index]]:
|
||||
"""
|
||||
Yields:
|
||||
All ranges of @string which, if @string were to be split there,
|
||||
@ -1635,7 +1736,7 @@ def _iter_nameescape_slices(self, string: str) -> Iterator[Tuple[Index, Index]]:
|
||||
raise RuntimeError(f"{self.__class__.__name__} LOGIC ERROR!")
|
||||
yield begin, end
|
||||
|
||||
def _iter_fexpr_slices(self, string: str) -> Iterator[Tuple[Index, Index]]:
|
||||
def _iter_fexpr_slices(self, string: str) -> Iterator[tuple[Index, Index]]:
|
||||
"""
|
||||
Yields:
|
||||
All ranges of @string which, if @string were to be split there,
|
||||
@ -1646,8 +1747,8 @@ def _iter_fexpr_slices(self, string: str) -> Iterator[Tuple[Index, Index]]:
|
||||
return
|
||||
yield from iter_fexpr_spans(string)
|
||||
|
||||
def _get_illegal_split_indices(self, string: str) -> Set[Index]:
|
||||
illegal_indices: Set[Index] = set()
|
||||
def _get_illegal_split_indices(self, string: str) -> set[Index]:
|
||||
illegal_indices: set[Index] = set()
|
||||
iterators = [
|
||||
self._iter_fexpr_slices(string),
|
||||
self._iter_nameescape_slices(string),
|
||||
@ -1700,7 +1801,7 @@ def passes_all_checks(i: Index) -> bool:
|
||||
"""
|
||||
Returns:
|
||||
True iff ALL of the conditions listed in the 'Transformations'
|
||||
section of this classes' docstring would be be met by returning @i.
|
||||
section of this classes' docstring would be met by returning @i.
|
||||
"""
|
||||
is_space = string[i] == " "
|
||||
is_split_safe = is_valid_index(i - 1) and string[i - 1] in SPLIT_SAFE_CHARS
|
||||
@ -1773,7 +1874,7 @@ def _normalize_f_string(self, string: str, prefix: str) -> str:
|
||||
else:
|
||||
return string
|
||||
|
||||
def _get_string_operator_leaves(self, leaves: Iterable[Leaf]) -> List[Leaf]:
|
||||
def _get_string_operator_leaves(self, leaves: Iterable[Leaf]) -> list[Leaf]:
|
||||
LL = list(leaves)
|
||||
|
||||
string_op_leaves = []
|
||||
@ -1811,8 +1912,9 @@ class StringParenWrapper(BaseStringSplitter, CustomSplitMapMixin):
|
||||
* The line is an lambda expression and the value is a string.
|
||||
OR
|
||||
* The line starts with an "atom" string that prefers to be wrapped in
|
||||
parens. It's preferred to be wrapped when the string is surrounded by
|
||||
commas (or is the first/last child).
|
||||
parens. It's preferred to be wrapped when it's is an immediate child of
|
||||
a list/set/tuple literal, AND the string is surrounded by commas (or is
|
||||
the first/last child).
|
||||
|
||||
Transformations:
|
||||
The chosen string is wrapped in parentheses and then split at the LPAR.
|
||||
@ -1881,7 +1983,7 @@ def do_splitter_match(self, line: Line) -> TMatchResult:
|
||||
return TErr("This line does not contain any non-atomic strings.")
|
||||
|
||||
@staticmethod
|
||||
def _return_match(LL: List[Leaf]) -> Optional[int]:
|
||||
def _return_match(LL: list[Leaf]) -> Optional[int]:
|
||||
"""
|
||||
Returns:
|
||||
string_idx such that @LL[string_idx] is equal to our target (i.e.
|
||||
@ -1891,7 +1993,7 @@ def _return_match(LL: List[Leaf]) -> Optional[int]:
|
||||
OR
|
||||
None, otherwise.
|
||||
"""
|
||||
# If this line is apart of a return/yield statement and the first leaf
|
||||
# If this line is a part of a return/yield statement and the first leaf
|
||||
# contains either the "return" or "yield" keywords...
|
||||
if parent_type(LL[0]) in [syms.return_stmt, syms.yield_expr] and LL[
|
||||
0
|
||||
@ -1906,7 +2008,7 @@ def _return_match(LL: List[Leaf]) -> Optional[int]:
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _else_match(LL: List[Leaf]) -> Optional[int]:
|
||||
def _else_match(LL: list[Leaf]) -> Optional[int]:
|
||||
"""
|
||||
Returns:
|
||||
string_idx such that @LL[string_idx] is equal to our target (i.e.
|
||||
@ -1916,7 +2018,7 @@ def _else_match(LL: List[Leaf]) -> Optional[int]:
|
||||
OR
|
||||
None, otherwise.
|
||||
"""
|
||||
# If this line is apart of a ternary expression and the first leaf
|
||||
# If this line is a part of a ternary expression and the first leaf
|
||||
# contains the "else" keyword...
|
||||
if (
|
||||
parent_type(LL[0]) == syms.test
|
||||
@ -1933,7 +2035,7 @@ def _else_match(LL: List[Leaf]) -> Optional[int]:
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _assert_match(LL: List[Leaf]) -> Optional[int]:
|
||||
def _assert_match(LL: list[Leaf]) -> Optional[int]:
|
||||
"""
|
||||
Returns:
|
||||
string_idx such that @LL[string_idx] is equal to our target (i.e.
|
||||
@ -1943,7 +2045,7 @@ def _assert_match(LL: List[Leaf]) -> Optional[int]:
|
||||
OR
|
||||
None, otherwise.
|
||||
"""
|
||||
# If this line is apart of an assert statement and the first leaf
|
||||
# If this line is a part of an assert statement and the first leaf
|
||||
# contains the "assert" keyword...
|
||||
if parent_type(LL[0]) == syms.assert_stmt and LL[0].value == "assert":
|
||||
is_valid_index = is_valid_index_factory(LL)
|
||||
@ -1968,7 +2070,7 @@ def _assert_match(LL: List[Leaf]) -> Optional[int]:
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _assign_match(LL: List[Leaf]) -> Optional[int]:
|
||||
def _assign_match(LL: list[Leaf]) -> Optional[int]:
|
||||
"""
|
||||
Returns:
|
||||
string_idx such that @LL[string_idx] is equal to our target (i.e.
|
||||
@ -1978,7 +2080,7 @@ def _assign_match(LL: List[Leaf]) -> Optional[int]:
|
||||
OR
|
||||
None, otherwise.
|
||||
"""
|
||||
# If this line is apart of an expression statement or is a function
|
||||
# If this line is a part of an expression statement or is a function
|
||||
# argument AND the first leaf contains a variable name...
|
||||
if (
|
||||
parent_type(LL[0]) in [syms.expr_stmt, syms.argument, syms.power]
|
||||
@ -1999,7 +2101,7 @@ def _assign_match(LL: List[Leaf]) -> Optional[int]:
|
||||
string_parser = StringParser()
|
||||
idx = string_parser.parse(LL, string_idx)
|
||||
|
||||
# The next leaf MAY be a comma iff this line is apart
|
||||
# The next leaf MAY be a comma iff this line is a part
|
||||
# of a function argument...
|
||||
if (
|
||||
parent_type(LL[0]) == syms.argument
|
||||
@ -2015,7 +2117,7 @@ def _assign_match(LL: List[Leaf]) -> Optional[int]:
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def _dict_or_lambda_match(LL: List[Leaf]) -> Optional[int]:
|
||||
def _dict_or_lambda_match(LL: list[Leaf]) -> Optional[int]:
|
||||
"""
|
||||
Returns:
|
||||
string_idx such that @LL[string_idx] is equal to our target (i.e.
|
||||
@ -2054,7 +2156,7 @@ def _dict_or_lambda_match(LL: List[Leaf]) -> Optional[int]:
|
||||
return None
|
||||
|
||||
def do_transform(
|
||||
self, line: Line, string_indices: List[int]
|
||||
self, line: Line, string_indices: list[int]
|
||||
) -> Iterator[TResult[Line]]:
|
||||
LL = line.leaves
|
||||
assert len(string_indices) == 1, (
|
||||
@ -2136,18 +2238,17 @@ def do_transform(
|
||||
elif right_leaves and right_leaves[-1].type == token.RPAR:
|
||||
# Special case for lambda expressions as dict's value, e.g.:
|
||||
# my_dict = {
|
||||
# "key": lambda x: f"formatted: {x},
|
||||
# "key": lambda x: f"formatted: {x}",
|
||||
# }
|
||||
# After wrapping the dict's value with parentheses, the string is
|
||||
# followed by a RPAR but its opening bracket is lambda's, not
|
||||
# the string's:
|
||||
# "key": (lambda x: f"formatted: {x}),
|
||||
# "key": (lambda x: f"formatted: {x}"),
|
||||
opening_bracket = right_leaves[-1].opening_bracket
|
||||
if opening_bracket is not None and opening_bracket in left_leaves:
|
||||
index = left_leaves.index(opening_bracket)
|
||||
if (
|
||||
index > 0
|
||||
and index < len(left_leaves) - 1
|
||||
0 < index < len(left_leaves) - 1
|
||||
and left_leaves[index - 1].type == token.COLON
|
||||
and left_leaves[index + 1].value == "lambda"
|
||||
):
|
||||
@ -2221,7 +2322,7 @@ class StringParser:
|
||||
DONE: Final = 8
|
||||
|
||||
# Lookup Table for Next State
|
||||
_goto: Final[Dict[Tuple[ParserState, NodeType], ParserState]] = {
|
||||
_goto: Final[dict[tuple[ParserState, NodeType], ParserState]] = {
|
||||
# A string trailer may start with '.' OR '%'.
|
||||
(START, token.DOT): DOT,
|
||||
(START, token.PERCENT): PERCENT,
|
||||
@ -2250,13 +2351,13 @@ def __init__(self) -> None:
|
||||
self._state = self.START
|
||||
self._unmatched_lpars = 0
|
||||
|
||||
def parse(self, leaves: List[Leaf], string_idx: int) -> int:
|
||||
def parse(self, leaves: list[Leaf], string_idx: int) -> int:
|
||||
"""
|
||||
Pre-conditions:
|
||||
* @leaves[@string_idx].type == token.STRING
|
||||
|
||||
Returns:
|
||||
The index directly after the last leaf which is apart of the string
|
||||
The index directly after the last leaf which is a part of the string
|
||||
trailer, if a "trailer" exists.
|
||||
OR
|
||||
@string_idx + 1, if no string "trailer" exists.
|
||||
@ -2279,7 +2380,7 @@ def _next_state(self, leaf: Leaf) -> bool:
|
||||
MUST be the leaf directly following @leaf.
|
||||
|
||||
Returns:
|
||||
True iff @leaf is apart of the string's trailer.
|
||||
True iff @leaf is a part of the string's trailer.
|
||||
"""
|
||||
# We ignore empty LPAR or RPAR leaves.
|
||||
if is_empty_par(leaf):
|
||||
|
@ -1,13 +1,13 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from concurrent.futures import Executor, ProcessPoolExecutor
|
||||
from datetime import datetime
|
||||
from functools import partial
|
||||
from datetime import datetime, timezone
|
||||
from functools import cache, partial
|
||||
from multiprocessing import freeze_support
|
||||
from typing import Set, Tuple
|
||||
|
||||
try:
|
||||
from aiohttp import web
|
||||
from multidict import MultiMapping
|
||||
|
||||
from .middlewares import cors
|
||||
except ImportError as ie:
|
||||
@ -34,6 +34,8 @@
|
||||
SKIP_STRING_NORMALIZATION_HEADER = "X-Skip-String-Normalization"
|
||||
SKIP_MAGIC_TRAILING_COMMA = "X-Skip-Magic-Trailing-Comma"
|
||||
PREVIEW = "X-Preview"
|
||||
UNSTABLE = "X-Unstable"
|
||||
ENABLE_UNSTABLE_FEATURE = "X-Enable-Unstable-Feature"
|
||||
FAST_OR_SAFE_HEADER = "X-Fast-Or-Safe"
|
||||
DIFF_HEADER = "X-Diff"
|
||||
|
||||
@ -45,6 +47,8 @@
|
||||
SKIP_STRING_NORMALIZATION_HEADER,
|
||||
SKIP_MAGIC_TRAILING_COMMA,
|
||||
PREVIEW,
|
||||
UNSTABLE,
|
||||
ENABLE_UNSTABLE_FEATURE,
|
||||
FAST_OR_SAFE_HEADER,
|
||||
DIFF_HEADER,
|
||||
]
|
||||
@ -53,15 +57,25 @@
|
||||
BLACK_VERSION_HEADER = "X-Black-Version"
|
||||
|
||||
|
||||
class HeaderError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class InvalidVariantHeader(Exception):
|
||||
pass
|
||||
|
||||
|
||||
@click.command(context_settings={"help_option_names": ["-h", "--help"]})
|
||||
@click.option(
|
||||
"--bind-host", type=str, help="Address to bind the server to.", default="localhost"
|
||||
"--bind-host",
|
||||
type=str,
|
||||
help="Address to bind the server to.",
|
||||
default="localhost",
|
||||
show_default=True,
|
||||
)
|
||||
@click.option(
|
||||
"--bind-port", type=int, help="Port to listen on", default=45484, show_default=True
|
||||
)
|
||||
@click.option("--bind-port", type=int, help="Port to listen on", default=45484)
|
||||
@click.version_option(version=black.__version__)
|
||||
def main(bind_host: str, bind_port: int) -> None:
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
@ -71,12 +85,16 @@ def main(bind_host: str, bind_port: int) -> None:
|
||||
web.run_app(app, host=bind_host, port=bind_port, handle_signals=True, print=None)
|
||||
|
||||
|
||||
@cache
|
||||
def executor() -> Executor:
|
||||
return ProcessPoolExecutor()
|
||||
|
||||
|
||||
def make_app() -> web.Application:
|
||||
app = web.Application(
|
||||
middlewares=[cors(allow_headers=(*BLACK_HEADERS, "Content-Type"))]
|
||||
)
|
||||
executor = ProcessPoolExecutor()
|
||||
app.add_routes([web.post("/", partial(handle, executor=executor))])
|
||||
app.add_routes([web.post("/", partial(handle, executor=executor()))])
|
||||
return app
|
||||
|
||||
|
||||
@ -87,55 +105,21 @@ async def handle(request: web.Request, executor: Executor) -> web.Response:
|
||||
return web.Response(
|
||||
status=501, text="This server only supports protocol version 1"
|
||||
)
|
||||
try:
|
||||
line_length = int(
|
||||
request.headers.get(LINE_LENGTH_HEADER, black.DEFAULT_LINE_LENGTH)
|
||||
)
|
||||
except ValueError:
|
||||
return web.Response(status=400, text="Invalid line length header value")
|
||||
|
||||
if PYTHON_VARIANT_HEADER in request.headers:
|
||||
value = request.headers[PYTHON_VARIANT_HEADER]
|
||||
try:
|
||||
pyi, versions = parse_python_variant_header(value)
|
||||
except InvalidVariantHeader as e:
|
||||
return web.Response(
|
||||
status=400,
|
||||
text=f"Invalid value for {PYTHON_VARIANT_HEADER}: {e.args[0]}",
|
||||
)
|
||||
else:
|
||||
pyi = False
|
||||
versions = set()
|
||||
|
||||
skip_string_normalization = bool(
|
||||
request.headers.get(SKIP_STRING_NORMALIZATION_HEADER, False)
|
||||
)
|
||||
skip_magic_trailing_comma = bool(
|
||||
request.headers.get(SKIP_MAGIC_TRAILING_COMMA, False)
|
||||
)
|
||||
skip_source_first_line = bool(
|
||||
request.headers.get(SKIP_SOURCE_FIRST_LINE, False)
|
||||
)
|
||||
preview = bool(request.headers.get(PREVIEW, False))
|
||||
fast = False
|
||||
if request.headers.get(FAST_OR_SAFE_HEADER, "safe") == "fast":
|
||||
fast = True
|
||||
mode = black.FileMode(
|
||||
target_versions=versions,
|
||||
is_pyi=pyi,
|
||||
line_length=line_length,
|
||||
skip_source_first_line=skip_source_first_line,
|
||||
string_normalization=not skip_string_normalization,
|
||||
magic_trailing_comma=not skip_magic_trailing_comma,
|
||||
preview=preview,
|
||||
)
|
||||
try:
|
||||
mode = parse_mode(request.headers)
|
||||
except HeaderError as e:
|
||||
return web.Response(status=400, text=e.args[0])
|
||||
req_bytes = await request.content.read()
|
||||
charset = request.charset if request.charset is not None else "utf8"
|
||||
req_str = req_bytes.decode(charset)
|
||||
then = datetime.utcnow()
|
||||
then = datetime.now(timezone.utc)
|
||||
|
||||
header = ""
|
||||
if skip_source_first_line:
|
||||
if mode.skip_source_first_line:
|
||||
first_newline_position: int = req_str.find("\n") + 1
|
||||
header = req_str[:first_newline_position]
|
||||
req_str = req_str[first_newline_position:]
|
||||
@ -146,7 +130,8 @@ async def handle(request: web.Request, executor: Executor) -> web.Response:
|
||||
)
|
||||
|
||||
# Preserve CRLF line endings
|
||||
if req_str[req_str.find("\n") - 1] == "\r":
|
||||
nl = req_str.find("\n")
|
||||
if nl > 0 and req_str[nl - 1] == "\r":
|
||||
formatted_str = formatted_str.replace("\n", "\r\n")
|
||||
# If, after swapping line endings, nothing changed, then say so
|
||||
if formatted_str == req_str:
|
||||
@ -159,9 +144,9 @@ async def handle(request: web.Request, executor: Executor) -> web.Response:
|
||||
# Only output the diff in the HTTP response
|
||||
only_diff = bool(request.headers.get(DIFF_HEADER, False))
|
||||
if only_diff:
|
||||
now = datetime.utcnow()
|
||||
src_name = f"In\t{then} +0000"
|
||||
dst_name = f"Out\t{now} +0000"
|
||||
now = datetime.now(timezone.utc)
|
||||
src_name = f"In\t{then}"
|
||||
dst_name = f"Out\t{now}"
|
||||
loop = asyncio.get_event_loop()
|
||||
formatted_str = await loop.run_in_executor(
|
||||
executor,
|
||||
@ -183,7 +168,58 @@ async def handle(request: web.Request, executor: Executor) -> web.Response:
|
||||
return web.Response(status=500, headers=headers, text=str(e))
|
||||
|
||||
|
||||
def parse_python_variant_header(value: str) -> Tuple[bool, Set[black.TargetVersion]]:
|
||||
def parse_mode(headers: MultiMapping[str]) -> black.Mode:
|
||||
try:
|
||||
line_length = int(headers.get(LINE_LENGTH_HEADER, black.DEFAULT_LINE_LENGTH))
|
||||
except ValueError:
|
||||
raise HeaderError("Invalid line length header value") from None
|
||||
|
||||
if PYTHON_VARIANT_HEADER in headers:
|
||||
value = headers[PYTHON_VARIANT_HEADER]
|
||||
try:
|
||||
pyi, versions = parse_python_variant_header(value)
|
||||
except InvalidVariantHeader as e:
|
||||
raise HeaderError(
|
||||
f"Invalid value for {PYTHON_VARIANT_HEADER}: {e.args[0]}",
|
||||
) from None
|
||||
else:
|
||||
pyi = False
|
||||
versions = set()
|
||||
|
||||
skip_string_normalization = bool(
|
||||
headers.get(SKIP_STRING_NORMALIZATION_HEADER, False)
|
||||
)
|
||||
skip_magic_trailing_comma = bool(headers.get(SKIP_MAGIC_TRAILING_COMMA, False))
|
||||
skip_source_first_line = bool(headers.get(SKIP_SOURCE_FIRST_LINE, False))
|
||||
|
||||
preview = bool(headers.get(PREVIEW, False))
|
||||
unstable = bool(headers.get(UNSTABLE, False))
|
||||
enable_features: set[black.Preview] = set()
|
||||
enable_unstable_features = headers.get(ENABLE_UNSTABLE_FEATURE, "").split(",")
|
||||
for piece in enable_unstable_features:
|
||||
piece = piece.strip()
|
||||
if piece:
|
||||
try:
|
||||
enable_features.add(black.Preview[piece])
|
||||
except KeyError:
|
||||
raise HeaderError(
|
||||
f"Invalid value for {ENABLE_UNSTABLE_FEATURE}: {piece}",
|
||||
) from None
|
||||
|
||||
return black.FileMode(
|
||||
target_versions=versions,
|
||||
is_pyi=pyi,
|
||||
line_length=line_length,
|
||||
skip_source_first_line=skip_source_first_line,
|
||||
string_normalization=not skip_string_normalization,
|
||||
magic_trailing_comma=not skip_magic_trailing_comma,
|
||||
preview=preview,
|
||||
unstable=unstable,
|
||||
enabled_features=enable_features,
|
||||
)
|
||||
|
||||
|
||||
def parse_python_variant_header(value: str) -> tuple[bool, set[black.TargetVersion]]:
|
||||
if value == "pyi":
|
||||
return True, set()
|
||||
else:
|
||||
@ -219,7 +255,6 @@ def parse_python_variant_header(value: str) -> Tuple[bool, Set[black.TargetVersi
|
||||
def patched_main() -> None:
|
||||
maybe_install_uvloop()
|
||||
freeze_support()
|
||||
black.patch_click()
|
||||
main()
|
||||
|
||||
|
||||
|
@ -1,21 +1,11 @@
|
||||
from typing import TYPE_CHECKING, Any, Awaitable, Callable, Iterable, TypeVar
|
||||
from collections.abc import Awaitable, Callable, Iterable
|
||||
|
||||
from aiohttp.typedefs import Middleware
|
||||
from aiohttp.web_middlewares import middleware
|
||||
from aiohttp.web_request import Request
|
||||
from aiohttp.web_response import StreamResponse
|
||||
|
||||
if TYPE_CHECKING:
|
||||
F = TypeVar("F", bound=Callable[..., Any])
|
||||
middleware: Callable[[F], F]
|
||||
else:
|
||||
try:
|
||||
from aiohttp.web_middlewares import middleware
|
||||
except ImportError:
|
||||
# @middleware is deprecated and its behaviour is the default since aiohttp 4.0
|
||||
# so if it doesn't exist anymore, define a no-op for forward compatibility.
|
||||
middleware = lambda x: x # noqa: E731
|
||||
|
||||
Handler = Callable[[Request], Awaitable[StreamResponse]]
|
||||
Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]]
|
||||
|
||||
|
||||
def cors(allow_headers: Iterable[str]) -> Middleware:
|
||||
|
@ -12,11 +12,17 @@ file_input: (NEWLINE | stmt)* ENDMARKER
|
||||
single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE
|
||||
eval_input: testlist NEWLINE* ENDMARKER
|
||||
|
||||
typevar: NAME [':' test] ['=' test]
|
||||
paramspec: '**' NAME ['=' test]
|
||||
typevartuple: '*' NAME ['=' (test|star_expr)]
|
||||
typeparam: typevar | paramspec | typevartuple
|
||||
typeparams: '[' typeparam (',' typeparam)* [','] ']'
|
||||
|
||||
decorator: '@' namedexpr_test NEWLINE
|
||||
decorators: decorator+
|
||||
decorated: decorators (classdef | funcdef | async_funcdef)
|
||||
async_funcdef: ASYNC funcdef
|
||||
funcdef: 'def' NAME parameters ['->' test] ':' suite
|
||||
funcdef: 'def' NAME [typeparams] parameters ['->' test] ':' suite
|
||||
parameters: '(' [typedargslist] ')'
|
||||
|
||||
# The following definition for typedarglist is equivalent to this set of rules:
|
||||
@ -74,8 +80,8 @@ vfplist: vfpdef (',' vfpdef)* [',']
|
||||
|
||||
stmt: simple_stmt | compound_stmt
|
||||
simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
|
||||
small_stmt: (expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
|
||||
import_stmt | global_stmt | exec_stmt | assert_stmt)
|
||||
small_stmt: (type_stmt | expr_stmt | del_stmt | pass_stmt | flow_stmt |
|
||||
import_stmt | global_stmt | assert_stmt)
|
||||
expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) |
|
||||
('=' (yield_expr|testlist_star_expr))*)
|
||||
annassign: ':' test ['=' (yield_expr|testlist_star_expr)]
|
||||
@ -83,8 +89,6 @@ testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [',']
|
||||
augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' |
|
||||
'<<=' | '>>=' | '**=' | '//=')
|
||||
# For normal and annotated assignments, additional restrictions enforced by the interpreter
|
||||
print_stmt: 'print' ( [ test (',' test)* [','] ] |
|
||||
'>>' test [ (',' test)+ [','] ] )
|
||||
del_stmt: 'del' exprlist
|
||||
pass_stmt: 'pass'
|
||||
flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt
|
||||
@ -103,8 +107,8 @@ import_as_names: import_as_name (',' import_as_name)* [',']
|
||||
dotted_as_names: dotted_as_name (',' dotted_as_name)*
|
||||
dotted_name: NAME ('.' NAME)*
|
||||
global_stmt: ('global' | 'nonlocal') NAME (',' NAME)*
|
||||
exec_stmt: 'exec' expr ['in' test [',' test]]
|
||||
assert_stmt: 'assert' test [',' test]
|
||||
type_stmt: "type" NAME [typeparams] '=' test
|
||||
|
||||
compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt | match_stmt
|
||||
async_stmt: ASYNC (funcdef | with_stmt | for_stmt)
|
||||
@ -159,7 +163,7 @@ atom: ('(' [yield_expr|testlist_gexp] ')' |
|
||||
'[' [listmaker] ']' |
|
||||
'{' [dictsetmaker] '}' |
|
||||
'`' testlist1 '`' |
|
||||
NAME | NUMBER | STRING+ | '.' '.' '.')
|
||||
NAME | NUMBER | (STRING | fstring)+ | '.' '.' '.')
|
||||
listmaker: (namedexpr_test|star_expr) ( old_comp_for | (',' (namedexpr_test|star_expr))* [','] )
|
||||
testlist_gexp: (namedexpr_test|star_expr) ( old_comp_for | (',' (namedexpr_test|star_expr))* [','] )
|
||||
lambdef: 'lambda' [varargslist] ':' test
|
||||
@ -174,7 +178,7 @@ dictsetmaker: ( ((test ':' asexpr_test | '**' expr)
|
||||
((test [':=' test] | star_expr)
|
||||
(comp_for | (',' (test [':=' test] | star_expr))* [','])) )
|
||||
|
||||
classdef: 'class' NAME ['(' [arglist] ')'] ':' suite
|
||||
classdef: 'class' NAME [typeparams] ['(' [arglist] ')'] ':' suite
|
||||
|
||||
arglist: argument (',' argument)* [',']
|
||||
|
||||
@ -250,3 +254,8 @@ case_block: "case" patterns [guard] ':' suite
|
||||
guard: 'if' namedexpr_test
|
||||
patterns: pattern (',' pattern)* [',']
|
||||
pattern: (expr|star_expr) ['as' expr]
|
||||
|
||||
fstring: FSTRING_START fstring_middle* FSTRING_END
|
||||
fstring_middle: fstring_replacement_field | FSTRING_MIDDLE
|
||||
fstring_replacement_field: '{' (yield_expr | testlist_star_expr) ['='] [ "!" NAME ] [ ':' fstring_format_spec* ] '}'
|
||||
fstring_format_spec: FSTRING_MIDDLE | fstring_replacement_field
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user