Add primer CI tool 🏴 (#1402)
* Add primer CI tool 💩 - Run in PATH `black` binary on configured projects - Can set whether we expect changes or not per project - Can set what python versions are supported for a project - if `long_checkout` True project will not be run on CI Will add to CI after I finish unit tests to avoid silly bugs I'm sure I have 🤪 Tests: - Manual Run - Will add unit tests if people think it will be useful - Output: ```shell (b) cooper-mbp1:black cooper$ time /tmp/b/bin/black-primer -k -w /tmp/cooper_primer_1 [2020-05-10 08:48:25,696] INFO: 4 projects to run black over (lib.py:212) [2020-05-10 08:48:25,697] INFO: Skipping aioexabgp as it's disabled via config (lib.py:166) [2020-05-10 08:48:25,699] INFO: Skipping bandersnatch as it's disabled via config (lib.py:166) [2020-05-10 08:48:28,676] INFO: Analyzing results (lib.py:225) -- primer results 📊 -- 2 / 4 succeeded (50.0%) ✅ 0 / 4 FAILED (0.0%) 💩 - 2 projects Disabled by config - 0 projects skipped due to Python Version - 0 skipped due to long checkout real 0m3.304s user 0m9.529s sys 0m1.019s ``` - ls of /tmp/cooper_primer_1 ``` (b) cooper-mbp1:black cooper$ ls -lh /tmp/cooper_primer_1 total 0 drwxr-xr-x 21 cooper wheel 672B May 10 08:48 attrs drwxr-xr-x 14 cooper wheel 448B May 10 08:48 flake8-bugbear ``` * Address mypy 3.6 type errors - Don't use asyncio.run() ... go back to the past :P - Refactor results into a named tuple of two dicts to avoid typing nightmare - Fix some variable names - Fix bug with rebase logic in git_checkout_or_rebase * Prettier the JSON config file for primer * Delete projects when finished, move dir to be timestamped + shallow copy * Re-enable disabled projects post @JelleZijlstra's docstring fix * Workaround for future annotations until someone tells me the correct fix
This commit is contained in:
parent
12e857f24c
commit
b50a52708c
3
setup.py
3
setup.py
@ -62,7 +62,7 @@ def get_long_description() -> str:
|
||||
license="MIT",
|
||||
py_modules=["_black_version"],
|
||||
ext_modules=ext_modules,
|
||||
packages=["blackd", "black", "blib2to3", "blib2to3.pgen2"],
|
||||
packages=["blackd", "black", "blib2to3", "blib2to3.pgen2", "black_primer"],
|
||||
package_dir={"": "src"},
|
||||
package_data={"blib2to3": ["*.txt"], "black": ["py.typed"]},
|
||||
python_requires=">=3.6",
|
||||
@ -102,6 +102,7 @@ def get_long_description() -> str:
|
||||
"console_scripts": [
|
||||
"black=black:patched_main",
|
||||
"blackd=blackd:patched_main [d]",
|
||||
"black-primer=black_primer.cli:main",
|
||||
]
|
||||
},
|
||||
)
|
||||
|
135
src/black_primer/cli.py
Normal file
135
src/black_primer/cli.py
Normal file
@ -0,0 +1,135 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from os import cpu_count
|
||||
from pathlib import Path
|
||||
from shutil import rmtree, which
|
||||
from tempfile import gettempdir
|
||||
from typing import Any, Union
|
||||
|
||||
import click
|
||||
|
||||
from black_primer import lib
|
||||
|
||||
|
||||
DEFAULT_CONFIG = Path(__file__).parent / "primer.json"
|
||||
_timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
|
||||
DEFAULT_WORKDIR = Path(gettempdir()) / f"primer.{_timestamp}"
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _handle_debug(
    ctx: click.core.Context,
    param: Union[click.core.Option, click.core.Parameter],
    debug: Union[bool, int, str],
) -> Union[bool, int, str]:
    """Click callback for --debug: configure root logging, DEBUG when set."""
    logging.basicConfig(
        format="[%(asctime)s] %(levelname)s: %(message)s (%(filename)s:%(lineno)d)",
        level=logging.DEBUG if debug else logging.INFO,
    )
    # Click callbacks must return the (possibly transformed) value
    return debug
|
||||
|
||||
|
||||
async def async_main(
    config: str,
    debug: bool,
    keep: bool,
    long_checkouts: bool,
    rebase: bool,
    workdir: str,
    workers: int,
) -> int:
    """Drive a primer run: prepare the workdir, process the queue, clean up.

    Returns lib.process_queue's result (the number of failed projects),
    or -1 when no `black` executable can be found in PATH.
    """
    work_path = Path(workdir)
    if not work_path.exists():
        LOG.debug(f"Creating {work_path}")
        work_path.mkdir()

    if not which("black"):
        # No binary under test - nothing useful we can do
        LOG.error("Can not find 'black' executable in PATH. No point in running")
        return -1

    try:
        ret_val = await lib.process_queue(
            config, work_path, workers, keep, long_checkouts, rebase
        )
        return int(ret_val)
    finally:
        # Always clean up checkouts unless the user asked to keep them
        if not keep and work_path.exists():
            LOG.debug(f"Removing {work_path}")
            rmtree(work_path)
|
||||
|
||||
|
||||
@click.command(context_settings={"help_option_names": ["-h", "--help"]})
@click.option(
    "-c",
    "--config",
    default=str(DEFAULT_CONFIG),
    type=click.Path(exists=True),
    show_default=True,
    help="JSON config file path",
)
@click.option(
    "--debug",
    is_flag=True,
    callback=_handle_debug,
    show_default=True,
    help="Turn on debug logging",
)
@click.option(
    "-k",
    "--keep",
    is_flag=True,
    show_default=True,
    help="Keep workdir + repos post run",
)
@click.option(
    "-L",
    "--long-checkouts",
    is_flag=True,
    show_default=True,
    help="Pull big projects to test",
)
@click.option(
    "-R",
    "--rebase",
    is_flag=True,
    show_default=True,
    help="Rebase project if already checked out",
)
@click.option(
    "-w",
    "--workdir",
    default=str(DEFAULT_WORKDIR),
    type=click.Path(exists=False),
    show_default=True,
    help="Directory Path for repo checkouts",
)
@click.option(
    "-W",
    "--workers",
    # Half the CPU cores (fall back to 4 if undetectable), always at least 1
    default=int((cpu_count() or 4) / 2) or 1,
    type=int,
    show_default=True,
    help="Number of parallel worker coroutines",
)
@click.pass_context
def main(ctx: click.core.Context, **kwargs: Any) -> None:
    """primer - prime projects for blackening ... 🏴"""
    LOG.debug(f"Starting {sys.argv[0]}")
    # TODO: Change to asyncio.run() once black supports Python >= 3.7 only
    loop = asyncio.get_event_loop()
    try:
        # Exit code is async_main's return value: failed-project count or -1
        ctx.exit(loop.run_until_complete(async_main(**kwargs)))
    finally:
        loop.close()


if __name__ == "__main__":
    main()
|
261
src/black_primer/lib.py
Normal file
261
src/black_primer/lib.py
Normal file
@ -0,0 +1,261 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Module '__future__' has no attribute 'annotations'
|
||||
from __future__ import annotations # type: ignore
|
||||
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from shutil import rmtree, which
|
||||
from subprocess import CalledProcessError
|
||||
from sys import version_info
|
||||
from typing import Any, Dict, NamedTuple, Optional, Sequence, Tuple
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import click
|
||||
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Results(NamedTuple):
    """Run-wide tallies shared (and mutated) by all worker coroutines.

    NOTE(review): both defaults are mutable class-level dicts, so every
    ``Results()`` instance shares the same two dict objects. Safe here only
    because process_queue creates a single Results per run — confirm before
    constructing more than one.
    """

    # Counters: disabled / failed / skipped_long_checkout / success / wrong_py_ver
    stats: Dict[str, int] = {}
    # Project name -> the CalledProcessError raised when black failed on it
    failed_projects: Dict[str, CalledProcessError] = {}
|
||||
|
||||
|
||||
async def _gen_check_output(
|
||||
cmd: Sequence[str],
|
||||
timeout: float = 30,
|
||||
env: Optional[Dict[str, str]] = None,
|
||||
cwd: Optional[Path] = None,
|
||||
) -> Tuple[bytes, bytes]:
|
||||
process = await asyncio.create_subprocess_exec(
|
||||
*cmd,
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
stderr=asyncio.subprocess.STDOUT,
|
||||
env=env,
|
||||
cwd=cwd,
|
||||
)
|
||||
try:
|
||||
(stdout, stderr) = await asyncio.wait_for(process.communicate(), timeout)
|
||||
except asyncio.TimeoutError:
|
||||
process.kill()
|
||||
await process.wait()
|
||||
raise
|
||||
|
||||
if process.returncode != 0:
|
||||
cmd_str = " ".join(cmd)
|
||||
raise CalledProcessError(
|
||||
process.returncode, cmd_str, output=stdout, stderr=stderr
|
||||
)
|
||||
|
||||
return (stdout, stderr)
|
||||
|
||||
|
||||
async def analyze_results(project_count: int, results: Results) -> int:
    """Print a summary of the run and return the number of failed projects.

    *project_count* includes disabled/skipped projects, so the percentages
    are relative to everything considered, not just what actually ran.
    Callers must ensure project_count > 0 (process_queue does).
    """
    failed_pct = round(((results.stats["failed"] / project_count) * 100), 2)
    success_pct = round(((results.stats["success"] / project_count) * 100), 2)

    # Dropped pointless f-prefixes on the constant strings below
    click.secho("-- primer results 📊 --\n", bold=True)
    click.secho(
        f"{results.stats['success']} / {project_count} succeeded ({success_pct}%) ✅",
        bold=True,
        fg="green",
    )
    click.secho(
        f"{results.stats['failed']} / {project_count} FAILED ({failed_pct}%) 💩",
        bold=bool(results.stats["failed"]),
        fg="red",
    )
    click.echo(f" - {results.stats['disabled']} projects Disabled by config")
    click.echo(
        f" - {results.stats['wrong_py_ver']} projects skipped due to Python Version"
    )
    click.echo(
        f" - {results.stats['skipped_long_checkout']} skipped due to long checkout"
    )

    if results.failed_projects:
        click.secho("\nFailed Projects:\n", bold=True)

    for project_name, project_cpe in results.failed_projects.items():
        print(f"## {project_name}:")
        print(f" - Returned {project_cpe.returncode}")
        if project_cpe.stderr:
            print(f" - stderr:\n{project_cpe.stderr.decode('utf8')}")
        if project_cpe.stdout:
            print(f" - stdout:\n{project_cpe.stdout.decode('utf8')}")
        print("")

    return results.stats["failed"]
|
||||
|
||||
|
||||
async def black_run(
    repo_path: Path, project_config: Dict[str, Any], results: Results
) -> None:
    """Run `black --check --diff` on *repo_path* and record the outcome in *results*."""
    cmd = [str(which("black"))]
    if project_config["cli_arguments"]:
        # BUGFIX: extend with the list itself. `extend(*list)` raised
        # TypeError for more than one argument and iterated the string
        # character-by-character when there was exactly one.
        cmd.extend(project_config["cli_arguments"])
    cmd.extend(["--check", "--diff", "."])

    try:
        _stdout, _stderr = await _gen_check_output(cmd, cwd=repo_path)
    except asyncio.TimeoutError:
        results.stats["failed"] += 1
        LOG.error(f"Running black for {repo_path} timed out ({cmd})")
        # BUGFIX: return here - previously a timeout fell through and
        # was double-counted as a success as well
        return
    except CalledProcessError as cpe:
        # TODO: This might need to be tuned and made smarter for higher signal
        if not project_config["expect_formatting_changes"] and cpe.returncode == 1:
            results.stats["failed"] += 1
            results.failed_projects[repo_path.name] = cpe
        return

    results.stats["success"] += 1
|
||||
|
||||
|
||||
async def git_checkout_or_rebase(
    work_path: Path,
    project_config: Dict[str, Any],
    rebase: bool = False,
    *,
    depth: int = 1,
) -> Optional[Path]:
    """Shallow-clone the project, or rebase an existing checkout.

    Returns the repo's path, or None when git is missing or the
    clone/pull fails.
    """
    # BUGFIX: which() returns None when git is absent; str(None) == "None"
    # is truthy, so the guard below could never fire before
    git_bin = which("git")
    if not git_bin:
        LOG.error("No git binary found")
        return None

    repo_url_parts = urlparse(project_config["git_clone_url"])
    path_parts = repo_url_parts.path[1:].split("/", maxsplit=1)

    repo_path: Path = work_path / path_parts[1].replace(".git", "")
    cmd = [
        git_bin,
        "clone",
        "--depth",
        str(depth),
        project_config["git_clone_url"],
    ]
    cwd = work_path
    if repo_path.exists() and rebase:
        cmd = [git_bin, "pull", "--rebase"]
        cwd = repo_path
    elif repo_path.exists():
        # Existing checkout and no --rebase requested: reuse it as-is
        return repo_path

    try:
        _stdout, _stderr = await _gen_check_output(cmd, cwd=cwd)
    except (asyncio.TimeoutError, CalledProcessError) as e:
        LOG.error(f"Unable to git clone / pull {project_config['git_clone_url']}: {e}")
        return None

    return repo_path
|
||||
|
||||
|
||||
async def load_projects_queue(
    config_path: Path,
) -> Tuple[Dict[str, Any], asyncio.Queue[str]]:
    """Load the primer config and return it plus a queue of project names.

    Names are enqueued in sorted order so runs are deterministic.
    """
    config = json.loads(config_path.read_text())

    # TODO: Offer more options here
    # e.g. Run on X random packages or specific sub list etc.
    names = sorted(config["projects"])
    queue: asyncio.Queue[str] = asyncio.Queue(maxsize=len(names))
    for name in names:
        await queue.put(name)

    return config, queue
|
||||
|
||||
|
||||
async def project_runner(
    idx: int,
    config: Dict[str, Any],
    queue: asyncio.Queue[str],
    work_path: Path,
    results: Results,
    long_checkouts: bool = False,
    rebase: bool = False,
    keep: bool = False,
) -> None:
    """Worker coroutine: drain the queue, checking out and blackening each project."""
    loop = asyncio.get_event_loop()
    py_version = "{}.{}".format(*version_info[:2])
    while True:
        try:
            project_name = queue.get_nowait()
        except asyncio.QueueEmpty:
            # Queue drained - this worker is done
            LOG.debug(f"project_runner {idx} exiting")
            return

        project_config = config["projects"][project_name]

        # Skip anything explicitly disabled in the config
        if project_config.get("disabled"):
            results.stats["disabled"] += 1
            LOG.info(f"Skipping {project_name} as it's disabled via config")
            continue

        # Skip when this interpreter version isn't supported by the project
        supported = project_config["py_versions"]
        if "all" not in supported and py_version not in supported:
            results.stats["wrong_py_ver"] += 1
            LOG.debug(f"Skipping {project_name} as it's not enabled for {py_version}")
            continue

        # Skip big repositories unless the user opted in via --long-checkouts
        if project_config["long_checkout"] and not long_checkouts:
            results.stats["skipped_long_checkout"] += 1
            LOG.debug(f"Skipping {project_name} as it's configured as a long checkout")
            continue

        repo_path = await git_checkout_or_rebase(work_path, project_config, rebase)
        if not repo_path:
            continue
        await black_run(repo_path, project_config, results)

        if not keep:
            LOG.debug(f"Removing {repo_path}")
            # rmtree blocks; run it in the executor so other workers keep going
            await loop.run_in_executor(None, rmtree, repo_path)
|
||||
|
||||
|
||||
async def process_queue(
    config_file: str,
    work_path: Path,
    workers: int,
    keep: bool = False,
    long_checkouts: bool = False,
    rebase: bool = False,
) -> int:
    """
    Process the queue with X workers and evaluate results
    - Success is gauged via the config "expect_formatting_changes"

    Integer return equals the number of failed projects
    """
    results = Results()
    # Results() shares mutable default dicts - initialize every counter here
    for stat in (
        "disabled",
        "failed",
        "skipped_long_checkout",
        "success",
        "wrong_py_ver",
    ):
        results.stats[stat] = 0

    config, queue = await load_projects_queue(Path(config_file))
    project_count = queue.qsize()
    LOG.info(f"{project_count} projects to run black over")
    if not project_count:
        # Nothing configured: signal an error rather than a clean run
        return -1

    LOG.debug(f"Using {workers} parallel workers to run black")
    # Wait until we finish running all the projects before analyzing
    await asyncio.gather(
        *[
            project_runner(
                i, config, queue, work_path, results, long_checkouts, rebase, keep
            )
            for i in range(workers)
        ]
    )

    LOG.info("Analyzing results")
    return await analyze_results(project_count, results)
|
33
src/black_primer/primer.json
Normal file
33
src/black_primer/primer.json
Normal file
@ -0,0 +1,33 @@
|
||||
{
|
||||
"configuration_format_version": 20200509,
|
||||
"projects": {
|
||||
"aioexabgp": {
|
||||
"cli_arguments": [],
|
||||
"expect_formatting_changes": true,
|
||||
"git_clone_url": "https://github.com/cooperlees/aioexabgp.git",
|
||||
"long_checkout": false,
|
||||
"py_versions": ["all"]
|
||||
},
|
||||
"attrs": {
|
||||
"cli_arguments": [],
|
||||
"expect_formatting_changes": true,
|
||||
"git_clone_url": "https://github.com/python-attrs/attrs.git",
|
||||
"long_checkout": false,
|
||||
"py_versions": ["all"]
|
||||
},
|
||||
"bandersnatch": {
|
||||
"cli_arguments": [],
|
||||
"expect_formatting_changes": true,
|
||||
"git_clone_url": "https://github.com/pypa/bandersnatch.git",
|
||||
"long_checkout": false,
|
||||
"py_versions": ["all"]
|
||||
},
|
||||
"flake8-bugbear": {
|
||||
"cli_arguments": [],
|
||||
"expect_formatting_changes": true,
|
||||
"git_clone_url": "https://github.com/PyCQA/flake8-bugbear.git",
|
||||
"long_checkout": false,
|
||||
"py_versions": ["all"]
|
||||
}
|
||||
}
|
||||
}
|
Loading…
Reference in New Issue
Block a user