Make cache work with non-default line lengths (#163)

Jonas Obrist 2018-04-25 03:56:50 +09:00 committed by Łukasz Langa
parent 52fda8b0e9
commit 92957a41e3
3 changed files with 46 additions and 29 deletions


@@ -458,9 +458,9 @@ location of the file depends on the black version and the system on which black
 is run. The file is non-portable. The standard location on common operating systems
 is:

-* Windows: `C:\\Users\<username>\AppData\Local\black\black\Cache\<version>\cache.pickle`
-* macOS: `/Users/<username>/Library/Caches/black/<version>/cache.pickle`
-* Linux: `/home/<username>/.cache/black/<version>/cache.pickle`
+* Windows: `C:\\Users\<username>\AppData\Local\black\black\Cache\<version>\cache.<line-length>.pickle`
+* macOS: `/Users/<username>/Library/Caches/black/<version>/cache.<line-length>.pickle`
+* Linux: `/home/<username>/.cache/black/<version>/cache.<line-length>.pickle`

 ## Testimonials
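As an editorial aside (not part of the commit), here is a minimal sketch of how the per-line-length cache path above is derived. It assumes the appdirs package that black uses for user_cache_dir; the version string is a placeholder, and 88 is only an example line length.

from pathlib import Path

from appdirs import user_cache_dir

version = "<version>"  # black substitutes its own __version__ here
line_length = 88       # example value

# The cache directory is keyed by tool name and version; the file name now
# embeds the line length, so runs with different --line-length never share
# a cache file.
cache_dir = Path(user_cache_dir("black", version=version))
cache_file = cache_dir / f"cache.{line_length}.pickle"
print(cache_file)  # e.g. /home/<username>/.cache/black/<version>/cache.88.pickle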


@@ -233,7 +233,7 @@ def reformat_one(
         else:
             cache: Cache = {}
             if write_back != WriteBack.DIFF:
-                cache = read_cache()
+                cache = read_cache(line_length)
                 src = src.resolve()
                 if src in cache and cache[src] == get_cache_info(src):
                     changed = Changed.CACHED
@@ -245,7 +245,7 @@ def reformat_one(
             ):
                 changed = Changed.YES
             if write_back != WriteBack.DIFF and changed is not Changed.NO:
-                write_cache(cache, [src])
+                write_cache(cache, [src], line_length)
         report.done(src, changed)
     except Exception as exc:
         report.failed(src, str(exc))
@@ -269,7 +269,7 @@ async def schedule_formatting(
     """
     cache: Cache = {}
     if write_back != WriteBack.DIFF:
-        cache = read_cache()
+        cache = read_cache(line_length)
         sources, cached = filter_cached(cache, sources)
         for src in cached:
             report.done(src, Changed.CACHED)
@@ -312,7 +312,7 @@ async def schedule_formatting(
     if cancelled:
         await asyncio.gather(*cancelled, loop=loop, return_exceptions=True)
     if write_back != WriteBack.DIFF and formatted:
-        write_cache(cache, formatted)
+        write_cache(cache, formatted, line_length)


 def format_file_in_place(
@@ -2473,18 +2473,22 @@ def sub_twice(regex: Pattern[str], replacement: str, original: str) -> str:
 CACHE_DIR = Path(user_cache_dir("black", version=__version__))
-CACHE_FILE = CACHE_DIR / "cache.pickle"


-def read_cache() -> Cache:
+def get_cache_file(line_length: int) -> Path:
+    return CACHE_DIR / f"cache.{line_length}.pickle"
+
+
+def read_cache(line_length: int) -> Cache:
     """Read the cache if it exists and is well formed.

     If it is not well formed, the call to write_cache later should resolve the issue.
     """
-    if not CACHE_FILE.exists():
+    cache_file = get_cache_file(line_length)
+    if not cache_file.exists():
         return {}

-    with CACHE_FILE.open("rb") as fobj:
+    with cache_file.open("rb") as fobj:
         try:
             cache: Cache = pickle.load(fobj)
         except pickle.UnpicklingError:
@@ -2517,13 +2521,14 @@ def filter_cached(
     return todo, done


-def write_cache(cache: Cache, sources: List[Path]) -> None:
+def write_cache(cache: Cache, sources: List[Path], line_length: int) -> None:
     """Update the cache file."""
+    cache_file = get_cache_file(line_length)
     try:
         if not CACHE_DIR.exists():
             CACHE_DIR.mkdir(parents=True)
         new_cache = {**cache, **{src.resolve(): get_cache_info(src) for src in sources}}
-        with CACHE_FILE.open("wb") as fobj:
+        with cache_file.open("wb") as fobj:
             pickle.dump(new_cache, fobj, protocol=pickle.HIGHEST_PROTOCOL)
     except OSError:
         pass
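To make the new signatures concrete, a short usage sketch (editorial, not part of the commit). Note that it writes into the real per-user black cache directory, and the line lengths 100 and 80 are arbitrary example values.

from pathlib import Path

import black

src = Path("example.py").resolve()
src.touch()

# Caches are now partitioned by line length: an entry written for one
# length is invisible when the cache for another length is read.
cache = black.read_cache(100)            # {} on a fresh cache
black.write_cache(cache, [src], 100)
assert src in black.read_cache(100)      # same line length: cache hit
assert src not in black.read_cache(80)   # different line length: separate file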


@@ -57,8 +57,7 @@ def cache_dir(exists: bool = True) -> Iterator[Path]:
         cache_dir = Path(workspace)
         if not exists:
             cache_dir = cache_dir / "new"
-        cache_file = cache_dir / "cache.pkl"
-        with patch("black.CACHE_DIR", cache_dir), patch("black.CACHE_FILE", cache_file):
+        with patch("black.CACHE_DIR", cache_dir):
            yield cache_dir
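The simplified fixture works because get_cache_file() reads black.CACHE_DIR at call time, so patching the directory alone redirects every per-line-length cache file; there is no longer a single CACHE_FILE to patch. A small illustrative sketch (editorial; the temporary path and the value 120 are arbitrary):

from pathlib import Path
from unittest.mock import patch

import black

with patch("black.CACHE_DIR", Path("/tmp/black-test-cache")):
    # Both calls resolve under the patched directory; no separate
    # CACHE_FILE patch is needed anymore.
    print(black.get_cache_file(black.DEFAULT_LINE_LENGTH))
    print(black.get_cache_file(120))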
@@ -492,15 +491,16 @@ def err(msg: str, **kwargs: Any) -> None:
     def test_cache_broken_file(self) -> None:
         with cache_dir() as workspace:
-            with black.CACHE_FILE.open("w") as fobj:
+            cache_file = black.get_cache_file(black.DEFAULT_LINE_LENGTH)
+            with cache_file.open("w") as fobj:
                 fobj.write("this is not a pickle")
-            self.assertEqual(black.read_cache(), {})
+            self.assertEqual(black.read_cache(black.DEFAULT_LINE_LENGTH), {})
             src = (workspace / "test.py").resolve()
             with src.open("w") as fobj:
                 fobj.write("print('hello')")
             result = CliRunner().invoke(black.main, [str(src)])
             self.assertEqual(result.exit_code, 0)
-            cache = black.read_cache()
+            cache = black.read_cache(black.DEFAULT_LINE_LENGTH)
             self.assertIn(src, cache)

     def test_cache_single_file_already_cached(self) -> None:
@@ -508,7 +508,7 @@ def test_cache_single_file_already_cached(self) -> None:
             src = (workspace / "test.py").resolve()
             with src.open("w") as fobj:
                 fobj.write("print('hello')")
-            black.write_cache({}, [src])
+            black.write_cache({}, [src], black.DEFAULT_LINE_LENGTH)
             result = CliRunner().invoke(black.main, [str(src)])
             self.assertEqual(result.exit_code, 0)
             with src.open("r") as fobj:
@@ -525,14 +525,14 @@ def test_cache_multiple_files(self) -> None:
             two = (workspace / "two.py").resolve()
             with two.open("w") as fobj:
                 fobj.write("print('hello')")
-            black.write_cache({}, [one])
+            black.write_cache({}, [one], black.DEFAULT_LINE_LENGTH)
             result = CliRunner().invoke(black.main, [str(workspace)])
             self.assertEqual(result.exit_code, 0)
             with one.open("r") as fobj:
                 self.assertEqual(fobj.read(), "print('hello')")
             with two.open("r") as fobj:
                 self.assertEqual(fobj.read(), 'print("hello")\n')
-            cache = black.read_cache()
+            cache = black.read_cache(black.DEFAULT_LINE_LENGTH)
             self.assertIn(one, cache)
             self.assertIn(two, cache)
@@ -543,24 +543,26 @@ def test_no_cache_when_writeback_diff(self) -> None:
                 fobj.write("print('hello')")
             result = CliRunner().invoke(black.main, [str(src), "--diff"])
             self.assertEqual(result.exit_code, 0)
-            self.assertFalse(black.CACHE_FILE.exists())
+            cache_file = black.get_cache_file(black.DEFAULT_LINE_LENGTH)
+            self.assertFalse(cache_file.exists())

     def test_no_cache_when_stdin(self) -> None:
         with cache_dir():
             result = CliRunner().invoke(black.main, ["-"], input="print('hello')")
             self.assertEqual(result.exit_code, 0)
-            self.assertFalse(black.CACHE_FILE.exists())
+            cache_file = black.get_cache_file(black.DEFAULT_LINE_LENGTH)
+            self.assertFalse(cache_file.exists())

     def test_read_cache_no_cachefile(self) -> None:
         with cache_dir():
-            self.assertEqual(black.read_cache(), {})
+            self.assertEqual(black.read_cache(black.DEFAULT_LINE_LENGTH), {})

     def test_write_cache_read_cache(self) -> None:
         with cache_dir() as workspace:
             src = (workspace / "test.py").resolve()
             src.touch()
-            black.write_cache({}, [src])
-            cache = black.read_cache()
+            black.write_cache({}, [src], black.DEFAULT_LINE_LENGTH)
+            cache = black.read_cache(black.DEFAULT_LINE_LENGTH)
             self.assertIn(src, cache)
             self.assertEqual(cache[src], black.get_cache_info(src))
@@ -583,7 +585,7 @@ def test_filter_cached(self) -> None:
     def test_write_cache_creates_directory_if_needed(self) -> None:
         with cache_dir(exists=False) as workspace:
             self.assertFalse(workspace.exists())
-            black.write_cache({}, [])
+            black.write_cache({}, [], black.DEFAULT_LINE_LENGTH)
             self.assertTrue(workspace.exists())

     @event_loop(close=False)
@@ -599,14 +601,14 @@ def test_failed_formatting_does_not_get_cached(self) -> None:
                 fobj.write('print("hello")\n')
             result = CliRunner().invoke(black.main, [str(workspace)])
             self.assertEqual(result.exit_code, 123)
-            cache = black.read_cache()
+            cache = black.read_cache(black.DEFAULT_LINE_LENGTH)
             self.assertNotIn(failing, cache)
             self.assertIn(clean, cache)

     def test_write_cache_write_fail(self) -> None:
         with cache_dir(), patch.object(Path, "open") as mock:
             mock.side_effect = OSError
-            black.write_cache({}, [])
+            black.write_cache({}, [], black.DEFAULT_LINE_LENGTH)

     def test_check_diff_use_together(self) -> None:
         with cache_dir():
@@ -626,6 +628,16 @@ def test_check_diff_use_together(self) -> None:
             )
             self.assertEqual(result.exit_code, 1)

+    def test_read_cache_line_lengths(self) -> None:
+        with cache_dir() as workspace:
+            path = (workspace / "file.py").resolve()
+            path.touch()
+            black.write_cache({}, [path], 1)
+            one = black.read_cache(1)
+            self.assertIn(path, one)
+            two = black.read_cache(2)
+            self.assertNotIn(path, two)
+

 if __name__ == "__main__":
     unittest.main()