Skip to content

Commit

Permalink
PYTHONWARNDEFAULTENCODING = 1
Browse files Browse the repository at this point in the history
  • Loading branch information
Zac-HD committed Jul 4, 2023
1 parent 839ef35 commit d07c923
Show file tree
Hide file tree
Showing 5 changed files with 36 additions and 54 deletions.
3 changes: 3 additions & 0 deletions CHANGES.md
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,9 @@

<!-- For example, Docker, GitHub Actions, pre-commit, editors -->

- Black is now tested with
  [`PYTHONWARNDEFAULTENCODING=1`](https://docs.python.org/3/library/io.html#io-encoding-warning)
  (#3763)
- Update GitHub Action to display black output in the job summary (#3688)
- Updated the CI test to stop using the `set-output` command, following GitHub's
  deprecation announcement (#3757)
Expand Down
71 changes: 26 additions & 45 deletions tests/test_black.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,8 +149,7 @@ def test_empty_ff(self) -> None:
tmp_file = Path(black.dump_to_file())
try:
self.assertFalse(ff(tmp_file, write_back=black.WriteBack.YES))
with open(tmp_file, encoding="utf8") as f:
actual = f.read()
actual = tmp_file.read_text(encoding="utf8")
finally:
os.unlink(tmp_file)
self.assertFormatEqual(expected, actual)
Expand Down Expand Up @@ -286,8 +285,7 @@ def test_expression_ff(self) -> None:
tmp_file = Path(black.dump_to_file(source))
try:
self.assertTrue(ff(tmp_file, write_back=black.WriteBack.YES))
with open(tmp_file, encoding="utf8") as f:
actual = f.read()
actual = tmp_file.read_text(encoding="utf8")
finally:
os.unlink(tmp_file)
self.assertFormatEqual(expected, actual)
Expand Down Expand Up @@ -390,8 +388,7 @@ def test_skip_source_first_line(self) -> None:
black.main, [str(tmp_file), "-x", f"--config={EMPTY_CONFIG}"]
)
self.assertEqual(result.exit_code, 0)
with open(tmp_file, encoding="utf8") as f:
actual = f.read()
actual = tmp_file.read_text(encoding="utf8")
self.assertFormatEqual(source, actual)

def test_skip_source_first_line_when_mixing_newlines(self) -> None:
Expand Down Expand Up @@ -1081,7 +1078,7 @@ def test_works_in_mono_process_only_environment(self) -> None:
(workspace / "one.py").resolve(),
(workspace / "two.py").resolve(),
]:
f.write_text('print("hello")\n')
f.write_text('print("hello")\n', encoding="utf-8")
self.invokeBlack([str(workspace)])

@event_loop()
Expand Down Expand Up @@ -1118,11 +1115,9 @@ def test_single_file_force_pyi(self) -> None:
contents, expected = read_data("miscellaneous", "force_pyi")
with cache_dir() as workspace:
path = (workspace / "file.py").resolve()
with open(path, "w") as fh:
fh.write(contents)
path.write_text(contents, encoding="utf-8")
self.invokeBlack([str(path), "--pyi"])
with open(path, "r") as fh:
actual = fh.read()
actual = path.read_text(encoding="utf8")
# verify cache with --pyi is separate
pyi_cache = black.read_cache(pyi_mode)
self.assertIn(str(path), pyi_cache)
Expand All @@ -1143,12 +1138,10 @@ def test_multi_file_force_pyi(self) -> None:
(workspace / "file2.py").resolve(),
]
for path in paths:
with open(path, "w") as fh:
fh.write(contents)
path.write_text(contents, encoding="utf-8")
self.invokeBlack([str(p) for p in paths] + ["--pyi"])
for path in paths:
with open(path, "r") as fh:
actual = fh.read()
actual = path.read_text(encoding="utf8")
self.assertEqual(actual, expected)
# verify cache with --pyi is separate
pyi_cache = black.read_cache(pyi_mode)
Expand All @@ -1172,11 +1165,9 @@ def test_single_file_force_py36(self) -> None:
source, expected = read_data("miscellaneous", "force_py36")
with cache_dir() as workspace:
path = (workspace / "file.py").resolve()
with open(path, "w") as fh:
fh.write(source)
path.write_text(source, encoding="utf-8")
self.invokeBlack([str(path), *PY36_ARGS])
with open(path, "r") as fh:
actual = fh.read()
actual = path.read_text(encoding="utf8")
# verify cache with --target-version is separate
py36_cache = black.read_cache(py36_mode)
self.assertIn(str(path), py36_cache)
Expand All @@ -1195,12 +1186,10 @@ def test_multi_file_force_py36(self) -> None:
(workspace / "file2.py").resolve(),
]
for path in paths:
with open(path, "w") as fh:
fh.write(source)
path.write_text(source, encoding="utf-8")
self.invokeBlack([str(p) for p in paths] + PY36_ARGS)
for path in paths:
with open(path, "r") as fh:
actual = fh.read()
actual = path.read_text(encoding="utf8")
self.assertEqual(actual, expected)
# verify cache with --target-version is separate
pyi_cache = black.read_cache(py36_mode)
Expand Down Expand Up @@ -1631,8 +1620,8 @@ def test_read_pyproject_toml_from_stdin(self) -> None:
src_pyproject = src_dir / "pyproject.toml"
src_pyproject.touch()

test_toml_file = THIS_DIR / "test.toml"
src_pyproject.write_text(test_toml_file.read_text())
test_toml_content = (THIS_DIR / "test.toml").read_text(encoding="utf-8")
src_pyproject.write_text(test_toml_content, encoding="utf-8")

src_python = src_dir / "foo.py"
src_python.touch()
Expand Down Expand Up @@ -1985,10 +1974,10 @@ def test_cache_broken_file(self) -> None:
mode = DEFAULT_MODE
with cache_dir() as workspace:
cache_file = get_cache_file(mode)
cache_file.write_text("this is not a pickle")
cache_file.write_text("this is not a pickle", encoding="utf-8")
assert black.read_cache(mode) == {}
src = (workspace / "test.py").resolve()
src.write_text("print('hello')")
src.write_text("print('hello')", encoding="utf-8")
invokeBlack([str(src)])
cache = black.read_cache(mode)
assert str(src) in cache
Expand All @@ -1997,10 +1986,10 @@ def test_cache_single_file_already_cached(self) -> None:
mode = DEFAULT_MODE
with cache_dir() as workspace:
src = (workspace / "test.py").resolve()
src.write_text("print('hello')")
src.write_text("print('hello')", encoding="utf-8")
black.write_cache({}, [src], mode)
invokeBlack([str(src)])
assert src.read_text() == "print('hello')"
assert src.read_text(encoding="utf-8") == "print('hello')"

@event_loop()
def test_cache_multiple_files(self) -> None:
Expand All @@ -2009,17 +1998,13 @@ def test_cache_multiple_files(self) -> None:
"concurrent.futures.ProcessPoolExecutor", new=ThreadPoolExecutor
):
one = (workspace / "one.py").resolve()
with one.open("w") as fobj:
fobj.write("print('hello')")
one.write_text("print('hello')", encoding="utf-8")
two = (workspace / "two.py").resolve()
with two.open("w") as fobj:
fobj.write("print('hello')")
two.write_text("print('hello')", encoding="utf-8")
black.write_cache({}, [one], mode)
invokeBlack([str(workspace)])
with one.open("r") as fobj:
assert fobj.read() == "print('hello')"
with two.open("r") as fobj:
assert fobj.read() == 'print("hello")\n'
assert one.read_text(encoding="utf-8") == "print('hello')"
assert two.read_text(encoding="utf-8") == 'print("hello")\n'
cache = black.read_cache(mode)
assert str(one) in cache
assert str(two) in cache
Expand All @@ -2029,8 +2014,7 @@ def test_no_cache_when_writeback_diff(self, color: bool) -> None:
mode = DEFAULT_MODE
with cache_dir() as workspace:
src = (workspace / "test.py").resolve()
with src.open("w") as fobj:
fobj.write("print('hello')")
src.write_text("print('hello')", encoding="utf-8")
with patch("black.read_cache") as read_cache, patch(
"black.write_cache"
) as write_cache:
Expand All @@ -2049,8 +2033,7 @@ def test_output_locking_when_writeback_diff(self, color: bool) -> None:
with cache_dir() as workspace:
for tag in range(0, 4):
src = (workspace / f"test{tag}.py").resolve()
with src.open("w") as fobj:
fobj.write("print('hello')")
src.write_text("print('hello')", encoding="utf-8")
with patch(
"black.concurrency.Manager", wraps=multiprocessing.Manager
) as mgr:
Expand Down Expand Up @@ -2120,11 +2103,9 @@ def test_failed_formatting_does_not_get_cached(self) -> None:
"concurrent.futures.ProcessPoolExecutor", new=ThreadPoolExecutor
):
failing = (workspace / "failing.py").resolve()
with failing.open("w") as fobj:
fobj.write("not actually python")
failing.write_text("not actually python", encoding="utf-8")
clean = (workspace / "clean.py").resolve()
with clean.open("w") as fobj:
fobj.write('print("hello")\n')
clean.write_text('print("hello")\n', encoding="utf-8")
invokeBlack([str(workspace)], exit_code=123)
cache = black.read_cache(mode)
assert str(failing) not in cache
Expand Down
9 changes: 3 additions & 6 deletions tests/test_ipynb.py
Original file line number Diff line number Diff line change
Expand Up @@ -439,8 +439,7 @@ def test_cache_isnt_written_if_no_jupyter_deps_single(
jupyter_dependencies_are_installed.cache_clear()
nb = get_case_path("jupyter", "notebook_trailing_newline.ipynb")
tmp_nb = tmp_path / "notebook.ipynb"
with open(nb) as src, open(tmp_nb, "w") as dst:
dst.write(src.read())
tmp_nb.write_bytes(nb.read_bytes())
monkeypatch.setattr(
"black.jupyter_dependencies_are_installed", lambda verbose, quiet: False
)
Expand All @@ -465,8 +464,7 @@ def test_cache_isnt_written_if_no_jupyter_deps_dir(
jupyter_dependencies_are_installed.cache_clear()
nb = get_case_path("jupyter", "notebook_trailing_newline.ipynb")
tmp_nb = tmp_path / "notebook.ipynb"
with open(nb) as src, open(tmp_nb, "w") as dst:
dst.write(src.read())
tmp_nb.write_bytes(nb.read_bytes())
monkeypatch.setattr(
"black.files.jupyter_dependencies_are_installed", lambda verbose, quiet: False
)
Expand All @@ -483,8 +481,7 @@ def test_cache_isnt_written_if_no_jupyter_deps_dir(
def test_ipynb_flag(tmp_path: pathlib.Path) -> None:
nb = get_case_path("jupyter", "notebook_trailing_newline.ipynb")
tmp_nb = tmp_path / "notebook.a_file_extension_which_is_definitely_not_ipynb"
with open(nb) as src, open(tmp_nb, "w") as dst:
dst.write(src.read())
tmp_nb.write_bytes(nb.read_bytes())
result = runner.invoke(
main,
[
Expand Down
3 changes: 1 addition & 2 deletions tests/test_no_ipynb.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,7 @@ def test_ipynb_diff_with_no_change_dir(tmp_path: pathlib.Path) -> None:
runner = CliRunner()
nb = get_case_path("jupyter", "notebook_trailing_newline.ipynb")
tmp_nb = tmp_path / "notebook.ipynb"
with open(nb) as src, open(tmp_nb, "w") as dst:
dst.write(src.read())
tmp_nb.write_bytes(nb.read_bytes())
result = runner.invoke(main, [str(tmp_path)])
expected_output = (
"Skipping .ipynb files as Jupyter dependencies are not installed.\n"
Expand Down
4 changes: 3 additions & 1 deletion tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,9 @@ isolated_build = true
envlist = {,ci-}py{37,38,39,310,311,py3},fuzz,run_self

[testenv]
setenv = PYTHONPATH = {toxinidir}/src
setenv =
PYTHONPATH = {toxinidir}/src
PYTHONWARNDEFAULTENCODING = 1
skip_install = True
# We use `recreate=True` because otherwise, on the second run of `tox -e py`,
# the `no_jupyter` tests would run with the jupyter extra dependencies installed.
Expand Down

0 comments on commit d07c923

Please sign in to comment.