Use context manager instead of plain open (#1211)

Using open() as a context manager closes the file after use.
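
A minimal sketch of the pattern applied throughout (hypothetical file name):

    # Before: the file object is only closed when it is garbage collected
    data = open("example.txt").read()

    # After: the context manager closes the file as soon as the block exits
    with open("example.txt") as fp:
        data = fp.read()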

When the object is already a pathlib.Path, I used the read_text() and write_text() functions instead.
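
For example (illustrative path; read_text() and write_text() open and close the file internally):

    import pathlib

    path = pathlib.Path("docs") / "example.rst"
    content = path.read_text()   # open, read and close in one call
    path.write_text(content)     # likewise for writing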

In some cases pathlib.Path.open() was used in a context manager; it is basically the same as the built-in open().
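
A sketch of that equivalence (hypothetical file name):

    import pathlib

    # pathlib.Path.open() delegates to the built-in open(),
    # so both blocks behave the same way
    with pathlib.Path("notes.txt").open() as fp:
        text = fp.read()

    with open("notes.txt") as fp:
        text = fp.read()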

Thanks, Konstantin Baikov!
Konstantin Baikov 2021-03-11 17:15:49 +01:00 committed by GitHub
commit 8e18c79431
17 changed files with 93 additions and 63 deletions

@@ -75,10 +75,8 @@ def check_permission_actions_are_documented():
from datasette.plugins import pm
content = (
(pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst")
.open()
.read()
)
pathlib.Path(__file__).parent.parent / "docs" / "authentication.rst"
).read_text()
permissions_re = re.compile(r"\.\. _permissions_([^\s:]+):")
documented_permission_actions = set(permissions_re.findall(content)).union(
UNDOCUMENTED_PERMISSIONS

@@ -789,7 +789,8 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename):
conn.executescript(GENERATED_COLUMNS_SQL)
print(f"Test tables written to {db_filename}")
if metadata:
open(metadata, "w").write(json.dumps(METADATA, indent=4))
with open(metadata, "w") as fp:
fp.write(json.dumps(METADATA, indent=4))
print(f"- metadata written to {metadata}")
if plugins_path:
path = pathlib.Path(plugins_path)
@@ -798,7 +799,7 @@ def cli(db_filename, metadata, plugins_path, recreate, extra_db_filename):
test_plugins = pathlib.Path(__file__).parent / "plugins"
for filepath in test_plugins.glob("*.py"):
newpath = path / filepath.name
newpath.write_text(filepath.open().read())
newpath.write_text(filepath.read_text())
print(f" Wrote plugin: {newpath}")
if extra_db_filename:
if pathlib.Path(extra_db_filename).exists():

@@ -49,7 +49,8 @@ def test_inspect_cli_writes_to_file(app_client):
cli, ["inspect", "fixtures.db", "--inspect-file", "foo.json"]
)
assert 0 == result.exit_code, result.output
data = json.load(open("foo.json"))
with open("foo.json") as fp:
data = json.load(fp)
assert ["fixtures"] == list(data.keys())

@@ -14,7 +14,8 @@ def test_serve_with_get(tmp_path_factory):
@hookimpl
def startup(datasette):
open("{}", "w").write("hello")
with open("{}", "w") as fp:
fp.write("hello")
""".format(
str(plugins_dir / "hello.txt")
),

@@ -19,13 +19,13 @@ def get_headings(content, underline="-"):
def get_labels(filename):
content = (docs_path / filename).open().read()
content = (docs_path / filename).read_text()
return set(label_re.findall(content))
@pytest.fixture(scope="session")
def settings_headings():
return get_headings((docs_path / "settings.rst").open().read(), "~")
return get_headings((docs_path / "settings.rst").read_text(), "~")
@pytest.mark.parametrize("setting", app.SETTINGS)
@@ -43,7 +43,7 @@ def test_settings_are_documented(settings_headings, setting):
),
)
def test_help_includes(name, filename):
expected = open(str(docs_path / filename)).read()
expected = (docs_path / filename).read_text()
runner = CliRunner()
result = runner.invoke(cli, name.split() + ["--help"], terminal_width=88)
actual = f"$ datasette {name} --help\n\n{result.output}"
@@ -55,7 +55,7 @@ def test_help_includes(name, filename):
@pytest.fixture(scope="session")
def plugin_hooks_content():
return (docs_path / "plugin_hooks.rst").open().read()
return (docs_path / "plugin_hooks.rst").read_text()
@pytest.mark.parametrize(

@@ -32,7 +32,8 @@ def test_package(mock_call, mock_which):
capture = CaptureDockerfile()
mock_call.side_effect = capture
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(cli.cli, ["package", "test.db", "--secret", "sekrit"])
assert 0 == result.exit_code
mock_call.assert_has_calls([mock.call(["docker", "build", "."])])
@@ -47,7 +48,8 @@ def test_package_with_port(mock_call, mock_which):
mock_call.side_effect = capture
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli, ["package", "test.db", "-p", "8080", "--secret", "sekrit"]
)

@@ -225,7 +225,8 @@ def test_plugin_config_env_from_list(app_client):
def test_plugin_config_file(app_client):
open(TEMP_PLUGIN_SECRET_FILE, "w").write("FROM_FILE")
with open(TEMP_PLUGIN_SECRET_FILE, "w") as fp:
fp.write("FROM_FILE")
assert {"foo": "FROM_FILE"} == app_client.ds.plugin_config("file-plugin")
# Ensure secrets aren't visible in /-/metadata.json
metadata = app_client.get("/-/metadata.json")

@@ -11,7 +11,8 @@ def test_publish_cloudrun_requires_gcloud(mock_which):
mock_which.return_value = False
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(cli.cli, ["publish", "cloudrun", "test.db"])
assert result.exit_code == 1
assert "Publishing to Google Cloud requires gcloud" in result.output
@@ -40,7 +41,8 @@ def test_publish_cloudrun_prompts_for_service(
mock_which.return_value = True
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli, ["publish", "cloudrun", "test.db"], input="input-service"
)
@@ -81,7 +83,8 @@ def test_publish_cloudrun(mock_call, mock_output, mock_which):
mock_which.return_value = True
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli, ["publish", "cloudrun", "test.db", "--service", "test"]
)
@@ -120,7 +123,8 @@ def test_publish_cloudrun_memory(
mock_which.return_value = True
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli,
["publish", "cloudrun", "test.db", "--service", "test", "--memory", memory],
@@ -152,17 +156,19 @@ def test_publish_cloudrun_plugin_secrets(mock_call, mock_output, mock_which):
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
open("metadata.yml", "w").write(
textwrap.dedent(
"""
with open("test.db", "w") as fp:
fp.write("data")
with open("metadata.yml", "w") as fp:
fp.write(
textwrap.dedent(
"""
title: Hello from metadata YAML
plugins:
datasette-auth-github:
foo: bar
"""
).strip()
)
).strip()
)
result = runner.invoke(
cli.cli,
[
@@ -228,7 +234,8 @@ def test_publish_cloudrun_apt_get_install(mock_call, mock_output, mock_which):
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli,
[
@@ -295,7 +302,8 @@ def test_publish_cloudrun_extra_options(
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli,
[

@@ -8,7 +8,8 @@ def test_publish_heroku_requires_heroku(mock_which):
mock_which.return_value = False
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(cli.cli, ["publish", "heroku", "test.db"])
assert result.exit_code == 1
assert "Publishing to Heroku requires heroku" in result.output
@@ -22,7 +23,8 @@ def test_publish_heroku_installs_plugin(mock_call, mock_check_output, mock_which
mock_check_output.side_effect = lambda s: {"['heroku', 'plugins']": b""}[repr(s)]
runner = CliRunner()
with runner.isolated_filesystem():
open("t.db", "w").write("data")
with open("t.db", "w") as fp:
fp.write("data")
result = runner.invoke(cli.cli, ["publish", "heroku", "t.db"], input="y\n")
assert 0 != result.exit_code
mock_check_output.assert_has_calls(
@@ -54,7 +56,8 @@ def test_publish_heroku(mock_call, mock_check_output, mock_which):
}[repr(s)]
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli, ["publish", "heroku", "test.db", "--tar", "gtar"]
)
@@ -88,7 +91,8 @@ def test_publish_heroku_plugin_secrets(mock_call, mock_check_output, mock_which)
}[repr(s)]
runner = CliRunner()
with runner.isolated_filesystem():
open("test.db", "w").write("data")
with open("test.db", "w") as fp:
fp.write("data")
result = runner.invoke(
cli.cli,
[

@@ -232,7 +232,8 @@ def test_to_css_class(s, expected):
def test_temporary_docker_directory_uses_hard_link():
with tempfile.TemporaryDirectory() as td:
os.chdir(td)
open("hello", "w").write("world")
with open("hello", "w") as fp:
fp.write("world")
# Default usage of this should use symlink
with utils.temporary_docker_directory(
files=["hello"],
@@ -249,7 +250,8 @@ def test_temporary_docker_directory_uses_hard_link():
secret="secret",
) as temp_docker:
hello = os.path.join(temp_docker, "hello")
assert "world" == open(hello).read()
with open(hello) as fp:
assert "world" == fp.read()
# It should be a hard link
assert 2 == os.stat(hello).st_nlink
@@ -260,7 +262,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link):
mock_link.side_effect = OSError
with tempfile.TemporaryDirectory() as td:
os.chdir(td)
open("hello", "w").write("world")
with open("hello", "w") as fp:
fp.write("world")
# Default usage of this should use symlink
with utils.temporary_docker_directory(
files=["hello"],
@@ -277,7 +280,8 @@ def test_temporary_docker_directory_uses_copy_if_hard_link_fails(mock_link):
secret=None,
) as temp_docker:
hello = os.path.join(temp_docker, "hello")
assert "world" == open(hello).read()
with open(hello) as fp:
assert "world" == fp.read()
# It should be a copy, not a hard link
assert 1 == os.stat(hello).st_nlink
@@ -285,7 +289,8 @@ def test_temporary_docker_directory_quotes_args():
def test_temporary_docker_directory_quotes_args():
with tempfile.TemporaryDirectory() as td:
os.chdir(td)
open("hello", "w").write("world")
with open("hello", "w") as fp:
fp.write("world")
with utils.temporary_docker_directory(
files=["hello"],
name="t",
@@ -301,7 +306,8 @@ def test_temporary_docker_directory_quotes_args():
secret="secret",
) as temp_docker:
df = os.path.join(temp_docker, "Dockerfile")
df_contents = open(df).read()
with open(df) as fp:
df_contents = fp.read()
assert "'$PWD'" in df_contents
assert "'--$HOME'" in df_contents
assert "ENV DATASETTE_SECRET 'secret'" in df_contents