Workaround for test failure: RuntimeError: There is no current event loop (#1803)
* Remove ensure_eventloop hack
* Hack to recover from intermittent RuntimeError calling asyncio.Lock()
parent 1c29b925d3
commit 64288d827f

2 changed files with 19 additions and 18 deletions
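For context (this sketch is not part of the commit): on Python 3.9 and earlier, asyncio.Lock() looks up the current event loop when it is constructed, so creating one on a thread that has no event loop set fails with the error this commit works around. A minimal reproduction sketch, assuming Python 3.9 or older for the failing case:

    import asyncio
    import threading

    def build_lock(out):
        try:
            # On Python <= 3.9 asyncio.Lock() calls asyncio.get_event_loop();
            # a worker thread has no loop set by default, so this raises
            # "RuntimeError: There is no current event loop in thread ...".
            out.append(asyncio.Lock())
        except RuntimeError as rex:
            out.append(rex)

    out = []
    worker = threading.Thread(target=build_lock, args=(out,))
    worker.start()
    worker.join()
    print(out[0])  # the RuntimeError on <= 3.9; a Lock on 3.10+ (lazy loop binding)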
@@ -231,7 +231,15 @@ class Datasette:
         self.inspect_data = inspect_data
         self.immutables = set(immutables or [])
         self.databases = collections.OrderedDict()
-        self._refresh_schemas_lock = asyncio.Lock()
+        try:
+            self._refresh_schemas_lock = asyncio.Lock()
+        except RuntimeError as rex:
+            # Workaround for intermittent test failure, see:
+            # https://github.com/simonw/datasette/issues/1802
+            if "There is no current event loop in thread" in str(rex):
+                loop = asyncio.new_event_loop()
+                asyncio.set_event_loop(loop)
+                self._refresh_schemas_lock = asyncio.Lock()
         self.crossdb = crossdb
         self.nolock = nolock
         if memory or crossdb or not self.files:
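The hunk above retries asyncio.Lock() after installing a fresh event loop when the current thread has none. The same recover-and-retry pattern as a standalone sketch; the helper name is made up for illustration and, unlike the commit, unrelated RuntimeErrors are re-raised:

    import asyncio

    def lock_with_event_loop_fallback() -> asyncio.Lock:
        try:
            return asyncio.Lock()
        except RuntimeError as rex:
            # Only recover from the specific "no current event loop" failure
            if "There is no current event loop" not in str(rex):
                raise
            # Create and install a loop for this thread, then retry once
            asyncio.set_event_loop(asyncio.new_event_loop())
            return asyncio.Lock()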
@@ -22,13 +22,6 @@ from unittest import mock
 import urllib
 
 
-@pytest.fixture
-def ensure_eventloop():
-    # Workaround for "Event loop is closed" error
-    if asyncio.get_event_loop().is_closed():
-        asyncio.set_event_loop(asyncio.new_event_loop())
-
-
 def test_inspect_cli(app_client):
     runner = CliRunner()
     result = runner.invoke(cli, ["inspect", "fixtures.db"])
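The deleted ensure_eventloop fixture only ran because tests listed it as a parameter, which is why every test signature changed below simply drops one argument. A self-contained sketch of that pytest fixture-injection mechanism, reusing the removed fixture body; the test name is illustrative and not from the Datasette suite:

    import asyncio
    import pytest

    @pytest.fixture
    def ensure_eventloop():
        # The removed workaround: give the thread an open event loop before the
        # test body runs (relies on asyncio.get_event_loop() returning a loop on
        # the main thread, which newer Python versions deprecate).
        if asyncio.get_event_loop().is_closed():
            asyncio.set_event_loop(asyncio.new_event_loop())

    def test_something(ensure_eventloop):
        # pytest matches the parameter name to the fixture and runs it first;
        # deleting the parameter, as this commit does, skips that setup.
        assert not asyncio.get_event_loop().is_closed()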
@@ -72,7 +65,7 @@ def test_serve_with_inspect_file_prepopulates_table_counts_cache():
     ),
 )
 def test_spatialite_error_if_attempt_to_open_spatialite(
-    ensure_eventloop, spatialite_paths, should_suggest_load_extension
+    spatialite_paths, should_suggest_load_extension
 ):
     with mock.patch("datasette.utils.SPATIALITE_PATHS", spatialite_paths):
         runner = CliRunner()
@@ -199,14 +192,14 @@ def test_version():
 
 
 @pytest.mark.parametrize("invalid_port", ["-1", "0.5", "dog", "65536"])
-def test_serve_invalid_ports(ensure_eventloop, invalid_port):
+def test_serve_invalid_ports(invalid_port):
     runner = CliRunner(mix_stderr=False)
     result = runner.invoke(cli, ["--port", invalid_port])
     assert result.exit_code == 2
     assert "Invalid value for '-p'" in result.stderr
 
 
-def test_setting(ensure_eventloop):
+def test_setting():
     runner = CliRunner()
     result = runner.invoke(
         cli, ["--setting", "default_page_size", "5", "--get", "/-/settings.json"]
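The CLI tests below all follow the same in-process pattern: Click's CliRunner invokes the datasette command directly, without a subprocess. A hedged sketch of one complete test in that style, modelled on test_setting above; the imports and the exit-code assertion are assumptions rather than lines copied from the diff:

    import json
    from click.testing import CliRunner
    from datasette.cli import cli  # Datasette's Click command group

    def test_setting_sketch():
        runner = CliRunner()
        # --get serves a single request in-process and prints the response body
        result = runner.invoke(
            cli, ["--setting", "default_page_size", "5", "--get", "/-/settings.json"]
        )
        assert result.exit_code == 0
        assert json.loads(result.output)["default_page_size"] == 5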
@@ -215,14 +208,14 @@ def test_setting(ensure_eventloop):
     assert json.loads(result.output)["default_page_size"] == 5
 
 
-def test_setting_type_validation(ensure_eventloop):
+def test_setting_type_validation():
     runner = CliRunner(mix_stderr=False)
     result = runner.invoke(cli, ["--setting", "default_page_size", "dog"])
     assert result.exit_code == 2
     assert '"default_page_size" should be an integer' in result.stderr
 
 
-def test_config_deprecated(ensure_eventloop):
+def test_config_deprecated():
     # The --config option should show a deprecation message
     runner = CliRunner(mix_stderr=False)
     result = runner.invoke(
@@ -233,14 +226,14 @@ def test_config_deprecated(ensure_eventloop):
     assert "will be deprecated in" in result.stderr
 
 
-def test_sql_errors_logged_to_stderr(ensure_eventloop):
+def test_sql_errors_logged_to_stderr():
     runner = CliRunner(mix_stderr=False)
     result = runner.invoke(cli, ["--get", "/_memory.json?sql=select+blah"])
     assert result.exit_code == 1
     assert "sql = 'select blah', params = {}: no such column: blah\n" in result.stderr
 
 
-def test_serve_create(ensure_eventloop, tmpdir):
+def test_serve_create(tmpdir):
     runner = CliRunner()
     db_path = tmpdir / "does_not_exist_yet.db"
     assert not db_path.exists()
@@ -258,7 +251,7 @@ def test_serve_create(ensure_eventloop, tmpdir):
     assert db_path.exists()
 
 
-def test_serve_duplicate_database_names(ensure_eventloop, tmpdir):
+def test_serve_duplicate_database_names(tmpdir):
     "'datasette db.db nested/db.db' should attach two databases, /db and /db_2"
     runner = CliRunner()
     db_1_path = str(tmpdir / "db.db")
@@ -273,7 +266,7 @@ def test_serve_duplicate_database_names(ensure_eventloop, tmpdir):
     assert {db["name"] for db in databases} == {"db", "db_2"}
 
 
-def test_serve_deduplicate_same_database_path(ensure_eventloop, tmpdir):
+def test_serve_deduplicate_same_database_path(tmpdir):
     "'datasette db.db db.db' should only attach one database, /db"
     runner = CliRunner()
     db_path = str(tmpdir / "db.db")
@@ -287,7 +280,7 @@ def test_serve_deduplicate_same_database_path(ensure_eventloop, tmpdir):
 @pytest.mark.parametrize(
     "filename", ["test-database (1).sqlite", "database (1).sqlite"]
 )
-def test_weird_database_names(ensure_eventloop, tmpdir, filename):
+def test_weird_database_names(tmpdir, filename):
     # https://github.com/simonw/datasette/issues/1181
     runner = CliRunner()
     db_path = str(tmpdir / filename)