mirror of
https://github.com/simonw/datasette.git
synced 2025-12-10 16:51:24 +01:00
Test improvements and fixed deprecation warnings (#2464)
* `asyncio_default_fixture_loop_scope = function` * Fix a bunch of BeautifulSoup deprecation warnings * Fix for PytestUnraisableExceptionWarning: Exception ignored in: <_io.FileIO [closed]> * xfail for sql_time_limit tests (these can be flaky in CI) Refs #2461
This commit is contained in:
parent
962da77d61
commit
53a3b3c80e
11 changed files with 49 additions and 44 deletions
|
|
@ -45,7 +45,7 @@ def test_homepage(app_client_two_attached_databases):
|
|||
)
|
||||
# We should only show visible, not hidden tables here:
|
||||
table_links = [
|
||||
{"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a")
|
||||
{"href": a["href"], "text": a.text.strip()} for a in links_p.find_all("a")
|
||||
]
|
||||
assert [
|
||||
{"href": r"/extra+database/searchable_fts", "text": "searchable_fts"},
|
||||
|
|
@ -203,6 +203,7 @@ async def test_disallowed_custom_sql_pragma(ds_client):
|
|||
)
|
||||
|
||||
|
||||
@pytest.mark.xfail(reason="Sometimes flaky in CI due to timing issues")
|
||||
def test_sql_time_limit(app_client_shorter_time_limit):
|
||||
response = app_client_shorter_time_limit.get(
|
||||
"/fixtures/-/query?sql=select+sleep(0.5)"
|
||||
|
|
@ -226,7 +227,7 @@ def test_row_page_does_not_truncate():
|
|||
assert table["class"] == ["rows-and-columns"]
|
||||
assert ["Mission"] == [
|
||||
td.string
|
||||
for td in table.findAll("td", {"class": "col-neighborhood-b352a7"})
|
||||
for td in table.find_all("td", {"class": "col-neighborhood-b352a7"})
|
||||
]
|
||||
|
||||
|
||||
|
|
@ -242,7 +243,7 @@ def test_query_page_truncates():
|
|||
)
|
||||
assert response.status_code == 200
|
||||
table = Soup(response.content, "html.parser").find("table")
|
||||
tds = table.findAll("td")
|
||||
tds = table.find_all("td")
|
||||
assert [str(td) for td in tds] == [
|
||||
'<td class="col-a">this …</td>',
|
||||
'<td class="col-b"><a href="https://example.com/">http…</a></td>',
|
||||
|
|
@ -407,7 +408,7 @@ async def test_row_links_from_other_tables(
|
|||
soup = Soup(response.text, "html.parser")
|
||||
h2 = soup.find("h2")
|
||||
assert h2.text == "Links from other tables"
|
||||
li = h2.findNext("ul").find("li")
|
||||
li = h2.find_next("ul").find("li")
|
||||
text = re.sub(r"\s+", " ", li.text.strip())
|
||||
assert text == expected_text
|
||||
link = li.find("a")["href"]
|
||||
|
|
@ -501,7 +502,7 @@ def test_database_download_for_immutable():
|
|||
# Regular page should have a download link
|
||||
response = client.get("/fixtures")
|
||||
soup = Soup(response.content, "html.parser")
|
||||
assert len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
|
||||
assert len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
|
||||
# Check we can actually download it
|
||||
download_response = client.get("/fixtures.db")
|
||||
assert download_response.status_code == 200
|
||||
|
|
@ -530,7 +531,7 @@ def test_database_download_disallowed_for_mutable(app_client):
|
|||
# Use app_client because we need a file database, not in-memory
|
||||
response = app_client.get("/fixtures")
|
||||
soup = Soup(response.content, "html.parser")
|
||||
assert len(soup.findAll("a", {"href": re.compile(r"\.db$")})) == 0
|
||||
assert len(soup.find_all("a", {"href": re.compile(r"\.db$")})) == 0
|
||||
assert app_client.get("/fixtures.db").status_code == 403
|
||||
|
||||
|
||||
|
|
@ -539,7 +540,7 @@ def test_database_download_disallowed_for_memory():
|
|||
# Memory page should NOT have a download link
|
||||
response = client.get("/_memory")
|
||||
soup = Soup(response.content, "html.parser")
|
||||
assert 0 == len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
|
||||
assert 0 == len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
|
||||
assert 404 == client.get("/_memory.db").status
|
||||
|
||||
|
||||
|
|
@ -549,7 +550,7 @@ def test_allow_download_off():
|
|||
) as client:
|
||||
response = client.get("/fixtures")
|
||||
soup = Soup(response.content, "html.parser")
|
||||
assert not len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
|
||||
assert not len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
|
||||
# Accessing URL directly should 403
|
||||
response = client.get("/fixtures.db")
|
||||
assert 403 == response.status
|
||||
|
|
@ -559,7 +560,7 @@ def test_allow_sql_off():
|
|||
with make_app_client(config={"allow_sql": {}}) as client:
|
||||
response = client.get("/fixtures")
|
||||
soup = Soup(response.content, "html.parser")
|
||||
assert not len(soup.findAll("textarea", {"name": "sql"}))
|
||||
assert not len(soup.find_all("textarea", {"name": "sql"}))
|
||||
# The table page should no longer show "View and edit SQL"
|
||||
response = client.get("/fixtures/sortable")
|
||||
assert b"View and edit SQL" not in response.content
|
||||
|
|
@ -855,7 +856,7 @@ def test_base_url_config(app_client_base_url_prefix, path, use_prefix):
|
|||
soup = Soup(response.content, "html.parser")
|
||||
for form in soup.select("form"):
|
||||
assert form["action"].startswith("/prefix")
|
||||
for el in soup.findAll(["a", "link", "script"]):
|
||||
for el in soup.find_all(["a", "link", "script"]):
|
||||
if "href" in el.attrs:
|
||||
href = el["href"]
|
||||
elif "src" in el.attrs:
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue