Mirror of https://github.com/simonw/datasette.git, synced 2025-12-10 16:51:24 +01:00
Test improvements and fixed deprecation warnings (#2464)
* `asyncio_default_fixture_loop_scope = function`
* Fix a bunch of BeautifulSoup deprecation warnings
* Fix for PytestUnraisableExceptionWarning: Exception ignored in: <_io.FileIO [closed]>
* xfail for sql_time_limit tests (these can be flaky in CI)

Refs #2461
parent 962da77d61
commit 53a3b3c80e

11 changed files with 49 additions and 44 deletions
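
The BeautifulSoup changes in the diff below are one mechanical migration: the camelCase names (`findAll`, `nextSibling`) are deprecated aliases in BeautifulSoup 4, and the snake_case spellings (`find_all`, `next_sibling`) are the supported API. A minimal sketch of the pattern the tests now use, against a made-up HTML fragment rather than the real fixtures:

    # Sketch only: the HTML fragment here is hypothetical, not a datasette fixture.
    from bs4 import BeautifulSoup as Soup

    html = '<form><input type="hidden" name="_size" value="1"/></form>'
    soup = Soup(html, "html.parser")

    # Deprecated spelling (warns): soup.find("form").findAll("input")
    inputs = soup.find("form").find_all("input")
    hiddens = [i for i in inputs if i["type"] == "hidden"]
    assert [(h["name"], h["value"]) for h in hiddens] == [("_size", "1")]

The `asyncio_default_fixture_loop_scope = function` bullet is the pytest-asyncio config option that pins async fixtures to a function-scoped event loop; recent pytest-asyncio releases warn when it is left unset. The xfail bullet refers to `pytest.mark.xfail`, which lets the timing-sensitive sql_time_limit tests fail in CI without failing the build.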
@@ -68,13 +68,13 @@ def test_table_cell_truncation():
         "Arcad…",
     ] == [
         td.string
-        for td in table.findAll("td", {"class": "col-neighborhood-b352a7"})
+        for td in table.find_all("td", {"class": "col-neighborhood-b352a7"})
     ]
     # URLs should be truncated too
     response2 = client.get("/fixtures/roadside_attractions")
     assert response2.status == 200
     table = Soup(response2.body, "html.parser").find("table")
-    tds = table.findAll("td", {"class": "col-url"})
+    tds = table.find_all("td", {"class": "col-url"})
     assert [str(td) for td in tds] == [
         '<td class="col-url type-str"><a href="https://www.mysteryspot.com/">http…</a></td>',
         '<td class="col-url type-str"><a href="https://winchestermysteryhouse.com/">http…</a></td>',
@@ -210,7 +210,7 @@ async def test_searchable_view_persists_fts_table(ds_client):
     response = await ds_client.get(
         "/fixtures/searchable_view?_fts_table=searchable_fts&_fts_pk=pk"
     )
-    inputs = Soup(response.text, "html.parser").find("form").findAll("input")
+    inputs = Soup(response.text, "html.parser").find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [("_fts_table", "searchable_fts"), ("_fts_pk", "pk")] == [
         (hidden["name"], hidden["value"]) for hidden in hiddens
@@ -234,7 +234,7 @@ async def test_sort_by_desc_redirects(ds_client):
 async def test_sort_links(ds_client):
     response = await ds_client.get("/fixtures/sortable?_sort=sortable")
     assert response.status_code == 200
-    ths = Soup(response.text, "html.parser").findAll("th")
+    ths = Soup(response.text, "html.parser").find_all("th")
     attrs_and_link_attrs = [
         {
             "attrs": th.attrs,
@@ -341,7 +341,7 @@ async def test_facet_display(ds_client):
     )
     assert response.status_code == 200
     soup = Soup(response.text, "html.parser")
-    divs = soup.find("div", {"class": "facet-results"}).findAll("div")
+    divs = soup.find("div", {"class": "facet-results"}).find_all("div")
     actual = []
     for div in divs:
         actual.append(
@@ -353,7 +353,7 @@ async def test_facet_display(ds_client):
                     "qs": a["href"].split("?")[-1],
                     "count": int(str(a.parent).split("</a>")[1].split("<")[0]),
                 }
-                for a in div.find("ul").findAll("a")
+                for a in div.find("ul").find_all("a")
             ],
         }
     )
@@ -422,7 +422,7 @@ async def test_facets_persist_through_filter_form(ds_client):
         "/fixtures/facetable?_facet=planet_int&_facet=_city_id&_facet_array=tags"
     )
     assert response.status_code == 200
-    inputs = Soup(response.text, "html.parser").find("form").findAll("input")
+    inputs = Soup(response.text, "html.parser").find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == [
         ("_facet", "planet_int"),
@@ -435,7 +435,7 @@ async def test_facets_persist_through_filter_form(ds_client):
 async def test_next_does_not_persist_in_hidden_field(ds_client):
     response = await ds_client.get("/fixtures/searchable?_size=1&_next=1")
     assert response.status_code == 200
-    inputs = Soup(response.text, "html.parser").find("form").findAll("input")
+    inputs = Soup(response.text, "html.parser").find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == [
         ("_size", "1"),
@@ -448,7 +448,7 @@ async def test_table_html_simple_primary_key(ds_client):
     assert response.status_code == 200
     table = Soup(response.text, "html.parser").find("table")
     assert table["class"] == ["rows-and-columns"]
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert "id\xa0▼" == ths[0].find("a").string.strip()
     for expected_col, th in zip(("content",), ths[1:]):
         a = th.find("a")
@@ -479,7 +479,7 @@ async def test_table_csv_json_export_interface(ds_client):
     links = (
         Soup(response.text, "html.parser")
         .find("p", {"class": "export-links"})
-        .findAll("a")
+        .find_all("a")
     )
     actual = [link["href"] for link in links]
     expected = [
@@ -493,7 +493,7 @@ async def test_table_csv_json_export_interface(ds_client):
     assert expected == actual
     # And the advanced export box at the bottom:
     div = Soup(response.text, "html.parser").find("div", {"class": "advanced-export"})
-    json_links = [a["href"] for a in div.find("p").findAll("a")]
+    json_links = [a["href"] for a in div.find("p").find_all("a")]
     assert [
         "/fixtures/simple_primary_key.json?id__gt=2",
         "/fixtures/simple_primary_key.json?id__gt=2&_shape=array",
@@ -503,7 +503,7 @@ async def test_table_csv_json_export_interface(ds_client):
     # And the CSV form
     form = div.find("form")
     assert form["action"].endswith("/simple_primary_key.csv")
-    inputs = [str(input) for input in form.findAll("input")]
+    inputs = [str(input) for input in form.find_all("input")]
     assert [
         '<input name="_dl" type="checkbox"/>',
         '<input type="submit" value="Export CSV"/>',
@@ -519,7 +519,7 @@ async def test_csv_json_export_links_include_labels_if_foreign_keys(ds_client):
     links = (
         Soup(response.text, "html.parser")
         .find("p", {"class": "export-links"})
-        .findAll("a")
+        .find_all("a")
     )
     actual = [link["href"] for link in links]
     expected = [
@@ -571,7 +571,7 @@ async def test_rowid_sortable_no_primary_key(ds_client):
     assert response.status_code == 200
     table = Soup(response.text, "html.parser").find("table")
     assert table["class"] == ["rows-and-columns"]
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert "rowid\xa0▼" == ths[1].find("a").string.strip()
 
 
@@ -580,7 +580,7 @@ async def test_table_html_compound_primary_key(ds_client):
     response = await ds_client.get("/fixtures/compound_primary_key")
     assert response.status_code == 200
     table = Soup(response.text, "html.parser").find("table")
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert "Link" == ths[0].string.strip()
     for expected_col, th in zip(("pk1", "pk2", "content"), ths[1:]):
         a = th.find("a")
@@ -811,7 +811,7 @@ async def test_advanced_export_box(ds_client, path, has_object, has_stream, has_
     if has_object:
         expected_json_shapes.append("object")
     div = soup.find("div", {"class": "advanced-export"})
-    assert expected_json_shapes == [a.text for a in div.find("p").findAll("a")]
+    assert expected_json_shapes == [a.text for a in div.find("p").find_all("a")]
     # "stream all rows" option
     if has_stream:
         assert "stream all rows" in str(div)
@@ -828,13 +828,13 @@ async def test_extra_where_clauses(ds_client):
     soup = Soup(response.text, "html.parser")
     div = soup.select(".extra-wheres")[0]
     assert "2 extra where clauses" == div.find("h3").text
-    hrefs = [a["href"] for a in div.findAll("a")]
+    hrefs = [a["href"] for a in div.find_all("a")]
     assert [
         "/fixtures/facetable?_where=_city_id%3D1",
         "/fixtures/facetable?_where=_neighborhood%3D%27Dogpatch%27",
     ] == hrefs
     # These should also be persisted as hidden fields
-    inputs = soup.find("form").findAll("input")
+    inputs = soup.find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [("_where", "_neighborhood='Dogpatch'"), ("_where", "_city_id=1")] == [
         (hidden["name"], hidden["value"]) for hidden in hiddens
@@ -859,7 +859,7 @@ async def test_extra_where_clauses(ds_client):
 async def test_other_hidden_form_fields(ds_client, path, expected_hidden):
     response = await ds_client.get(path)
     soup = Soup(response.text, "html.parser")
-    inputs = soup.find("form").findAll("input")
+    inputs = soup.find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden
 
@@ -878,7 +878,7 @@ async def test_search_and_sort_fields_not_duplicated(ds_client, path, expected_h
     # https://github.com/simonw/datasette/issues/1214
     response = await ds_client.get(path)
     soup = Soup(response.text, "html.parser")
-    inputs = soup.find("form").findAll("input")
+    inputs = soup.find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden
 
@@ -960,7 +960,7 @@ async def test_metadata_sort(ds_client):
     assert response.status_code == 200
     table = Soup(response.text, "html.parser").find("table")
     assert table["class"] == ["rows-and-columns"]
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert ["id", "name\xa0▼"] == [th.find("a").string.strip() for th in ths]
     rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")]
     expected = [
@@ -996,7 +996,7 @@ async def test_metadata_sort_desc(ds_client):
     assert response.status_code == 200
     table = Soup(response.text, "html.parser").find("table")
     assert table["class"] == ["rows-and-columns"]
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert ["pk\xa0▲", "name"] == [th.find("a").string.strip() for th in ths]
     rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")]
     expected = [
@@ -1098,7 +1098,7 @@ async def test_column_metadata(ds_client):
     response = await ds_client.get("/fixtures/roadside_attractions")
     soup = Soup(response.text, "html.parser")
     dl = soup.find("dl")
-    assert [(dt.text, dt.nextSibling.text) for dt in dl.findAll("dt")] == [
+    assert [(dt.text, dt.next_sibling.text) for dt in dl.find_all("dt")] == [
         ("address", "The street address for the attraction"),
         ("name", "The name of the attraction"),
     ]