Mirror of https://github.com/simonw/datasette.git, synced 2025-12-10 16:51:24 +01:00
Fix for BeautifulSoup findAll warnings
parent 13c338c8f1
commit 9becb04e1b

5 changed files with 38 additions and 38 deletions
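BeautifulSoup 4 renamed its camelCase methods to PEP 8 style names; findAll survives only as a backwards-compatible alias of find_all, and recent releases emit a DeprecationWarning when the old name is called. Every hunk below is the same one-line substitution. A minimal sketch of the rename (the HTML snippet here is illustrative, not taken from the test fixtures):

    from bs4 import BeautifulSoup

    html = '<p class="export-links"><a href="/a.json">json</a><a href="/a.csv">csv</a></p>'
    soup = BeautifulSoup(html, "html.parser")

    # Deprecated camelCase alias; warns on recent BeautifulSoup 4 releases:
    #     links = soup.findAll("a")

    # PEP 8 replacement -- same arguments, same result:
    links = soup.find_all("a")
    assert [a["href"] for a in links] == ["/a.json", "/a.csv"]

Because find_all accepts the same filters as findAll (tag names, attribute dicts, lists of tags), the swap is mechanical and changes no behaviour.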
@@ -40,7 +40,7 @@ def test_homepage(app_client_two_attached_databases):
     )
     # We should only show visible, not hidden tables here:
     table_links = [
-        {"href": a["href"], "text": a.text.strip()} for a in links_p.findAll("a")
+        {"href": a["href"], "text": a.text.strip()} for a in links_p.find_all("a")
     ]
     assert [
         {"href": r"/extra+database/searchable", "text": "searchable"},
@@ -186,7 +186,7 @@ def test_row_page_does_not_truncate():
     assert table["class"] == ["rows-and-columns"]
     assert ["Mission"] == [
         td.string
-        for td in table.findAll("td", {"class": "col-neighborhood-b352a7"})
+        for td in table.find_all("td", {"class": "col-neighborhood-b352a7"})
     ]


@@ -202,7 +202,7 @@ def test_query_page_truncates():
     )
     assert response.status == 200
     table = Soup(response.body, "html.parser").find("table")
-    tds = table.findAll("td")
+    tds = table.find_all("td")
     assert [str(td) for td in tds] == [
         '<td class="col-a">this …</td>',
         '<td class="col-b"><a href="https://example.com/">http…</a></td>',
@@ -421,7 +421,7 @@ def test_database_download_for_immutable():
         # Regular page should have a download link
         response = client.get("/fixtures")
         soup = Soup(response.body, "html.parser")
-        assert len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
+        assert len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
         # Check we can actually download it
         download_response = client.get("/fixtures.db")
         assert download_response.status == 200
@@ -449,7 +449,7 @@ def test_database_download_for_immutable():
 def test_database_download_disallowed_for_mutable(app_client):
     response = app_client.get("/fixtures")
     soup = Soup(response.body, "html.parser")
-    assert 0 == len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
+    assert 0 == len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
     assert 403 == app_client.get("/fixtures.db").status


@@ -458,7 +458,7 @@ def test_database_download_disallowed_for_memory():
         # Memory page should NOT have a download link
         response = client.get("/_memory")
         soup = Soup(response.body, "html.parser")
-        assert 0 == len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
+        assert 0 == len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
         assert 404 == client.get("/_memory.db").status


@@ -468,7 +468,7 @@ def test_allow_download_off():
     ) as client:
         response = client.get("/fixtures")
         soup = Soup(response.body, "html.parser")
-        assert not len(soup.findAll("a", {"href": re.compile(r"\.db$")}))
+        assert not len(soup.find_all("a", {"href": re.compile(r"\.db$")}))
         # Accessing URL directly should 403
         response = client.get("/fixtures.db")
         assert 403 == response.status
@@ -478,7 +478,7 @@ def test_allow_sql_off():
     with make_app_client(metadata={"allow_sql": {}}) as client:
         response = client.get("/fixtures")
         soup = Soup(response.body, "html.parser")
-        assert not len(soup.findAll("textarea", {"name": "sql"}))
+        assert not len(soup.find_all("textarea", {"name": "sql"}))
         # The table page should no longer show "View and edit SQL"
         response = client.get("/fixtures/sortable")
         assert b"View and edit SQL" not in response.body
@@ -767,7 +767,7 @@ def test_base_url_config(app_client_base_url_prefix, path, use_prefix):
     soup = Soup(response.body, "html.parser")
     for form in soup.select("form"):
         assert form["action"].startswith("/prefix")
-    for el in soup.findAll(["a", "link", "script"]):
+    for el in soup.find_all(["a", "link", "script"]):
         if "href" in el.attrs:
             href = el["href"]
         elif "src" in el.attrs:
@@ -337,7 +337,7 @@ def test_permissions_debug(app_client):
     assert response.status == 200
     # Should show one failure and one success
     soup = Soup(response.body, "html.parser")
-    check_divs = soup.findAll("div", {"class": "check"})
+    check_divs = soup.find_all("div", {"class": "check"})
     checks = [
         {
             "action": div.select_one(".check-action").text,
@@ -108,7 +108,7 @@ def test_hook_plugin_prepare_connection_arguments(app_client):
 def test_hook_extra_css_urls(app_client, path, expected_decoded_object):
     response = app_client.get(path)
     assert response.status == 200
-    links = Soup(response.body, "html.parser").findAll("link")
+    links = Soup(response.body, "html.parser").find_all("link")
     special_href = [
         l for l in links if l.attrs["href"].endswith("/extra-css-urls-demo.css")
     ][0]["href"]
@@ -121,7 +121,7 @@ def test_hook_extra_css_urls(app_client, path, expected_decoded_object):

 def test_hook_extra_js_urls(app_client):
     response = app_client.get("/")
-    scripts = Soup(response.body, "html.parser").findAll("script")
+    scripts = Soup(response.body, "html.parser").find_all("script")
     script_attrs = [s.attrs for s in scripts]
     for attrs in [
         {
@@ -145,7 +145,7 @@ def test_plugins_with_duplicate_js_urls(app_client):
     # What matters is that https://plugin-example.datasette.io/jquery.js is only there once
     # and it comes before plugin1.js and plugin2.js which could be in either
     # order
-    scripts = Soup(response.body, "html.parser").findAll("script")
+    scripts = Soup(response.body, "html.parser").find_all("script")
     srcs = [s["src"] for s in scripts if s.get("src")]
     # No duplicates allowed:
     assert len(srcs) == len(set(srcs))
@@ -513,7 +513,7 @@ def test_hook_register_output_renderer_can_render(app_client):
     links = (
         Soup(response.body, "html.parser")
         .find("p", {"class": "export-links"})
-        .findAll("a")
+        .find_all("a")
     )
     actual = [l["href"] for l in links]
     # Should not be present because we sent ?_no_can_render=1
@@ -67,13 +67,13 @@ def test_table_cell_truncation():
             "Arcad…",
         ] == [
             td.string
-            for td in table.findAll("td", {"class": "col-neighborhood-b352a7"})
+            for td in table.find_all("td", {"class": "col-neighborhood-b352a7"})
         ]
         # URLs should be truncated too
         response2 = client.get("/fixtures/roadside_attractions")
         assert response2.status == 200
         table = Soup(response2.body, "html.parser").find("table")
-        tds = table.findAll("td", {"class": "col-url"})
+        tds = table.find_all("td", {"class": "col-url"})
         assert [str(td) for td in tds] == [
             '<td class="col-url type-str"><a href="https://www.mysteryspot.com/">http…</a></td>',
             '<td class="col-url type-str"><a href="https://winchestermysteryhouse.com/">http…</a></td>',
@@ -204,7 +204,7 @@ def test_searchable_view_persists_fts_table(app_client):
     response = app_client.get(
         "/fixtures/searchable_view?_fts_table=searchable_fts&_fts_pk=pk"
     )
-    inputs = Soup(response.body, "html.parser").find("form").findAll("input")
+    inputs = Soup(response.body, "html.parser").find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [("_fts_table", "searchable_fts"), ("_fts_pk", "pk")] == [
         (hidden["name"], hidden["value"]) for hidden in hiddens
@@ -226,7 +226,7 @@ def test_sort_by_desc_redirects(app_client):
 def test_sort_links(app_client):
     response = app_client.get("/fixtures/sortable?_sort=sortable")
     assert response.status == 200
-    ths = Soup(response.body, "html.parser").findAll("th")
+    ths = Soup(response.body, "html.parser").find_all("th")
     attrs_and_link_attrs = [
         {
             "attrs": th.attrs,
@@ -332,7 +332,7 @@ def test_facet_display(app_client):
     )
     assert response.status == 200
     soup = Soup(response.body, "html.parser")
-    divs = soup.find("div", {"class": "facet-results"}).findAll("div")
+    divs = soup.find("div", {"class": "facet-results"}).find_all("div")
     actual = []
     for div in divs:
         actual.append(
@@ -344,7 +344,7 @@ def test_facet_display(app_client):
                         "qs": a["href"].split("?")[-1],
                         "count": int(str(a.parent).split("</a>")[1].split("<")[0]),
                     }
-                    for a in div.find("ul").findAll("a")
+                    for a in div.find("ul").find_all("a")
                 ],
             }
         )
@@ -412,7 +412,7 @@ def test_facets_persist_through_filter_form(app_client):
         "/fixtures/facetable?_facet=planet_int&_facet=_city_id&_facet_array=tags"
     )
     assert response.status == 200
-    inputs = Soup(response.body, "html.parser").find("form").findAll("input")
+    inputs = Soup(response.body, "html.parser").find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == [
         ("_facet", "planet_int"),
@@ -424,7 +424,7 @@ def test_facets_persist_through_filter_form(app_client):
 def test_next_does_not_persist_in_hidden_field(app_client):
     response = app_client.get("/fixtures/searchable?_size=1&_next=1")
     assert response.status == 200
-    inputs = Soup(response.body, "html.parser").find("form").findAll("input")
+    inputs = Soup(response.body, "html.parser").find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == [
         ("_size", "1"),
@@ -436,7 +436,7 @@ def test_table_html_simple_primary_key(app_client):
     assert response.status == 200
     table = Soup(response.body, "html.parser").find("table")
     assert table["class"] == ["rows-and-columns"]
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert "id\xa0▼" == ths[0].find("a").string.strip()
     for expected_col, th in zip(("content",), ths[1:]):
         a = th.find("a")
@@ -466,7 +466,7 @@ def test_table_csv_json_export_interface(app_client):
     links = (
         Soup(response.body, "html.parser")
         .find("p", {"class": "export-links"})
-        .findAll("a")
+        .find_all("a")
     )
     actual = [l["href"] for l in links]
     expected = [
@@ -480,7 +480,7 @@ def test_table_csv_json_export_interface(app_client):
     assert expected == actual
     # And the advaced export box at the bottom:
     div = Soup(response.body, "html.parser").find("div", {"class": "advanced-export"})
-    json_links = [a["href"] for a in div.find("p").findAll("a")]
+    json_links = [a["href"] for a in div.find("p").find_all("a")]
     assert [
         "/fixtures/simple_primary_key.json?id__gt=2",
         "/fixtures/simple_primary_key.json?id__gt=2&_shape=array",
@@ -490,7 +490,7 @@ def test_table_csv_json_export_interface(app_client):
     # And the CSV form
     form = div.find("form")
     assert form["action"].endswith("/simple_primary_key.csv")
-    inputs = [str(input) for input in form.findAll("input")]
+    inputs = [str(input) for input in form.find_all("input")]
     assert [
         '<input name="_dl" type="checkbox"/>',
         '<input type="submit" value="Export CSV"/>',
@@ -505,7 +505,7 @@ def test_csv_json_export_links_include_labels_if_foreign_keys(app_client):
     links = (
         Soup(response.body, "html.parser")
         .find("p", {"class": "export-links"})
-        .findAll("a")
+        .find_all("a")
     )
     actual = [l["href"] for l in links]
     expected = [
@@ -554,7 +554,7 @@ def test_rowid_sortable_no_primary_key(app_client):
     assert response.status == 200
     table = Soup(response.body, "html.parser").find("table")
     assert table["class"] == ["rows-and-columns"]
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert "rowid\xa0▼" == ths[1].find("a").string.strip()


@@ -562,7 +562,7 @@ def test_table_html_compound_primary_key(app_client):
     response = app_client.get("/fixtures/compound_primary_key")
     assert response.status == 200
     table = Soup(response.body, "html.parser").find("table")
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert "Link" == ths[0].string.strip()
     for expected_col, th in zip(("pk1", "pk2", "content"), ths[1:]):
         a = th.find("a")
@@ -783,7 +783,7 @@ def test_advanced_export_box(app_client, path, has_object, has_stream, has_expan
     if has_object:
         expected_json_shapes.append("object")
     div = soup.find("div", {"class": "advanced-export"})
-    assert expected_json_shapes == [a.text for a in div.find("p").findAll("a")]
+    assert expected_json_shapes == [a.text for a in div.find("p").find_all("a")]
     # "stream all rows" option
     if has_stream:
         assert "stream all rows" in str(div)
@@ -799,13 +799,13 @@ def test_extra_where_clauses(app_client):
     soup = Soup(response.body, "html.parser")
     div = soup.select(".extra-wheres")[0]
     assert "2 extra where clauses" == div.find("h3").text
-    hrefs = [a["href"] for a in div.findAll("a")]
+    hrefs = [a["href"] for a in div.find_all("a")]
     assert [
         "/fixtures/facetable?_where=_city_id%3D1",
         "/fixtures/facetable?_where=_neighborhood%3D%27Dogpatch%27",
     ] == hrefs
     # These should also be persisted as hidden fields
-    inputs = soup.find("form").findAll("input")
+    inputs = soup.find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [("_where", "_neighborhood='Dogpatch'"), ("_where", "_city_id=1")] == [
         (hidden["name"], hidden["value"]) for hidden in hiddens
@@ -829,7 +829,7 @@ def test_extra_where_clauses(app_client):
 def test_other_hidden_form_fields(app_client, path, expected_hidden):
     response = app_client.get(path)
     soup = Soup(response.body, "html.parser")
-    inputs = soup.find("form").findAll("input")
+    inputs = soup.find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden

@@ -847,7 +847,7 @@ def test_search_and_sort_fields_not_duplicated(app_client, path, expected_hidden
     # https://github.com/simonw/datasette/issues/1214
     response = app_client.get(path)
     soup = Soup(response.body, "html.parser")
-    inputs = soup.find("form").findAll("input")
+    inputs = soup.find("form").find_all("input")
     hiddens = [i for i in inputs if i["type"] == "hidden"]
     assert [(hidden["name"], hidden["value"]) for hidden in hiddens] == expected_hidden

@@ -896,7 +896,7 @@ def test_metadata_sort(app_client):
     assert response.status == 200
     table = Soup(response.body, "html.parser").find("table")
     assert table["class"] == ["rows-and-columns"]
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert ["id", "name\xa0▼"] == [th.find("a").string.strip() for th in ths]
     rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")]
     expected = [
@@ -931,7 +931,7 @@ def test_metadata_sort_desc(app_client):
     assert response.status == 200
     table = Soup(response.body, "html.parser").find("table")
     assert table["class"] == ["rows-and-columns"]
-    ths = table.findAll("th")
+    ths = table.find_all("th")
     assert ["pk\xa0▲", "name"] == [th.find("a").string.strip() for th in ths]
     rows = [[str(td) for td in tr.select("td")] for tr in table.select("tbody tr")]
     expected = [
@@ -1032,7 +1032,7 @@ def test_column_metadata(app_client):
     response = app_client.get("/fixtures/roadside_attractions")
     soup = Soup(response.body, "html.parser")
     dl = soup.find("dl")
-    assert [(dt.text, dt.nextSibling.text) for dt in dl.findAll("dt")] == [
+    assert [(dt.text, dt.nextSibling.text) for dt in dl.find_all("dt")] == [
        ("name", "The name of the attraction"),
        ("address", "The street address for the attraction"),
    ]
@@ -2,7 +2,7 @@ from datasette.utils.sqlite import sqlite3


 def assert_footer_links(soup):
-    footer_links = soup.find("footer").findAll("a")
+    footer_links = soup.find("footer").find_all("a")
     assert 4 == len(footer_links)
     datasette_link, license_link, source_link, about_link = footer_links
     assert "Datasette" == datasette_link.text.strip()